Mirror of https://github.com/github/codeql-action.git (synced 2025-12-08 16:58:06 +08:00)

Compare commits: codeql-bun...v2.26.10 (385 commits)
@@ -1,4 +0,0 @@
**/webpack.config.js
lib/**
src/testdata/**
tests/**
.github/actions/check-sarif/action.yml (vendored): 2 changes

@@ -16,5 +16,5 @@ inputs:
Comma separated list of query ids that should NOT be included in this SARIF file.

runs:
using: node20
using: node16
main: index.js
.github/workflows/__go-custom-queries.yml (generated, vendored): 44 changes

@@ -28,53 +28,9 @@ jobs:
matrix:
include:
- os: ubuntu-latest
version: stable-v2.13.5
- os: macos-12
version: stable-v2.13.5
- os: windows-latest
version: stable-v2.13.5
- os: ubuntu-latest
version: stable-v2.14.6
- os: macos-12
version: stable-v2.14.6
- os: windows-latest
version: stable-v2.14.6
- os: ubuntu-latest
version: stable-v2.15.5
- os: macos-latest
version: stable-v2.15.5
- os: windows-latest
version: stable-v2.15.5
- os: ubuntu-latest
version: stable-v2.16.6
- os: macos-latest
version: stable-v2.16.6
- os: windows-latest
version: stable-v2.16.6
- os: ubuntu-latest
version: stable-v2.17.6
- os: macos-latest
version: stable-v2.17.6
- os: windows-latest
version: stable-v2.17.6
- os: ubuntu-latest
version: default
- os: macos-latest
version: default
- os: windows-latest
version: default
- os: ubuntu-latest
version: linked
- os: macos-latest
version: linked
- os: windows-latest
version: linked
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: 'Go: Custom queries'
permissions:
contents: read
.github/workflows/__remote-config.yml (generated, vendored): 44 changes

@@ -28,53 +28,9 @@ jobs:
matrix:
include:
- os: ubuntu-latest
version: stable-v2.13.5
- os: macos-12
version: stable-v2.13.5
- os: windows-latest
version: stable-v2.13.5
- os: ubuntu-latest
version: stable-v2.14.6
- os: macos-12
version: stable-v2.14.6
- os: windows-latest
version: stable-v2.14.6
- os: ubuntu-latest
version: stable-v2.15.5
- os: macos-latest
version: stable-v2.15.5
- os: windows-latest
version: stable-v2.15.5
- os: ubuntu-latest
version: stable-v2.16.6
- os: macos-latest
version: stable-v2.16.6
- os: windows-latest
version: stable-v2.16.6
- os: ubuntu-latest
version: stable-v2.17.6
- os: macos-latest
version: stable-v2.17.6
- os: windows-latest
version: stable-v2.17.6
- os: ubuntu-latest
version: default
- os: macos-latest
version: default
- os: windows-latest
version: default
- os: ubuntu-latest
version: linked
- os: macos-latest
version: linked
- os: windows-latest
version: linked
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: Remote config file
permissions:
contents: read
.github/workflows/__unset-environment.yml (generated, vendored): 12 changes

@@ -27,18 +27,6 @@ jobs:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
version: stable-v2.13.5
- os: ubuntu-latest
version: stable-v2.14.6
- os: ubuntu-latest
version: stable-v2.15.5
- os: ubuntu-latest
version: stable-v2.16.6
- os: ubuntu-latest
version: stable-v2.17.6
- os: ubuntu-latest
version: default
- os: ubuntu-latest
version: linked
- os: ubuntu-latest
.github/workflows/__zstd-bundle-fallback.yml (generated, vendored, new file): 130 lines

@@ -0,0 +1,130 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Zstandard bundle fallback
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v*
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
schedule:
- cron: '0 5 * * *'
workflow_dispatch: {}
jobs:
zstd-bundle-fallback:
strategy:
fail-fast: false
matrix:
include:
- os: macos-latest
version: linked
- os: windows-latest
version: linked
- os: ubuntu-latest
version: linked
name: Zstandard bundle fallback
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Setup Python on MacOS
uses: actions/setup-python@v5
if: >-
runner.os == 'macOS' && (

matrix.version == 'stable-v2.13.5' ||

matrix.version == 'stable-v2.14.6')
with:
python-version: '3.11'
- name: Check out repository
uses: actions/checkout@v4
- name: Prepare test
id: prepare-test
uses: ./.github/actions/prepare-test
with:
version: ${{ matrix.version }}
use-all-platform-bundle: 'false'
setup-kotlin: 'true'
- name: Remove CodeQL from toolcache
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
const path = require('path');
const codeqlPath = path.join(process.env['RUNNER_TOOL_CACHE'], 'CodeQL');
fs.rmdirSync(codeqlPath, { recursive: true });
- id: init
uses: ./../action/init
with:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v3
with:
name: zstd-bundle.sarif
path: ${{ runner.temp }}/results/javascript.sarif
retention-days: 7
- name: Check expected diagnostics
uses: actions/github-script@v7
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
with:
script: |
const fs = require('fs');

const sarif = JSON.parse(fs.readFileSync(process.env['SARIF_PATH'], 'utf8'));
const run = sarif.runs[0];

const toolExecutionNotifications = run.invocations[0].toolExecutionNotifications;
const downloadTelemetryNotifications = toolExecutionNotifications.filter(n =>
n.descriptor.id === 'codeql-action/bundle-download-telemetry'
);
if (downloadTelemetryNotifications.length !== 1) {
core.setFailed(
'Expected exactly one reporting descriptor in the ' +
`'runs[].invocations[].toolExecutionNotifications[]' SARIF property, but found ` +
`${downloadTelemetryNotifications.length}. All notification reporting descriptors: ` +
`${JSON.stringify(toolExecutionNotifications)}.`
);
}

const toolsUrl = downloadTelemetryNotifications[0].properties.attributes.toolsUrl;
console.log(`Found tools URL: ${toolsUrl}`);

if (!toolsUrl.endsWith('.tar.gz')) {
core.setFailed(
`Expected the tools URL to be a .tar.gz file, but found '${toolsUrl}'.`
);
}

const zstdFailureReason = downloadTelemetryNotifications[0].properties.attributes.zstdFailureReason;
console.log(`Found zstd failure reason: ${zstdFailureReason}`);

const expectedZstdFailureReason = 'Failing since CODEQL_ACTION_FORCE_ZSTD_FAILURE is true.';
if (zstdFailureReason !== expectedZstdFailureReason) {
core.setFailed(
`Expected the zstd failure reason to be '${expectedZstdFailureReason}', but found '${zstdFailureReason}'.`
);
}
env:
CODEQL_ACTION_ZSTD_BUNDLE: true
CODEQL_ACTION_FORCE_ZSTD_FAILURE: true
CODEQL_ACTION_TEST_MODE: true
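The fallback workflow above forces the Zstandard download path to fail (via `CODEQL_ACTION_FORCE_ZSTD_FAILURE`) and then asserts that a gzip bundle was used and that the expected failure reason was recorded. The TypeScript sketch below illustrates that selection logic only; the type and function names are invented here and this is not the action's implementation. Only the environment variable names, the failure-reason string, and the `.tar.zst`/`.tar.gz` extensions come from the workflows in this diff.

```ts
// Illustrative sketch only (hypothetical names): decide which CodeQL bundle flavour to
// download and record the telemetry attributes that the workflows above assert on.
interface BundleChoice {
  toolsUrl: string;
  zstdFailureReason?: string;
}

function chooseBundleUrl(zstUrl: string, gzUrl: string): BundleChoice {
  const zstdEnabled = process.env["CODEQL_ACTION_ZSTD_BUNDLE"] === "true";
  const forceFailure = process.env["CODEQL_ACTION_FORCE_ZSTD_FAILURE"] === "true";

  if (zstdEnabled && !forceFailure) {
    // Preferred path: the Zstandard-compressed bundle (.tar.zst), intended to speed up setup.
    return { toolsUrl: zstUrl };
  }
  // Fallback path: the gzip bundle, recording why the zstd path was not taken.
  return {
    toolsUrl: gzUrl,
    zstdFailureReason: forceFailure
      ? "Failing since CODEQL_ACTION_FORCE_ZSTD_FAILURE is true."
      : undefined,
  };
}

console.log(chooseBundleUrl("codeql-bundle.tar.zst", "codeql-bundle.tar.gz"));
```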
.github/workflows/__zstd-bundle.yml (generated, vendored, new file): 119 lines

@@ -0,0 +1,119 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Zstandard bundle
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v*
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
schedule:
- cron: '0 5 * * *'
workflow_dispatch: {}
jobs:
zstd-bundle:
strategy:
fail-fast: false
matrix:
include:
- os: macos-latest
version: linked
- os: windows-latest
version: linked
- os: ubuntu-latest
version: linked
name: Zstandard bundle
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Setup Python on MacOS
uses: actions/setup-python@v5
if: >-
runner.os == 'macOS' && (

matrix.version == 'stable-v2.13.5' ||

matrix.version == 'stable-v2.14.6')
with:
python-version: '3.11'
- name: Check out repository
uses: actions/checkout@v4
- name: Prepare test
id: prepare-test
uses: ./.github/actions/prepare-test
with:
version: ${{ matrix.version }}
use-all-platform-bundle: 'false'
setup-kotlin: 'true'
- name: Remove CodeQL from toolcache
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
const path = require('path');
const codeqlPath = path.join(process.env['RUNNER_TOOL_CACHE'], 'CodeQL');
fs.rmdirSync(codeqlPath, { recursive: true });
- id: init
uses: ./../action/init
with:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v3
with:
name: zstd-bundle.sarif
path: ${{ runner.temp }}/results/javascript.sarif
retention-days: 7
- name: Check diagnostic with expected tools URL appears in SARIF
uses: actions/github-script@v7
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
with:
script: |
const fs = require('fs');

const sarif = JSON.parse(fs.readFileSync(process.env['SARIF_PATH'], 'utf8'));
const run = sarif.runs[0];

const toolExecutionNotifications = run.invocations[0].toolExecutionNotifications;
const downloadTelemetryNotifications = toolExecutionNotifications.filter(n =>
n.descriptor.id === 'codeql-action/bundle-download-telemetry'
);
if (downloadTelemetryNotifications.length !== 1) {
core.setFailed(
'Expected exactly one reporting descriptor in the ' +
`'runs[].invocations[].toolExecutionNotifications[]' SARIF property, but found ` +
`${downloadTelemetryNotifications.length}. All notification reporting descriptors: ` +
`${JSON.stringify(toolExecutionNotifications)}.`
);
}

const toolsUrl = downloadTelemetryNotifications[0].properties.attributes.toolsUrl;
console.log(`Found tools URL: ${toolsUrl}`);

if (!toolsUrl.endsWith('.tar.zst')) {
core.setFailed(
`Expected the tools URL to be a .tar.zst file, but found ${toolsUrl}.`
);
}
env:
CODEQL_ACTION_ZSTD_BUNDLE: true
CODEQL_ACTION_TEST_MODE: true
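Both Zstandard PR checks end with the same SARIF assertion: exactly one `codeql-action/bundle-download-telemetry` notification whose `toolsUrl` has the expected extension. The same check can be run outside Actions against a downloaded SARIF file; the sketch below is illustrative only (the script name and CLI arguments are invented here), with the descriptor id and property paths taken from the workflows above.

```ts
// Sketch: verify the bundle-download telemetry notification in a SARIF file.
// Usage (assumption): npx ts-node check-bundle-telemetry.ts results/javascript.sarif .tar.zst
import * as fs from "fs";

interface ToolNotification {
  descriptor: { id: string };
  properties?: { attributes?: { toolsUrl?: string; zstdFailureReason?: string } };
}

function checkBundleTelemetry(sarifPath: string, expectedExtension: string): void {
  const sarif = JSON.parse(fs.readFileSync(sarifPath, "utf8"));
  const notifications: ToolNotification[] =
    sarif.runs[0].invocations[0].toolExecutionNotifications ?? [];

  // The workflows above expect exactly one notification with this descriptor id.
  const telemetry = notifications.filter(
    (n) => n.descriptor.id === "codeql-action/bundle-download-telemetry",
  );
  if (telemetry.length !== 1) {
    throw new Error(`Expected exactly one telemetry notification, found ${telemetry.length}`);
  }

  const toolsUrl = telemetry[0].properties?.attributes?.toolsUrl ?? "";
  if (!toolsUrl.endsWith(expectedExtension)) {
    throw new Error(`Expected tools URL ending in ${expectedExtension}, found ${toolsUrl}`);
  }
  console.log(`OK: ${toolsUrl}`);
}

// ".tar.zst" for the Zstandard check, ".tar.gz" for the fallback check.
checkBundleTelemetry(process.argv[2], process.argv[3] ?? ".tar.zst");
```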
.github/workflows/pr-checks.yml (vendored): 14 changes

@@ -17,14 +17,23 @@ jobs:
strategy:
fail-fast: false
matrix:
node-types-version: [16.11, current] # run tests on 16.11 while CodeQL Action v2 is still supported
node-types-version: [16.11, current] # we backport this matrix job in order to maintain the same check names

steps:
- name: Checkout
uses: actions/checkout@v4

- name: Lint
run: npm run-script lint
id: lint
run: npm run-script lint-ci

- name: Upload sarif
uses: github/codeql-action/upload-sarif@v3
# Only upload SARIF for the latest version of Node.js
if: "!cancelled() && matrix.node-types-version == 'current' && !startsWith(github.head_ref, 'dependabot/')"
with:
sarif_file: eslint.sarif
category: eslint

- name: Update version of @types/node
if: matrix.node-types-version != 'current'
@@ -49,6 +58,7 @@ jobs:
fi

- name: Check generated JS
if: matrix.node-types-version != 'current' # we do not need to test the newer node on the v2 branch
run: .github/workflows/script/check-js.sh

check-node-modules:
.github/workflows/rebuild.yml (vendored): 3 changes

@@ -69,7 +69,8 @@ jobs:
if [ ! -z "$(git status --porcelain)" ]; then
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
git commit -am "Rebuild"
git add --all
git commit -m "Rebuild"
git push origin "HEAD:$BRANCH"
echo "Pushed a commit to rebuild the Action." \
"Please mark the PR as ready for review to trigger PR checks." |
.github/workflows/update-release-branch.yml (vendored): 12 changes

@@ -104,6 +104,7 @@ jobs:
backport:
timeout-minutes: 45
runs-on: ubuntu-latest
environment: Automation
needs: [prepare]
if: ${{ (github.event_name == 'push') && needs.prepare.outputs.backport_target_branches != '[]' }}
strategy:
@@ -114,9 +115,18 @@ jobs:
SOURCE_BRANCH: ${{ needs.prepare.outputs.backport_source_branch }}
TARGET_BRANCH: ${{ matrix.target_branch }}
steps:
- uses: actions/checkout@v4
- name: Generate token
uses: actions/create-github-app-token@5d869da34e18e7287c1daad50e0b8ea0f506ce69
id: app-token
with:
app-id: ${{ vars.AUTOMATION_APP_ID }}
private-key: ${{ secrets.AUTOMATION_PRIVATE_KEY }}

- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0 # Need full history for calculation of diffs
token: ${{ steps.app-token.outputs.token }}
- uses: ./.github/actions/release-initialise

- name: Update older release branch
.gitignore (vendored): 4 changes

@@ -5,3 +5,7 @@ node_modules/.cache/
*.class
# macOS
.DS_Store
# eslint sarif report
eslint.sarif
# for local incremental compilation
tsconfig.tsbuildinfo
CHANGELOG.md: 97 changes

@@ -4,194 +4,203 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th

Note that the only difference between `v2` and `v3` of the CodeQL Action is the node version they support, with `v3` running on node 20 while we continue to release `v2` to support running on node 16. For example `3.22.11` was the first `v3` release and is functionally identical to `2.22.11`. This approach ensures an easy way to track exactly which features are included in different versions, indicated by the minor and patch version numbers.

## [UNRELEASED]
## 2.26.10 - 30 Sep 2024

- We are rolling out a feature in September/October 2024 that sets up CodeQL using a bundle compressed with [Zstandard](http://facebook.github.io/zstd/). Our aim is to improve the performance of setting up CodeQL. [#2502](https://github.com/github/codeql-action/pull/2502)

## 2.26.9 - 24 Sep 2024

No user facing changes.

## 3.26.6 - 29 Aug 2024
## 2.26.8 - 19 Sep 2024

- Update default CodeQL bundle version to 2.19.0. [#2483](https://github.com/github/codeql-action/pull/2483)

## 2.26.7 - 13 Sep 2024

- Update default CodeQL bundle version to 2.18.4. [#2471](https://github.com/github/codeql-action/pull/2471)

## 2.26.6 - 29 Aug 2024

- Update default CodeQL bundle version to 2.18.3. [#2449](https://github.com/github/codeql-action/pull/2449)

## 3.26.5 - 23 Aug 2024
## 2.26.5 - 23 Aug 2024

- Fix an issue where the `csrutil` system call used for telemetry would fail on MacOS ARM machines with System Integrity Protection disabled. [#2441](https://github.com/github/codeql-action/pull/2441)

## 3.26.4 - 21 Aug 2024
## 2.26.4 - 21 Aug 2024

- _Deprecation:_ The `add-snippets` input on the `analyze` Action is deprecated and will be removed in the first release in August 2025. [#2436](https://github.com/github/codeql-action/pull/2436)
- Fix an issue where the disk usage system call used for telemetry would fail on MacOS ARM machines with System Integrity Protection disabled, and then surface a warning. The system call is now disabled for these machines. [#2434](https://github.com/github/codeql-action/pull/2434)

## 3.26.3 - 19 Aug 2024
## 2.26.3 - 19 Aug 2024

- Fix an issue where the CodeQL Action could not write diagnostic messages on Windows. This issue did not impact analysis quality. [#2430](https://github.com/github/codeql-action/pull/2430)

## 3.26.2 - 14 Aug 2024
## 2.26.2 - 14 Aug 2024

- Update default CodeQL bundle version to 2.18.2. [#2417](https://github.com/github/codeql-action/pull/2417)

## 3.26.1 - 13 Aug 2024
## 2.26.1 - 13 Aug 2024

No user facing changes.

## 3.26.0 - 06 Aug 2024
## 2.26.0 - 06 Aug 2024

- _Deprecation:_ Swift analysis on Ubuntu runner images is no longer supported. Please migrate to a macOS runner if this affects you. [#2403](https://github.com/github/codeql-action/pull/2403)
- Bump the minimum CodeQL bundle version to 2.13.5. [#2408](https://github.com/github/codeql-action/pull/2408)

## 3.25.15 - 26 Jul 2024
## 2.25.15 - 26 Jul 2024

- Update default CodeQL bundle version to 2.18.1. [#2385](https://github.com/github/codeql-action/pull/2385)

## 3.25.14 - 25 Jul 2024
## 2.25.14 - 25 Jul 2024

- Experimental: add a new `start-proxy` action which starts the same HTTP proxy as used by [`github/dependabot-action`](https://github.com/github/dependabot-action). Do not use this in production as it is part of an internal experiment and subject to change at any time. [#2376](https://github.com/github/codeql-action/pull/2376)

## 3.25.13 - 19 Jul 2024
## 2.25.13 - 19 Jul 2024

- Add `codeql-version` to outputs. [#2368](https://github.com/github/codeql-action/pull/2368)
- Add a deprecation warning for customers using CodeQL version 2.13.4 and earlier. These versions of CodeQL were discontinued on 9 July 2024 alongside GitHub Enterprise Server 3.9, and will be unsupported by CodeQL Action versions 3.26.0 and later and versions 2.26.0 and later. [#2375](https://github.com/github/codeql-action/pull/2375)
- If you are using one of these versions, please update to CodeQL CLI version 2.13.5 or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version.
- Alternatively, if you want to continue using a version of the CodeQL CLI between 2.12.6 and 2.13.4, you can replace `github/codeql-action/*@v3` by `github/codeql-action/*@v3.25.13` and `github/codeql-action/*@v2` by `github/codeql-action/*@v2.25.13` in your code scanning workflow to ensure you continue using this version of the CodeQL Action.

## 3.25.12 - 12 Jul 2024
## 2.25.12 - 12 Jul 2024

- Improve the reliability and performance of analyzing code when analyzing a compiled language with the `autobuild` [build mode](https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages#codeql-build-modes) on GitHub Enterprise Server. This feature is already available to GitHub.com users. [#2353](https://github.com/github/codeql-action/pull/2353)
- Update default CodeQL bundle version to 2.18.0. [#2364](https://github.com/github/codeql-action/pull/2364)

## 3.25.11 - 28 Jun 2024
## 2.25.11 - 28 Jun 2024

- Avoid failing the workflow run if there is an error while uploading debug artifacts. [#2349](https://github.com/github/codeql-action/pull/2349)
- Update default CodeQL bundle version to 2.17.6. [#2352](https://github.com/github/codeql-action/pull/2352)

## 3.25.10 - 13 Jun 2024
## 2.25.10 - 13 Jun 2024

- Update default CodeQL bundle version to 2.17.5. [#2327](https://github.com/github/codeql-action/pull/2327)

## 3.25.9 - 12 Jun 2024
## 2.25.9 - 12 Jun 2024

- Avoid failing database creation if the database folder already exists and contains some unexpected files. Requires CodeQL 2.18.0 or higher. [#2330](https://github.com/github/codeql-action/pull/2330)
- The init Action will attempt to clean up the database cluster directory before creating a new database and at the end of the job. This will help to avoid issues where the database cluster directory is left in an inconsistent state. [#2332](https://github.com/github/codeql-action/pull/2332)

## 3.25.8 - 04 Jun 2024
## 2.25.8 - 04 Jun 2024

- Update default CodeQL bundle version to 2.17.4. [#2321](https://github.com/github/codeql-action/pull/2321)

## 3.25.7 - 31 May 2024
## 2.25.7 - 31 May 2024

- We are rolling out a feature in May/June 2024 that will reduce the Actions cache usage of the Action by keeping only the newest TRAP cache for each language. [#2306](https://github.com/github/codeql-action/pull/2306)

## 3.25.6 - 20 May 2024
## 2.25.6 - 20 May 2024

- Update default CodeQL bundle version to 2.17.3. [#2295](https://github.com/github/codeql-action/pull/2295)

## 3.25.5 - 13 May 2024
## 2.25.5 - 13 May 2024

- Add a compatibility matrix of supported CodeQL Action, CodeQL CLI, and GitHub Enterprise Server versions to the [README.md](README.md). [#2273](https://github.com/github/codeql-action/pull/2273)
- Avoid printing out a warning for a missing `on.push` trigger when the CodeQL Action is triggered via a `workflow_call` event. [#2274](https://github.com/github/codeql-action/pull/2274)
- The `tools: latest` input to the `init` Action has been renamed to `tools: linked`. This option specifies that the Action should use the tools shipped at the same time as the Action. The old name will continue to work for backwards compatibility, but we recommend that new workflows use the new name. [#2281](https://github.com/github/codeql-action/pull/2281)

## 3.25.4 - 08 May 2024
## 2.25.4 - 08 May 2024

- Update default CodeQL bundle version to 2.17.2. [#2270](https://github.com/github/codeql-action/pull/2270)

## 3.25.3 - 25 Apr 2024
## 2.25.3 - 25 Apr 2024

- Update default CodeQL bundle version to 2.17.1. [#2247](https://github.com/github/codeql-action/pull/2247)
- Workflows running on `macos-latest` using CodeQL CLI versions before v2.15.1 will need to either upgrade their CLI version to v2.15.1 or newer, or change the platform to an Intel MacOS runner, such as `macos-12`. ARM machines with SIP disabled, including the newest `macos-latest` image, are unsupported for CLI versions before 2.15.1. [#2261](https://github.com/github/codeql-action/pull/2261)

## 3.25.2 - 22 Apr 2024
## 2.25.2 - 22 Apr 2024

No user facing changes.

## 3.25.1 - 17 Apr 2024
## 2.25.1 - 17 Apr 2024

- We are rolling out a feature in April/May 2024 that improves the reliability and performance of analyzing code when analyzing a compiled language with the `autobuild` [build mode](https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages#codeql-build-modes). [#2235](https://github.com/github/codeql-action/pull/2235)
- Fix a bug where the `init` Action would fail if `--overwrite` was specified in `CODEQL_ACTION_EXTRA_OPTIONS`. [#2245](https://github.com/github/codeql-action/pull/2245)

## 3.25.0 - 15 Apr 2024
## 2.25.0 - 15 Apr 2024

- The deprecated feature for extracting dependencies for a Python analysis has been removed. [#2224](https://github.com/github/codeql-action/pull/2224)

As a result, the following inputs and environment variables are now ignored:

- The `setup-python-dependencies` input to the `init` Action
- The `CODEQL_ACTION_DISABLE_PYTHON_DEPENDENCY_INSTALLATION` environment variable

We recommend removing any references to these from your workflows. For more information, see the release notes for CodeQL Action v3.23.0 and v2.23.0.
- Automatically overwrite an existing database if found on the filesystem. [#2229](https://github.com/github/codeql-action/pull/2229)
- Bump the minimum CodeQL bundle version to 2.12.6. [#2232](https://github.com/github/codeql-action/pull/2232)
- A more relevant log message and a diagnostic are now emitted when the `file` program is not installed on a Linux runner, but is required for Go tracing to succeed. [#2234](https://github.com/github/codeql-action/pull/2234)

## 3.24.10 - 05 Apr 2024
## 2.24.10 - 05 Apr 2024

- Update default CodeQL bundle version to 2.17.0. [#2219](https://github.com/github/codeql-action/pull/2219)
- Add a deprecation warning for customers using CodeQL version 2.12.5 and earlier. These versions of CodeQL were discontinued on 26 March 2024 alongside GitHub Enterprise Server 3.8, and will be unsupported by CodeQL Action versions 3.25.0 and later and versions 2.25.0 and later. [#2220](https://github.com/github/codeql-action/pull/2220)
- If you are using one of these versions, please update to CodeQL CLI version 2.12.6 or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version.
- Alternatively, if you want to continue using a version of the CodeQL CLI between 2.11.6 and 2.12.5, you can replace `github/codeql-action/*@v3` by `github/codeql-action/*@v3.24.10` and `github/codeql-action/*@v2` by `github/codeql-action/*@v2.24.10` in your code scanning workflow to ensure you continue using this version of the CodeQL Action.

## 3.24.9 - 22 Mar 2024
## 2.24.9 - 22 Mar 2024

- Update default CodeQL bundle version to 2.16.5. [#2203](https://github.com/github/codeql-action/pull/2203)

## 3.24.8 - 18 Mar 2024
## 2.24.8 - 18 Mar 2024

- Improve the ease of debugging extraction issues by increasing the verbosity of the extractor logs when running in debug mode. [#2195](https://github.com/github/codeql-action/pull/2195)

## 3.24.7 - 12 Mar 2024
## 2.24.7 - 12 Mar 2024

- Update default CodeQL bundle version to 2.16.4. [#2185](https://github.com/github/codeql-action/pull/2185)

## 3.24.6 - 29 Feb 2024
## 2.24.6 - 29 Feb 2024

No user facing changes.

## 3.24.5 - 23 Feb 2024
## 2.24.5 - 23 Feb 2024

- Update default CodeQL bundle version to 2.16.3. [#2156](https://github.com/github/codeql-action/pull/2156)

## 3.24.4 - 21 Feb 2024
## 2.24.4 - 21 Feb 2024

- Fix an issue where an existing, but empty, `/sys/fs/cgroup/cpuset.cpus` file always resulted in a single-threaded run. [#2151](https://github.com/github/codeql-action/pull/2151)

## 3.24.3 - 15 Feb 2024
## 2.24.3 - 15 Feb 2024

- Fix an issue where the CodeQL Action would fail to load a configuration specified by the `config` input to the `init` Action. [#2147](https://github.com/github/codeql-action/pull/2147)

## 3.24.2 - 15 Feb 2024
## 2.24.2 - 15 Feb 2024

- Enable improved multi-threaded performance on larger runners for GitHub Enterprise Server users. This feature is already available to GitHub.com users. [#2141](https://github.com/github/codeql-action/pull/2141)

## 3.24.1 - 13 Feb 2024
## 2.24.1 - 13 Feb 2024

- Update default CodeQL bundle version to 2.16.2. [#2124](https://github.com/github/codeql-action/pull/2124)
- The CodeQL action no longer fails if it can't write to the telemetry api endpoint. [#2121](https://github.com/github/codeql-action/pull/2121)

## 3.24.0 - 02 Feb 2024
## 2.24.0 - 02 Feb 2024

- CodeQL Python analysis will no longer install dependencies on GitHub Enterprise Server, as is already the case for GitHub.com. See [release notes for 3.23.0](#3230---08-jan-2024) for more details. [#2106](https://github.com/github/codeql-action/pull/2106)

## 3.23.2 - 26 Jan 2024
## 2.23.2 - 26 Jan 2024

- On Linux, the maximum possible value for the `--threads` option now respects the CPU count as specified in `cgroup` files to more accurately reflect the number of available cores when running in containers. [#2083](https://github.com/github/codeql-action/pull/2083)
- Update default CodeQL bundle version to 2.16.1. [#2096](https://github.com/github/codeql-action/pull/2096)

## 3.23.1 - 17 Jan 2024
## 2.23.1 - 17 Jan 2024

- Update default CodeQL bundle version to 2.16.0. [#2073](https://github.com/github/codeql-action/pull/2073)
- Change the retention period for uploaded debug artifacts to 7 days. Previously, this was whatever the repository default was. [#2079](https://github.com/github/codeql-action/pull/2079)

## 3.23.0 - 08 Jan 2024
## 2.23.0 - 08 Jan 2024

- We are rolling out a feature in January 2024 that will disable Python dependency installation by default for all users. This improves the speed of analysis while having only a very minor impact on results. You can override this behavior by setting `CODEQL_ACTION_DISABLE_PYTHON_DEPENDENCY_INSTALLATION=false` in your workflow, however we plan to remove this ability in future versions of the CodeQL Action. [#2031](https://github.com/github/codeql-action/pull/2031)
- The CodeQL Action now requires CodeQL version 2.11.6 or later. For more information, see [the corresponding changelog entry for CodeQL Action version 2.22.7](#2227---16-nov-2023). [#2009](https://github.com/github/codeql-action/pull/2009)

## 3.22.12 - 22 Dec 2023
## 2.22.12 - 22 Dec 2023

- Update default CodeQL bundle version to 2.15.5. [#2047](https://github.com/github/codeql-action/pull/2047)

## 3.22.11 - 13 Dec 2023
## 2.22.11 - 13 Dec 2023

- [v3+ only] The CodeQL Action now runs on Node.js v20. [#2006](https://github.com/github/codeql-action/pull/2006)
No user facing changes.

## 2.22.10 - 12 Dec 2023

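Two entries above (2.23.2 and 2.24.4) describe deriving the `--threads` limit from Linux cgroup files, including the fix for an existing but empty `/sys/fs/cgroup/cpuset.cpus` file. The TypeScript sketch below illustrates that idea only; it is not the action's implementation, and the parsing rules are assumptions based on the standard cpuset list format.

```ts
import * as fs from "fs";

// Sketch: count CPUs listed in a cgroup cpuset file such as /sys/fs/cgroup/cpuset.cpus.
// The file contains ranges like "0-3,5". An existing but empty file should not force a
// single-threaded run (the bug fixed in 2.24.4), so we return undefined in that case.
export function cpuCountFromCpuset(path: string): number | undefined {
  let contents: string;
  try {
    contents = fs.readFileSync(path, "utf8").trim();
  } catch {
    return undefined; // file missing: fall back to other CPU detection
  }
  if (contents.length === 0) {
    return undefined; // empty file: no constraint, fall back
  }
  let count = 0;
  for (const part of contents.split(",")) {
    const [start, end] = part.split("-").map(Number);
    count += end === undefined ? 1 : end - start + 1;
  }
  return count;
}
```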
@@ -74,7 +74,7 @@ inputs:
required: true
default: "true"
token:
description: "GitHub token to use for authenticating with this instance of GitHub. The token needs the `security-events: write` permission."
description: "GitHub token to use for authenticating with this instance of GitHub. The token must be the built-in GitHub Actions token, and the workflow must have the `security-events: write` permission. Most of the time it is advisable to avoid specifying this input so that the workflow falls back to using the default value."
required: false
default: ${{ github.token }}
matrix:
@@ -91,6 +91,6 @@ outputs:
sarif-id:
description: The ID of the uploaded SARIF file.
runs:
using: node20
using: node16
main: "../lib/analyze-action.js"
post: "../lib/analyze-action-post.js"

@@ -15,5 +15,5 @@ inputs:
$GITHUB_WORKSPACE as its working directory.
required: false
runs:
using: node20
using: node16
main: '../lib/autobuild-action.js'

@@ -142,6 +142,6 @@ outputs:
codeql-version:
description: The version of the CodeQL binary used for analysis
runs:
using: node20
using: node16
main: '../lib/init-action.js'
post: '../lib/init-action-post.js'
lib/analyze-action-post-helper.js (generated): 44 changes

@@ -1,44 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.run = run;
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
async function run(uploadSarifDebugArtifact) {
const logger = (0, logging_1.getActionsLogger)();
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Did the 'init' action fail to start?");
}
// Upload Actions SARIF artifacts for debugging
if (config?.debugMode) {
core.info("Debug mode is on. Uploading available SARIF files as Actions debugging artifact...");
const outputDir = actionsUtil.getRequiredInput("output");
await uploadSarifDebugArtifact(config, outputDir);
}
}
//# sourceMappingURL=analyze-action-post-helper.js.map

@@ -1 +0,0 @@
{"version":3,"file":"analyze-action-post-helper.js","sourceRoot":"","sources":["../src/analyze-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAMA,kBAuBC;AA7BD,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAmD;AACnD,uCAA6C;AAEtC,KAAK,UAAU,GAAG,CACvB,wBAGkB;IAElB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACzB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;IACJ,CAAC;IAED,+CAA+C;IAC/C,IAAI,MAAM,EAAE,SAAS,EAAE,CAAC;QACtB,IAAI,CAAC,IAAI,CACP,oFAAoF,CACrF,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,wBAAwB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IACpD,CAAC;AACH,CAAC"}
lib/analyze-action-post-helper.test.js (generated): 73 changes

@@ -1,73 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyzeActionPostHelper = __importStar(require("./analyze-action-post-helper"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("post: analyze action with debug mode off", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
debugMode: false,
gitHubVersion,
languages: [],
packs: [],
});
const uploadSarifSpy = sinon.spy();
await analyzeActionPostHelper.run(uploadSarifSpy);
t.assert(uploadSarifSpy.notCalled);
});
});
(0, ava_1.default)("post: analyze action with debug mode on", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
debugMode: true,
gitHubVersion,
languages: [],
packs: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("output").returns("fake-output-dir");
const uploadSarifSpy = sinon.spy();
await analyzeActionPostHelper.run(uploadSarifSpy);
t.assert(uploadSarifSpy.called);
});
});
//# sourceMappingURL=analyze-action-post-helper.test.js.map

@@ -1 +0,0 @@
{"version":3,"file":"analyze-action-post-helper.test.js","sourceRoot":"","sources":["../src/analyze-action-post-helper.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,sFAAwE;AACxE,4DAA8C;AAC9C,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,0CAA0C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC3D,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QAEpC,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,KAAK;YAChB,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QAEpC,MAAM,cAAc,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;QAEnC,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QAElD,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QAEpC,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,IAAI;YACf,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QAEpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;QAEhE,MAAM,cAAc,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;QAEnC,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QAElD,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;IAClC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
lib/analyze-action-post.js (generated): 16 changes

@@ -29,19 +29,21 @@ Object.defineProperty(exports, "__esModule", { value: true });
* other `post:` hooks.
*/
const core = __importStar(require("@actions/core"));
const analyzeActionPostHelper = __importStar(require("./analyze-action-post-helper"));
const debugArtifacts = __importStar(require("./debug-artifacts"));
const uploadSarifActionPostHelper = __importStar(require("./upload-sarif-action-post-helper"));
const environment_1 = require("./environment");
const logging_1 = require("./logging");
const util_1 = require("./util");
async function runWrapper() {
try {
await analyzeActionPostHelper.run(debugArtifacts.uploadSarifDebugArtifact);
// Also run the upload-sarif post action since we're potentially running
// the same steps in the analyze action.
await uploadSarifActionPostHelper.uploadArtifacts(debugArtifacts.uploadDebugArtifacts);
const logger = (0, logging_1.getActionsLogger)();
// Upload SARIF artifacts if we determine that this is a first-party analysis run.
// For third-party runs, this artifact will be uploaded in the `upload-sarif-post` step.
if (process.env[environment_1.EnvVar.INIT_ACTION_HAS_RUN] === "true") {
await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger));
}
}
catch (error) {
core.setFailed(`analyze post-action step failed: ${(0, util_1.wrapError)(error).message}`);
core.setFailed(`analyze post-action step failed: ${(0, util_1.getErrorMessage)(error)}`);
}
}
void runWrapper();

@@ -1 +1 @@
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,sFAAwE;AACxE,kEAAoD;AACpD,+FAAiF;AACjF,iCAAmC;AAEnC,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,wBAAwB,CAAC,CAAC;QAE3E,wEAAwE;QACxE,wCAAwC;QACxC,MAAM,2BAA2B,CAAC,eAAe,CAC/C,cAAc,CAAC,oBAAoB,CACpC,CAAC;IACJ,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,oCAAoC,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAC/D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,kEAAoD;AACpD,+CAAuC;AACvC,uCAAwD;AACxD,iCAAyC;AAEzC,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAElC,kFAAkF;QAClF,wFAAwF;QACxF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,MAAM,IAAA,mBAAS,EAAC,yCAAyC,EAAE,GAAG,EAAE,CAC9D,cAAc,CAAC,4BAA4B,CAAC,MAAM,CAAC,CACpD,CAAC;QACJ,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,oCAAoC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC7D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
lib/analyze-action.js (generated): 3 changes

@@ -163,6 +163,7 @@ async function run() {
}
const apiDetails = (0, api_client_1.getApiDetails)();
const outputDir = actionsUtil.getRequiredInput("output");
core.exportVariable(environment_1.EnvVar.SARIF_RESULTS_OUTPUT_DIR, outputDir);
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
@@ -244,7 +245,7 @@ async function runWrapper() {
await exports.runPromise;
}
catch (error) {
core.setFailed(`analyze action failed: ${util.wrapError(error).message}`);
core.setFailed(`analyze action failed: ${util.getErrorMessage(error)}`);
}
await util.checkForTimeout();
}
File diff suppressed because one or more lines are too long
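Several of the generated files in this diff swap `util.wrapError(error).message` for `util.getErrorMessage(error)` in their failure paths. As a hedged sketch of what a helper like that conventionally does (an illustration, not the repository's actual `getErrorMessage`), it extracts a printable message from an unknown caught value without wrapping it first:

```ts
// Sketch only: pull a human-readable message out of an unknown caught value.
function getErrorMessage(error: unknown): string {
  return error instanceof Error ? error.message : String(error);
}

// Usage mirroring the catch blocks in the surrounding diff:
try {
  throw new Error("analysis failed");
} catch (error) {
  console.error(`analyze action failed: ${getErrorMessage(error)}`);
}
```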
lib/analyze.js (generated): 2 changes

@@ -186,7 +186,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
}
catch (e) {
statusReport.analyze_failure_language = language;
throw new CodeQLAnalysisError(statusReport, `Error running analysis for ${language}: ${util.wrapError(e).message}`, util.wrapError(e));
throw new CodeQLAnalysisError(statusReport, `Error running analysis for ${language}: ${util.getErrorMessage(e)}`, util.wrapError(e));
}
}
return statusReport;
File diff suppressed because one or more lines are too long
lib/autobuild-action.js (generated): 2 changes

@@ -96,7 +96,7 @@ async function runWrapper() {
await run();
}
catch (error) {
core.setFailed(`autobuild action failed. ${(0, util_1.wrapError)(error).message}`);
core.setFailed(`autobuild action failed. ${(0, util_1.getErrorMessage)(error)}`);
}
}
void runWrapper();

@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAIwB;AACxB,6CAAgD;AAChD,2CAAwE;AACxE,qCAAqC;AACrC,iDAAmD;AACnD,+CAAuC;AAEvC,uCAAqD;AACrD,mDAMyB;AACzB,mDAAuD;AACvD,iCAMgB;AAShB,KAAK,UAAU,yBAAyB,CACtC,MAA0B,EAC1B,MAAc,EACd,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,MAAM,GAAG,IAAA,gCAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,SAAS,EACpB,MAAM,EACN,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,KAAK,EAAE,OAAO,EACd,KAAK,EAAE,KAAK,CACb,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAA0B;YAC1C,GAAG,gBAAgB;YACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;YAC3C,iBAAiB,EAAE,eAAe;SACnC,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,eAAqC,CAAC;IAC1C,IAAI,SAAiC,CAAC;IACtC,IAAI,CAAC;QACH,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,SAAS,EACpB,UAAU,EACV,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;QACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;QAC3C,CAAC;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QACjD,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;QAEtD,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC1D,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;QACJ,CAAC;QAED,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAEjD,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;QACtE,IAAI,SAAS,KAAK,SAAS,EAAE,CAAC;YAC5B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE,CAAC;gBACrB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;YAClC,CAAC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;gBACjC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;YAC/C,CAAC;QACH,CAAC;QAED,+FAA+F;QAC/F,oBAAoB;QACpB,MAAM,IAAA,oCAAoB,EAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;IACrD,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CACZ,kIAAkI,KAAK,CAAC,OAAO,EAAE,CAClJ,CAAC;QACF,MAAM,yBAAyB,CAC7B,MAAM,EACN,MAAM,EACN,SAAS,EACT,SAAS,IAAI,EAAE,EACf,eAAe,EACf,KAAK,CACN,CAAC;QACF,OAAO;IACT,CAAC;IAED,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,mCAAmC,EAAE,MAAM,CAAC,CAAC;IAExE,MAAM,yBAAyB,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,SAAS,IAAI,EAAE,CAAC,CAAC;AAC9E,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CAAC,4BAA4B,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;IACzE,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAIwB;AACxB,6CAAgD;AAChD,2CAAwE;AACxE,qCAAqC;AACrC,iDAAmD;AACnD,+CAAuC;AAEvC,uCAAqD;AACrD,mDAMyB;AACzB,mDAAuD;AACvD,iCAOgB;AAShB,KAAK,UAAU,yBAAyB,CACtC,MAA0B,EAC1B,MAAc,EACd,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,MAAM,GAAG,IAAA,gCAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,SAAS,EACpB,MAAM,EACN,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,KAAK,EAAE,OAAO,EACd,KAAK,EAAE,KAAK,CACb,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAA0B;YAC1C,GAAG,gBAAgB;YACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;YAC3C,iBAAiB,EAAE,eAAe;SACnC,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,eAAqC,CAAC;IAC1C,IAAI,SAAiC,CAAC;IACtC,IAAI,CAAC;QACH,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,SAAS,EACpB,UAAU,EACV,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;QACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;QAC3C,CAAC;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QACjD,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;QAEtD,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC1D,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;QACJ,CAAC;QAED,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAEjD,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;QACtE,IAAI,SAAS,KAAK,SAAS,EAAE,CAAC;YAC5B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE,CAAC;gBACrB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;YAClC,CAAC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;gBACjC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;YAC/C,CAAC;QACH,CAAC;QAED,+FAA+F;QAC/F,oBAAoB;QACpB,MAAM,IAAA,oCAAoB,EAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;IACrD,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CACZ,kIAAkI,KAAK,CAAC,OAAO,EAAE,CAClJ,CAAC;QACF,MAAM,yBAAyB,CAC7B,MAAM,EACN,MAAM,EACN,SAAS,EACT,SAAS,IAAI,EAAE,EACf,eAAe,EACf,KAAK,CACN,CAAC;QACF,OAAO;IACT,CAAC;IAED,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,mCAAmC,EAAE,MAAM,CAAC,CAAC;IAExE,MAAM,yBAAyB,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,SAAS,IAAI,EAAE,CAAC,CAAC;AAC9E,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CAAC,4BAA4B,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IACvE,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

12 lib/codeql.js generated
@@ -122,9 +122,9 @@ const CODEQL_VERSION_CACHE_CLEANUP = "2.17.1";
* version requirement. Must be set to true outside tests.
* @returns a { CodeQL, toolsVersion } object.
*/
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, checkVersion) {
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) {
try {
const { codeqlFolder, toolsDownloadStatusReport, toolsSource, toolsVersion, } = await setupCodeql.setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger);
const { codeqlFolder, toolsDownloadStatusReport, toolsSource, toolsVersion, zstdAvailability, } = await setupCodeql.setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger);
logger.debug(`Bundle download status report: ${JSON.stringify(toolsDownloadStatusReport)}`);
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
if (process.platform === "win32") {
@@ -139,10 +139,11 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
toolsDownloadStatusReport,
toolsSource,
toolsVersion,
zstdAvailability,
};
}
catch (e) {
throw new Error(`Unable to download and extract CodeQL CLI: ${(0, util_1.wrapError)(e).message}`);
throw new Error(`Unable to download and extract CodeQL CLI: ${(0, util_1.getErrorMessage)(e)}`);
}
}
/**
@@ -359,10 +360,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
const prefix = "We were unable to automatically build your code. " +
"Please change the build mode for this language to manual and specify build steps " +
`for your project. See ${doc_url_1.DocUrl.AUTOMATIC_BUILD_FAILED} for more information.`;
const ErrorConstructor = e instanceof util.ConfigurationError
? util.ConfigurationError
: Error;
throw new ErrorConstructor(`${prefix} ${util.wrapError(e).message}`);
throw new util.ConfigurationError(`${prefix} ${(0, util_1.getErrorMessage)(e)}`);
}
else {
throw e;
File diff suppressed because one or more lines are too long

20 lib/codeql.test.js generated
@@ -60,7 +60,7 @@ async function installIntoToolcache({ apiDetails = testing_utils_1.SAMPLE_DOTCOM
const url = (0, testing_utils_1.mockBundleDownloadApi)({ apiDetails, isPinned, tagName });
await codeql.setupCodeQL(cliVersion !== undefined ? undefined : url, apiDetails, tmpDir, util.GitHubVariant.GHES, cliVersion !== undefined
? { cliVersion, tagName }
: testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
: testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
}
function mockReleaseApi({ apiDetails = testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, assetNames, tagName, }) {
return (0, nock_1.default)(apiDetails.apiURL)
@@ -97,7 +97,7 @@ function mockApiDetails(apiDetails) {
tagName: `codeql-bundle-${version}`,
isPinned: false,
});
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
t.is(result.toolsVersion, `0.0.0-${version}`);
t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
@@ -113,7 +113,7 @@ function mockApiDetails(apiDetails) {
tagName: `codeql-bundle-v2.14.0`,
isPinned: false,
});
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
t.is(toolcache.findAllVersions("CodeQL").length, 1);
t.assert(toolcache.find("CodeQL", `2.14.0`));
t.is(result.toolsVersion, `2.14.0`);
@@ -132,7 +132,7 @@ function mockApiDetails(apiDetails) {
const url = (0, testing_utils_1.mockBundleDownloadApi)({
tagName: "codeql-bundle-20200610",
});
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
t.deepEqual(result.toolsVersion, "0.0.0-20200610");
t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
@@ -158,7 +158,7 @@ for (const { tagName, expectedToolcacheVersion, } of EXPLICITLY_REQUESTED_BUNDLE
const url = (0, testing_utils_1.mockBundleDownloadApi)({
tagName,
});
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", expectedToolcacheVersion));
t.deepEqual(result.toolsVersion, expectedToolcacheVersion);
t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
@@ -181,7 +181,7 @@ for (const toolcacheVersion of [
.withArgs("CodeQL", toolcacheVersion)
.returns("path/to/cached/codeql");
sinon.stub(toolcache, "findAllVersions").returns([toolcacheVersion]);
const result = await codeql.setupCodeQL(undefined, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
const result = await codeql.setupCodeQL(undefined, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
t.is(result.toolsVersion, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION.cliVersion);
t.is(result.toolsSource, setup_codeql_1.ToolsSource.Toolcache);
t.is(result.toolsDownloadStatusReport?.downloadDurationMs, undefined);
@@ -199,7 +199,7 @@ for (const toolcacheVersion of [
const result = await codeql.setupCodeQL(undefined, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.GHES, {
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
}, (0, logging_1.getRunnerLogger)(true), false);
}, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
t.deepEqual(result.toolsVersion, "0.0.0-20200601");
t.is(result.toolsSource, setup_codeql_1.ToolsSource.Toolcache);
t.is(result.toolsDownloadStatusReport?.downloadDurationMs, undefined);
@@ -221,7 +221,7 @@ for (const toolcacheVersion of [
const result = await codeql.setupCodeQL(undefined, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.GHES, {
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
}, (0, logging_1.getRunnerLogger)(true), false);
}, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.assert(Number.isInteger(result.toolsDownloadStatusReport?.downloadDurationMs));
@@ -240,7 +240,7 @@ for (const toolcacheVersion of [
(0, testing_utils_1.mockBundleDownloadApi)({
tagName: defaults.bundleVersion,
});
const result = await codeql.setupCodeQL("latest", testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
const result = await codeql.setupCodeQL("latest", testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.assert(Number.isInteger(result.toolsDownloadStatusReport?.downloadDurationMs));
@@ -262,7 +262,7 @@ for (const toolcacheVersion of [
platformSpecific: false,
tagName: "codeql-bundle-20230203",
});
const result = await codeql.setupCodeQL("https://github.com/codeql-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz", testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
const result = await codeql.setupCodeQL("https://github.com/codeql-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz", testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
t.is(result.toolsVersion, "0.0.0-20230203");
t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.true(Number.isInteger(result.toolsDownloadStatusReport?.downloadDurationMs));
File diff suppressed because one or more lines are too long
203 lib/debug-artifacts.js generated
@@ -26,11 +26,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.sanitizeArifactName = sanitizeArifactName;
exports.sanitizeArtifactName = sanitizeArtifactName;
exports.uploadCombinedSarifArtifacts = uploadCombinedSarifArtifacts;
exports.tryUploadAllAvailableDebugArtifacts = tryUploadAllAvailableDebugArtifacts;
exports.uploadDebugArtifacts = uploadDebugArtifacts;
exports.uploadSarifDebugArtifact = uploadSarifDebugArtifact;
exports.uploadLogsDebugArtifact = uploadLogsDebugArtifact;
exports.uploadDatabaseBundleDebugArtifact = uploadDatabaseBundleDebugArtifact;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const artifact = __importStar(require("@actions/artifact"));
@@ -40,10 +39,141 @@ const del_1 = __importDefault(require("del"));
const actions_util_1 = require("./actions-util");
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const environment_1 = require("./environment");
const logging_1 = require("./logging");
const util_1 = require("./util");
function sanitizeArifactName(name) {
function sanitizeArtifactName(name) {
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
}
/**
* Upload Actions SARIF artifacts for debugging when CODEQL_ACTION_DEBUG_COMBINED_SARIF
* environment variable is set
*/
async function uploadCombinedSarifArtifacts(logger) {
const tempDir = (0, actions_util_1.getTemporaryDirectory)();
// Upload Actions SARIF artifacts for debugging when environment variable is set
if (process.env["CODEQL_ACTION_DEBUG_COMBINED_SARIF"] === "true") {
logger.info("Uploading available combined SARIF files as Actions debugging artifact...");
const baseTempDir = path.resolve(tempDir, "combined-sarif");
const toUpload = [];
if (fs.existsSync(baseTempDir)) {
const outputDirs = fs.readdirSync(baseTempDir);
for (const outputDir of outputDirs) {
const sarifFiles = fs
.readdirSync(path.resolve(baseTempDir, outputDir))
.filter((f) => f.endsWith(".sarif"));
for (const sarifFile of sarifFiles) {
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
}
}
}
try {
await uploadDebugArtifacts(toUpload, baseTempDir, "combined-sarif-artifacts");
}
catch (e) {
logger.warning(`Failed to upload combined SARIF files as Actions debugging artifact. Reason: ${(0, util_1.getErrorMessage)(e)}`);
}
}
}
/**
* Try to prepare a SARIF result debug artifact for the given language.
*
* @return The path to that debug artifact, or undefined if an error occurs.
*/
function tryPrepareSarifDebugArtifact(config, language, logger) {
try {
const analyzeActionOutputDir = process.env[environment_1.EnvVar.SARIF_RESULTS_OUTPUT_DIR];
if (analyzeActionOutputDir !== undefined &&
fs.existsSync(analyzeActionOutputDir) &&
fs.lstatSync(analyzeActionOutputDir).isDirectory()) {
const sarifFile = path.resolve(analyzeActionOutputDir, `${language}.sarif`);
// Move SARIF to DB location so that they can be uploaded with the same root directory as the other artifacts.
if (fs.existsSync(sarifFile)) {
const sarifInDbLocation = path.resolve(config.dbLocation, `${language}.sarif`);
fs.copyFileSync(sarifFile, sarifInDbLocation);
return sarifInDbLocation;
}
}
}
catch (e) {
logger.warning(`Failed to find SARIF results path for ${language}. Reason: ${(0, util_1.getErrorMessage)(e)}`);
}
return undefined;
}
/**
* Try to bundle the database for the given language.
*
* @return The path to the database bundle, or undefined if an error occurs.
*/
async function tryBundleDatabase(config, language, logger) {
try {
if ((0, analyze_1.dbIsFinalized)(config, language, logger)) {
try {
return await createDatabaseBundleCli(config, language);
}
catch (e) {
logger.warning(`Failed to bundle database for ${language} using the CLI. ` +
`Falling back to a partial bundle. Reason: ${(0, util_1.getErrorMessage)(e)}`);
}
}
return await createPartialDatabaseBundle(config, language);
}
catch (e) {
logger.warning(`Failed to bundle database for ${language}. Reason: ${(0, util_1.getErrorMessage)(e)}`);
return undefined;
}
}
/**
* Attempt to upload all available debug artifacts.
*
* Logs and suppresses any errors that occur.
*/
async function tryUploadAllAvailableDebugArtifacts(config, logger) {
const filesToUpload = [];
try {
for (const language of config.languages) {
await (0, logging_1.withGroup)(`Uploading debug artifacts for ${language}`, async () => {
logger.info("Preparing SARIF result debug artifact...");
const sarifResultDebugArtifact = tryPrepareSarifDebugArtifact(config, language, logger);
if (sarifResultDebugArtifact) {
filesToUpload.push(sarifResultDebugArtifact);
logger.info("SARIF result debug artifact ready for upload.");
}
logger.info("Preparing database logs debug artifact...");
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
const logsDirectory = path.resolve(databaseDirectory, "log");
if ((0, util_1.doesDirectoryExist)(logsDirectory)) {
filesToUpload.push(...(0, util_1.listFolder)(logsDirectory));
logger.info("Database logs debug artifact ready for upload.");
}
// Multilanguage tracing: there are additional logs in the root of the cluster
logger.info("Preparing database cluster logs debug artifact...");
const multiLanguageTracingLogsDirectory = path.resolve(config.dbLocation, "log");
if ((0, util_1.doesDirectoryExist)(multiLanguageTracingLogsDirectory)) {
filesToUpload.push(...(0, util_1.listFolder)(multiLanguageTracingLogsDirectory));
logger.info("Database cluster logs debug artifact ready for upload.");
}
// Add database bundle
logger.info("Preparing database bundle debug artifact...");
const databaseBundle = await tryBundleDatabase(config, language, logger);
if (databaseBundle) {
filesToUpload.push(databaseBundle);
logger.info("Database bundle debug artifact ready for upload.");
}
});
}
}
catch (e) {
logger.warning(`Failed to prepare debug artifacts. Reason: ${(0, util_1.getErrorMessage)(e)}`);
return;
}
try {
await (0, logging_1.withGroup)("Uploading debug artifacts", async () => uploadDebugArtifacts(filesToUpload, config.dbLocation, config.debugArtifactName));
}
catch (e) {
logger.warning(`Failed to upload debug artifacts. Reason: ${(0, util_1.getErrorMessage)(e)}`);
}
}
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
if (toUpload.length === 0) {
return;
@@ -59,46 +189,11 @@ async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
core.info("Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input.");
}
}
try {
await artifact.create().uploadArtifact(sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir), {
continueOnError: true,
// ensure we don't keep the debug artifacts around for too long since they can be large.
retentionDays: 7,
});
}
catch (e) {
// A failure to upload debug artifacts should not fail the entire action.
core.warning(`Failed to upload debug artifacts: ${e}`);
}
}
async function uploadSarifDebugArtifact(config, outputDir) {
if (!(0, util_1.doesDirectoryExist)(outputDir)) {
return;
}
let toUpload = [];
for (const lang of config.languages) {
const sarifFile = path.resolve(outputDir, `${lang}.sarif`);
if (fs.existsSync(sarifFile)) {
toUpload = toUpload.concat(sarifFile);
}
}
await uploadDebugArtifacts(toUpload, outputDir, config.debugArtifactName);
}
async function uploadLogsDebugArtifact(config) {
let toUpload = [];
for (const language of config.languages) {
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
const logsDirectory = path.resolve(databaseDirectory, "log");
if ((0, util_1.doesDirectoryExist)(logsDirectory)) {
toUpload = toUpload.concat((0, util_1.listFolder)(logsDirectory));
}
}
// Multilanguage tracing: there are additional logs in the root of the cluster
const multiLanguageTracingLogsDirectory = path.resolve(config.dbLocation, "log");
if ((0, util_1.doesDirectoryExist)(multiLanguageTracingLogsDirectory)) {
toUpload = toUpload.concat((0, util_1.listFolder)(multiLanguageTracingLogsDirectory));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
await artifact.create().uploadArtifact(sanitizeArtifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir), {
continueOnError: true,
// ensure we don't keep the debug artifacts around for too long since they can be large.
retentionDays: 7,
});
}
/**
* If a database has not been finalized, we cannot run the `codeql database bundle`
@@ -122,25 +217,7 @@ async function createPartialDatabaseBundle(config, language) {
* Runs `codeql database bundle` command and returns the path.
*/
async function createDatabaseBundleCli(config, language) {
// Otherwise run `codeql database bundle` command.
const databaseBundlePath = await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`);
return databaseBundlePath;
}
async function uploadDatabaseBundleDebugArtifact(config, logger) {
for (const language of config.languages) {
try {
let databaseBundlePath;
if (!(0, analyze_1.dbIsFinalized)(config, language, logger)) {
databaseBundlePath = await createPartialDatabaseBundle(config, language);
}
else {
databaseBundlePath = await createDatabaseBundleCli(config, language);
}
await uploadDebugArtifacts([databaseBundlePath], config.dbLocation, config.debugArtifactName);
}
catch (error) {
core.info(`Failed to upload database debug bundle for ${config.debugDatabaseName}-${language}: ${error}`);
}
}
}
//# sourceMappingURL=debug-artifacts.js.map
File diff suppressed because one or more lines are too long
10 lib/debug-artifacts.test.js generated
@@ -28,11 +28,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const debugArtifacts = __importStar(require("./debug-artifacts"));
(0, ava_1.default)("sanitizeArifactName", (t) => {
t.deepEqual(debugArtifacts.sanitizeArifactName("hello-world_"), "hello-world_");
t.deepEqual(debugArtifacts.sanitizeArifactName("hello`world`"), "helloworld");
t.deepEqual(debugArtifacts.sanitizeArifactName("hello===123"), "hello123");
t.deepEqual(debugArtifacts.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
(0, ava_1.default)("sanitizeArtifactName", (t) => {
t.deepEqual(debugArtifacts.sanitizeArtifactName("hello-world_"), "hello-world_");
t.deepEqual(debugArtifacts.sanitizeArtifactName("hello`world`"), "helloworld");
t.deepEqual(debugArtifacts.sanitizeArtifactName("hello===123"), "hello123");
t.deepEqual(debugArtifacts.sanitizeArtifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
});
(0, ava_1.default)("uploadDebugArtifacts", async (t) => {
// Test that no error is thrown if artifacts list is empty.

@@ -1 +1 @@
{"version":3,"file":"debug-artifacts.test.js","sourceRoot":"","sources":["../src/debug-artifacts.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AAEvB,kEAAoD;AAEpD,IAAA,aAAI,EAAC,qBAAqB,EAAE,CAAC,CAAC,EAAE,EAAE;IAChC,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,mBAAmB,CAAC,cAAc,CAAC,EAClD,cAAc,CACf,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,mBAAmB,CAAC,cAAc,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9E,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAC3E,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,mBAAmB,CAAC,yBAAyB,CAAC,EAC7D,aAAa,CACd,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,2DAA2D;IAC3D,MAAM,CAAC,CAAC,cAAc,CACpB,cAAc,CAAC,oBAAoB,CAAC,EAAE,EAAE,SAAS,EAAE,cAAc,CAAC,CACnE,CAAC;AACJ,CAAC,CAAC,CAAC"}
{"version":3,"file":"debug-artifacts.test.js","sourceRoot":"","sources":["../src/debug-artifacts.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AAEvB,kEAAoD;AAEpD,IAAA,aAAI,EAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,EAAE;IACjC,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,cAAc,CACf,CAAC;IACF,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,YAAY,CACb,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,oBAAoB,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAC5E,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,EAC9D,aAAa,CACd,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,2DAA2D;IAC3D,MAAM,CAAC,CAAC,cAAc,CACpB,cAAc,CAAC,oBAAoB,CAAC,EAAE,EAAE,SAAS,EAAE,cAAc,CAAC,CACnE,CAAC;AACJ,CAAC,CAAC,CAAC"}

@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.18.3",
"cliVersion": "2.18.3",
"priorBundleVersion": "codeql-bundle-v2.18.2",
"priorCliVersion": "2.18.2"
"bundleVersion": "codeql-bundle-v2.19.0",
"cliVersion": "2.19.0",
"priorBundleVersion": "codeql-bundle-v2.18.4",
"priorCliVersion": "2.18.4"
}

2 lib/environment.js generated
@@ -53,6 +53,8 @@ var EnvVar;
/** Status for the entire job, submitted to the status report in `init-post` */
EnvVar["JOB_STATUS"] = "CODEQL_ACTION_JOB_STATUS";
EnvVar["ODASA_TRACER_CONFIGURATION"] = "ODASA_TRACER_CONFIGURATION";
/** The value of the `output` input for the analyze action. */
EnvVar["SARIF_RESULTS_OUTPUT_DIR"] = "CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR";
/**
* What percentage of the total amount of RAM over 8 GB that the Action should reserve for the
* system.

@@ -1 +1 @@
{"version":3,"file":"environment.js","sourceRoot":"","sources":["../src/environment.ts"],"names":[],"mappings":";;;AAAA;;;;;GAKG;AACH,IAAY,MA2FX;AA3FD,WAAY,MAAM;IAChB,2DAA2D;IAC3D,+FAAqF,CAAA;IAErF,6DAA6D;IAC7D,mGAAyF,CAAA;IAEzF;;;OAGG;IACH,4CAAkC,CAAA;IAElC,gEAAgE;IAChE,qEAA2D,CAAA;IAE3D;;;OAGG;IACH,yFAA+E,CAAA;IAE/E;;;OAGG;IACH,yEAA+D,CAAA;IAE/D,gFAAgF;IAChF,6DAAmD,CAAA;IAEnD;;;OAGG;IACH,uEAA6D,CAAA;IAE7D,gEAAgE;IAChE,mEAAyD,CAAA;IAEzD,kFAAkF;IAClF,mFAAyE,CAAA;IAEzE,4CAA4C;IAC5C,4DAAkD,CAAA;IAElD;;;OAGG;IACH,yDAA+C,CAAA;IAE/C,6CAA6C;IAC7C,uCAA6B,CAAA;IAE7B,+EAA+E;IAC/E,iDAAuC,CAAA;IAEvC,mEAAyD,CAAA;IAEzD;;;OAGG;IACH,2FAAiF,CAAA;IAEjF,mFAAmF;IACnF,6FAAmF,CAAA;IAEnF,qFAAqF;IACrF,+CAAqC,CAAA;IAErC,mEAAyD,CAAA;IAEzD,kEAAkE;IAClE,2CAAiC,CAAA;IAEjC;;;;;;OAMG;IACH,4DAAkD,CAAA;IAElD;;;OAGG;IACH,wDAA8C,CAAA;AAChD,CAAC,EA3FW,MAAM,sBAAN,MAAM,QA2FjB"}
{"version":3,"file":"environment.js","sourceRoot":"","sources":["../src/environment.ts"],"names":[],"mappings":";;;AAAA;;;;;GAKG;AACH,IAAY,MA8FX;AA9FD,WAAY,MAAM;IAChB,2DAA2D;IAC3D,+FAAqF,CAAA;IAErF,6DAA6D;IAC7D,mGAAyF,CAAA;IAEzF;;;OAGG;IACH,4CAAkC,CAAA;IAElC,gEAAgE;IAChE,qEAA2D,CAAA;IAE3D;;;OAGG;IACH,yFAA+E,CAAA;IAE/E;;;OAGG;IACH,yEAA+D,CAAA;IAE/D,gFAAgF;IAChF,6DAAmD,CAAA;IAEnD;;;OAGG;IACH,uEAA6D,CAAA;IAE7D,gEAAgE;IAChE,mEAAyD,CAAA;IAEzD,kFAAkF;IAClF,mFAAyE,CAAA;IAEzE,4CAA4C;IAC5C,4DAAkD,CAAA;IAElD;;;OAGG;IACH,yDAA+C,CAAA;IAE/C,6CAA6C;IAC7C,uCAA6B,CAAA;IAE7B,+EAA+E;IAC/E,iDAAuC,CAAA;IAEvC,mEAAyD,CAAA;IAEzD,8DAA8D;IAC9D,6EAAmE,CAAA;IAEnE;;;OAGG;IACH,2FAAiF,CAAA;IAEjF,mFAAmF;IACnF,6FAAmF,CAAA;IAEnF,qFAAqF;IACrF,+CAAqC,CAAA;IAErC,mEAAyD,CAAA;IAEzD,kEAAkE;IAClE,2CAAiC,CAAA;IAEjC;;;;;;OAMG;IACH,4DAAkD,CAAA;IAElD;;;OAGG;IACH,wDAA8C,CAAA;AAChD,CAAC,EA9FW,MAAM,sBAAN,MAAM,QA8FjB"}

14 lib/feature-flags.js generated
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Features = exports.FEATURE_FLAGS_FILE_NAME = exports.featureConfig = exports.Feature = exports.CODEQL_VERSION_FINE_GRAINED_PARALLELISM = void 0;
exports.Features = exports.FEATURE_FLAGS_FILE_NAME = exports.featureConfig = exports.Feature = exports.CODEQL_VERSION_ZSTD_BUNDLE = exports.CODEQL_VERSION_FINE_GRAINED_PARALLELISM = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
@@ -37,6 +37,10 @@ const DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
* (Some earlier versions recognize the command-line flag, but they contain a bug which makes it unsafe to use).
*/
exports.CODEQL_VERSION_FINE_GRAINED_PARALLELISM = "2.15.1";
/**
* The first version of the CodeQL Bundle that shipped with zstd-compressed bundles.
*/
exports.CODEQL_VERSION_ZSTD_BUNDLE = "2.19.0";
/**
* Feature enablement as returned by the GitHub API endpoint.
*
@@ -51,6 +55,7 @@ var Feature;
Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
Feature["ExportDiagnosticsEnabled"] = "export_diagnostics_enabled";
Feature["QaTelemetryEnabled"] = "qa_telemetry_enabled";
Feature["ZstdBundle"] = "zstd_bundle";
})(Feature || (exports.Feature = Feature = {}));
exports.featureConfig = {
[Feature.CleanupTrapCaches]: {
@@ -93,6 +98,13 @@ exports.featureConfig = {
legacyApi: true,
minimumVersion: undefined,
},
[Feature.ZstdBundle]: {
defaultValue: false,
envVar: "CODEQL_ACTION_ZSTD_BUNDLE",
// We haven't yet installed CodeQL when we check this feature flag, so we need to implement the
// version check separately.
minimumVersion: undefined,
},
};
exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
/**
File diff suppressed because one or more lines are too long
5 lib/init-action-post-helper.js generated
@@ -106,7 +106,7 @@ async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger
};
}
}
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs, config, repositoryNwo, features, logger) {
async function run(uploadAllAvailableDebugArtifacts, printDebugLogs, config, repositoryNwo, features, logger) {
const uploadFailedSarifResult = await tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger);
if (uploadFailedSarifResult.upload_failed_run_skipped_because) {
logger.debug("Won't upload a failed SARIF file for this CodeQL code scanning run because: " +
@@ -132,8 +132,7 @@ async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, p
// Upload appropriate Actions artifacts for debugging
if (config.debugMode) {
logger.info("Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts...");
await uploadDatabaseBundleDebugArtifact(config, logger);
await uploadLogsDebugArtifact(config);
await uploadAllAvailableDebugArtifacts(config, logger);
await printDebugLogs(config);
}
if (actionsUtil.isSelfHostedRunner()) {
File diff suppressed because one or more lines are too long
16 lib/init-action-post-helper.test.js generated
@@ -53,12 +53,10 @@ const workflow = __importStar(require("./workflow"));
languages: [],
packs: [],
});
const uploadDatabaseBundleSpy = sinon.spy();
const uploadLogsSpy = sinon.spy();
const uploadAllAvailableDebugArtifactsSpy = sinon.spy();
const printDebugLogsSpy = sinon.spy();
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy, (0, testing_utils_1.createTestConfig)({ debugMode: false }), (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
t.assert(uploadDatabaseBundleSpy.notCalled);
t.assert(uploadLogsSpy.notCalled);
await initActionPostHelper.run(uploadAllAvailableDebugArtifactsSpy, printDebugLogsSpy, (0, testing_utils_1.createTestConfig)({ debugMode: false }), (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
t.assert(uploadAllAvailableDebugArtifactsSpy.notCalled);
t.assert(printDebugLogsSpy.notCalled);
});
});
@@ -66,12 +64,10 @@ const workflow = __importStar(require("./workflow"));
return await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["RUNNER_TEMP"] = tmpDir;
const uploadDatabaseBundleSpy = sinon.spy();
const uploadLogsSpy = sinon.spy();
const uploadAllAvailableDebugArtifactsSpy = sinon.spy();
const printDebugLogsSpy = sinon.spy();
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy, (0, testing_utils_1.createTestConfig)({ debugMode: true }), (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
t.assert(uploadDatabaseBundleSpy.called);
t.assert(uploadLogsSpy.called);
await initActionPostHelper.run(uploadAllAvailableDebugArtifactsSpy, printDebugLogsSpy, (0, testing_utils_1.createTestConfig)({ debugMode: true }), (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
t.assert(uploadAllAvailableDebugArtifactsSpy.called);
t.assert(printDebugLogsSpy.called);
});
});
File diff suppressed because one or more lines are too long
2 lib/init-action-post.js generated
@@ -54,7 +54,7 @@ async function runWrapper() {
logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
return;
}
uploadFailedSarifResult = await initActionPostHelper.run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, actions_util_1.printDebugLogs, config, repositoryNwo, features, logger);
uploadFailedSarifResult = await initActionPostHelper.run(debugArtifacts.tryUploadAllAvailableDebugArtifacts, actions_util_1.printDebugLogs, config, repositoryNwo, features, logger);
}
catch (unwrappedError) {
const error = (0, util_1.wrapError)(unwrappedError);

@@ -1 +1 @@
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,iDAAuE;AACvE,6CAAgD;AAChD,iDAAmD;AACnD,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,mDAOyB;AACzB,iCAKgB;AAOhB,KAAK,UAAU,UAAU;IACvB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,MAA0B,CAAC;IAC/B,IAAI,uBAES,CAAC;IACd,IAAI,CAAC;QACH,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QAEF,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC1D,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;YACF,OAAO;QACT,CAAC;QAED,uBAAuB,GAAG,MAAM,oBAAoB,CAAC,GAAG,CACtD,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,6BAAc,EACd,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;IACJ,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAE9B,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,QAAQ,EACnB,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CAAC;QACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;QAC3C,CAAC;QACD,OAAO;IACT,CAAC;IACD,MAAM,SAAS,GAAG,oBAAoB,CAAC,iBAAiB,EAAE,CAAC;IAC3D,MAAM,CAAC,IAAI,CAAC,yBAAyB,IAAA,uCAAuB,EAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IAE5E,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,QAAQ,EACnB,SAAS,EACT,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAAyB;YACzC,GAAG,gBAAgB;YACnB,GAAG,uBAAuB;YAC1B,UAAU,EAAE,oBAAoB,CAAC,iBAAiB,EAAE;SACrD,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,iDAAuE;AACvE,6CAAgD;AAChD,iDAAmD;AACnD,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,mDAOyB;AACzB,iCAKgB;AAOhB,KAAK,UAAU,UAAU;IACvB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,MAA0B,CAAC;IAC/B,IAAI,uBAES,CAAC;IACd,IAAI,CAAC;QACH,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QAEF,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC1D,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;YACF,OAAO;QACT,CAAC;QAED,uBAAuB,GAAG,MAAM,oBAAoB,CAAC,GAAG,CACtD,cAAc,CAAC,mCAAmC,EAClD,6BAAc,EACd,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;IACJ,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAE9B,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,QAAQ,EACnB,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CAAC;QACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;QAC3C,CAAC;QACD,OAAO;IACT,CAAC;IACD,MAAM,SAAS,GAAG,oBAAoB,CAAC,iBAAiB,EAAE,CAAC;IAC3D,MAAM,CAAC,IAAI,CAAC,yBAAyB,IAAA,uCAAuB,EAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IAE5E,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,QAAQ,EACnB,SAAS,EACT,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAAyB;YACzC,GAAG,gBAAgB;YACnB,GAAG,uBAAuB;YAC1B,UAAU,EAAE,oBAAoB,CAAC,iBAAiB,EAAE;SACrD,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

17 lib/init-action.js generated
@@ -40,7 +40,6 @@ const logging_1 = require("./logging");
const repository_1 = require("./repository");
const setup_codeql_1 = require("./setup-codeql");
const status_report_1 = require("./status-report");
const tar_1 = require("./tar");
const tools_features_1 = require("./tools-features");
const trap_caching_1 = require("./trap-caching");
const util_1 = require("./util");
@@ -136,6 +135,7 @@ async function run() {
let toolsFeatureFlagsValid;
let toolsSource;
let toolsVersion;
let zstdAvailability;
const apiDetails = {
auth: (0, actions_util_1.getRequiredInput)("token"),
externalRepoAuth: (0, actions_util_1.getOptionalInput)("external-repository-token"),
@@ -159,11 +159,12 @@ async function run() {
}
const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(gitHubVersion.type);
toolsFeatureFlagsValid = codeQLDefaultVersionInfo.toolsFeatureFlagsValid;
const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, codeQLDefaultVersionInfo, logger);
const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, codeQLDefaultVersionInfo, features, logger);
codeql = initCodeQLResult.codeql;
toolsDownloadStatusReport = initCodeQLResult.toolsDownloadStatusReport;
toolsVersion = initCodeQLResult.toolsVersion;
toolsSource = initCodeQLResult.toolsSource;
zstdAvailability = initCodeQLResult.zstdAvailability;
core.startGroup("Validating workflow");
if ((await (0, workflow_1.validateWorkflow)(codeql, logger)) === undefined) {
logger.info("Detected no issues with the code scanning workflow.");
@@ -209,7 +210,9 @@ async function run() {
}
try {
(0, init_1.cleanupDatabaseClusterDirectory)(config, logger);
await logZstdAvailability(config, logger);
if (zstdAvailability) {
await recordZstdAvailability(config, zstdAvailability);
}
// Log CodeQL download telemetry, if appropriate
if (toolsDownloadStatusReport) {
(0, diagnostics_1.addDiagnostic)(config,
@@ -392,14 +395,12 @@ function getTrapCachingEnabled() {
// On hosted runners, enable TRAP caching by default
return true;
}
async function logZstdAvailability(config, logger) {
// Log zstd availability
const zstdAvailableResult = await (0, tar_1.isZstdAvailable)(logger);
async function recordZstdAvailability(config, zstdAvailability) {
(0, diagnostics_1.addDiagnostic)(config,
// Arbitrarily choose the first language. We could also choose all languages, but that
// increases the risk of misinterpreting the data.
config.languages[0], (0, diagnostics_1.makeDiagnostic)("codeql-action/zstd-availability", "Zstandard availability", {
attributes: zstdAvailableResult,
attributes: zstdAvailability,
visibility: {
cliSummaryTable: false,
statusPage: false,
@@ -412,7 +413,7 @@ async function runWrapper() {
await run();
}
catch (error) {
core.setFailed(`init action failed: ${(0, util_1.wrapError)(error).message}`);
core.setFailed(`init action failed: ${(0, util_1.getErrorMessage)(error)}`);
}
await (0, util_1.checkForTimeout)();
}
File diff suppressed because one or more lines are too long
16 lib/init.js generated
@@ -40,12 +40,18 @@ const languages_1 = require("./languages");
const tools_features_1 = require("./tools-features");
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
logger.startGroup("Setup CodeQL tools");
const { codeql, toolsDownloadStatusReport, toolsSource, toolsVersion } = await (0, codeql_1.setupCodeQL)(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, true);
const { codeql, toolsDownloadStatusReport, toolsSource, toolsVersion, zstdAvailability, } = await (0, codeql_1.setupCodeQL)(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, true);
await codeql.printVersion();
logger.endGroup();
return { codeql, toolsDownloadStatusReport, toolsSource, toolsVersion };
return {
codeql,
toolsDownloadStatusReport,
toolsSource,
toolsVersion,
zstdAvailability,
};
}
async function initConfig(inputs, codeql) {
const logger = inputs.logger;
@@ -115,12 +121,12 @@ rmSync = fs.rmSync) {
// Hosted runners are automatically cleaned up, so this error should not occur for hosted runners.
if ((0, actions_util_1.isSelfHostedRunner)()) {
throw new util.ConfigurationError(`${blurb} This can happen if another process is using the directory or the directory is owned by a different user. ` +
`Please clean up the directory manually and rerun the job. Details: ${util.wrapError(e).message}`);
`Please clean up the directory manually and rerun the job. Details: ${util.getErrorMessage(e)}`);
}
else {
throw new Error(`${blurb} This shouldn't typically happen on hosted runners. ` +
"If you are using an advanced setup, please check your workflow, otherwise we " +
`recommend rerunning the job. Details: ${util.wrapError(e).message}`);
`recommend rerunning the job. Details: ${util.getErrorMessage(e)}`);
}
}
}

@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAkBA,gCA2BC;AAED,gCAgBC;AAED,0BAkCC;AAED,0DAeC;AAMD,sDAkBC;AAED,0EAoDC;AAlMD,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,iDAAsE;AAEtE,qCAA+C;AAC/C,4DAA8C;AAE9C,2CAA0D;AAG1D,qDAAgD;AAChD,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,UAA8B,EAC9B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,iBAA2C,EAC3C,MAAc;IAOd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,yBAAyB,EAAE,WAAW,EAAE,YAAY,EAAE,GACpE,MAAM,IAAA,oBAAW,EACf,UAAU,EACV,UAAU,EACV,OAAO,EACP,OAAO,EACP,iBAAiB,EACjB,MAAM,EACN,IAAI,CACL,CAAC;IACJ,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,yBAAyB,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC;AAC1E,CAAC;AAEM,KAAK,UAAU,UAAU,CAC9B,MAAoC,EACpC,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;IAC7B,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IACpD,IACE,CAAC,CAAC,MAAM,MAAM,CAAC,eAAe,CAC5B,6BAAY,CAAC,kCAAkC,CAChD,CAAC,EACF,CAAC;QACD,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,eAAmC,EACnC,UAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,MAAM,EAAE,oBAAoB,EAAE,YAAY,EAAE,GAC1C,MAAM,WAAW,CAAC,kBAAkB,CAClC,eAAe,EACf,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC;IACJ,MAAM,WAAW,CAAC,eAAe,CAC/B;QACE,YAAY,EAAE,UAAU,CAAC,IAAI;QAC7B,sBAAsB,EAAE,oBAAoB;KAC7C;IAED,0BAA0B;IAC1B,KAAK,IAAI,EAAE,CACT,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,YAAY,EACZ,MAAM,CACP,CACJ,CAAC;IACF,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,EAAE,MAAM;QACrC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,EAAE,MAAM,CAAC;QACnD,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,6BAAiB,CAAC,EAC1C,CAAC;QACD,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;GAGG;AACI,KAAK,UAAU,qBAAqB,CACzC,SAAqB,EACrB,MAAc;IAEd,IACE,SAAS,CAAC,QAAQ,CAAC,oBAAQ,CAAC,MAAM,CAAC;QACnC,OAAO,CAAC,QAAQ,KAAK,OAAO;QAC5B,CAAC,CAAC,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC,CAAC,QAAQ,EAAE,iBAAiB,EACxD,CAAC;QACD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CACzB,SAAS,EACT,iBAAiB,EACjB,oBAAoB,CACrB,CAAC;QACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;YACvE,MAAM;SACP,CAAC,CAAC,IAAI,EAAE,CAAC;IACZ,CAAC;AACH,CAAC;AAED,SAAgB,+BAA+B,CAC7C,MAA0B,EAC1B,MAAc;AACd,+FAA+F;AAC/F,eAAe;AACf,MAAM,GAAG,EAAE,CAAC,MAAM;IAElB,IACE,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC;QAChC,CAAC,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,MAAM,EAAE;YACtC,EAAE,CAAC,WAAW,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,EAC3C,CAAC;QACD,MAAM,CAAC,OAAO,CACZ,kCAAkC,MAAM,CAAC,UAAU,4CAA4C,CAChG,CAAC;QACF,IAAI,CAAC;YACH,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE;gBACxB,KAAK,EAAE,IAAI;gBACX,UAAU,EAAE,CAAC;gBACb,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,CAAC,IAAI,CACT,yCAAyC,MAAM,CAAC,UAAU,GAAG,CAC9D,CAAC;QACJ,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,MAAM,KAAK,GAAG,mEACZ,IAAA,+BAAgB,EAAC,aAAa,CAAC;gBAC7B,CAAC,CAAC,sCAAsC,MAAM,CAAC,UAAU,IAAI;gBAC7D,CAAC,CAAC,kCAAkC,MAAM,CAAC,UAAU,IAAI;oBACvD,yEACN,iEAAiE,CAAC;YAElE,kGAAkG;YAClG,IAAI,IAAA,iCAAkB,GAAE,EAAE,CAAC;gBACzB,MAAM,IAAI,IAAI,CAAC,kBAAkB,CAC/B,GAAG,KAAK,4GAA4G;oBAClH,sEACE,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,OACpB,EAAE,CACL,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACN,MAAM,IAAI,KAAK,CACb,GAAG,KAAK,sDAAsD;oBAC5D,+EAA+E;oBAC/E,yCACE,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,OACpB,EAAE,CACL,CAAC;YACJ,CAA
C;QACH,CAAC;IACH,CAAC;AACH,CAAC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAmBA,gCAyCC;AAED,gCAgBC;AAED,0BAkCC;AAED,0DAeC;AAMD,sDAkBC;AAED,0EAkDC;AA/MD,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,iDAAsE;AAEtE,qCAA+C;AAC/C,4DAA8C;AAE9C,2CAA0D;AAI1D,qDAAgD;AAChD,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,UAA8B,EAC9B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,iBAA2C,EAC3C,QAA2B,EAC3B,MAAc;IAQd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EACJ,MAAM,EACN,yBAAyB,EACzB,WAAW,EACX,YAAY,EACZ,gBAAgB,GACjB,GAAG,MAAM,IAAA,oBAAW,EACnB,UAAU,EACV,UAAU,EACV,OAAO,EACP,OAAO,EACP,iBAAiB,EACjB,QAAQ,EACR,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO;QACL,MAAM;QACN,yBAAyB;QACzB,WAAW;QACX,YAAY;QACZ,gBAAgB;KACjB,CAAC;AACJ,CAAC;AAEM,KAAK,UAAU,UAAU,CAC9B,MAAoC,EACpC,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;IAC7B,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IACpD,IACE,CAAC,CAAC,MAAM,MAAM,CAAC,eAAe,CAC5B,6BAAY,CAAC,kCAAkC,CAChD,CAAC,EACF,CAAC;QACD,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,eAAmC,EACnC,UAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,MAAM,EAAE,oBAAoB,EAAE,YAAY,EAAE,GAC1C,MAAM,WAAW,CAAC,kBAAkB,CAClC,eAAe,EACf,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC;IACJ,MAAM,WAAW,CAAC,eAAe,CAC/B;QACE,YAAY,EAAE,UAAU,CAAC,IAAI;QAC7B,sBAAsB,EAAE,oBAAoB;KAC7C;IAED,0BAA0B;IAC1B,KAAK,IAAI,EAAE,CACT,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,YAAY,EACZ,MAAM,CACP,CACJ,CAAC;IACF,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,EAAE,MAAM;QACrC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,EAAE,MAAM,CAAC;QACnD,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,6BAAiB,CAAC,EAC1C,CAAC;QACD,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;GAGG;AACI,KAAK,UAAU,qBAAqB,CACzC,SAAqB,EACrB,MAAc;IAEd,IACE,SAAS,CAAC,QAAQ,CAAC,oBAAQ,CAAC,MAAM,CAAC;QACnC,OAAO,CAAC,QAAQ,KAAK,OAAO;QAC5B,CAAC,CAAC,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC,CAAC,QAAQ,EAAE,iBAAiB,EACxD,CAAC;QACD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CACzB,SAAS,EACT,iBAAiB,EACjB,oBAAoB,CACrB,CAAC;QACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;YACvE,MAAM;SACP,CAAC,CAAC,IAAI,EAAE,CAAC;IACZ,CAAC;AACH,CAAC;AAED,SAAgB,+BAA+B,CAC7C,MAA0B,EAC1B,MAAc;AACd,+FAA+F;AAC/F,eAAe;AACf,MAAM,GAAG,EAAE,CAAC,MAAM;IAElB,IACE,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC;QAChC,CAAC,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,MAAM,EAAE;YACtC,EAAE,CAAC,WAAW,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,EAC3C,CAAC;QACD,MAAM,CAAC,OAAO,CACZ,kCAAkC,MAAM,CAAC,UAAU,4CAA4C,CAChG,CAAC;QACF,IAAI,CAAC;YACH,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE;gBACxB,KAAK,EAAE,IAAI;gBACX,UAAU,EAAE,CAAC;gBACb,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,CAAC,IAAI,CACT,yCAAyC,MAAM,CAAC,UAAU,GAAG,CAC9D,CAAC;QACJ,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,MAAM,KAAK,GAAG,mEACZ,IAAA,+BAAgB,EAAC,aAAa,CAAC;gBAC7B,CAAC,CAAC,sCAAsC,MAAM,CAAC,UAAU,IAAI;gBAC7D,CAAC,CAAC,kCAAkC,MAAM,CAAC,UAAU,IAAI;oBACvD,yEACN,iEAAiE,CAAC;YAElE,kGAAkG;YAClG,IAAI,IAAA,iCAAkB,GAAE,EAAE,CAAC;gBACzB,MAAM,IAAI,IAAI,CAAC,kBAAkB,CAC/B,GAAG,KAAK,4GAA4G;oBAClH,sEAAsE,IAAI,CAAC,eAAe,CACxF,CAAC,CACF,EAAE,CACN,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACN,MAAM,IAAI,KAAK,CACb,GAAG,KAAK,sDAAsD;oBAC5D,+EAA+E;oBAC/E,yCAAyC,IAAI,CAAC,eAAe,CAAC,CAA
C,CAAC,EAAE,CACrE,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC"}

10 lib/logging.js generated
@@ -25,6 +25,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.getActionsLogger = getActionsLogger;
exports.getRunnerLogger = getRunnerLogger;
exports.withGroup = withGroup;
const core = __importStar(require("@actions/core"));
function getActionsLogger() {
return core;
@@ -44,4 +45,13 @@ function getRunnerLogger(debugMode) {
endGroup: () => undefined,
};
}
function withGroup(groupName, f) {
core.startGroup(groupName);
try {
return f();
}
finally {
core.endGroup();
}
}
//# sourceMappingURL=logging.js.map

@@ -1 +1 @@
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAcA,4CAEC;AAED,0CAcC;AAhCD,oDAAsC;AActC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAgB,eAAe,CAAC,SAAkB;IAChD,OAAO;QACL,sCAAsC;QACtC,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS;QAClD,sCAAsC;QACtC,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,sCAAsC;QACtC,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,sCAAsC;QACtC,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,OAAO,EAAE,GAAG,EAAE,CAAC,SAAS;QACxB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC"}
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAcA,4CAEC;AAED,0CAcC;AAED,8BAOC;AAzCD,oDAAsC;AActC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAgB,eAAe,CAAC,SAAkB;IAChD,OAAO;QACL,sCAAsC;QACtC,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS;QAClD,sCAAsC;QACtC,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,sCAAsC;QACtC,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,sCAAsC;QACtC,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,OAAO,EAAE,GAAG,EAAE,CAAC,SAAS;QACxB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAED,SAAgB,SAAS,CAAI,SAAiB,EAAE,CAAU;IACxD,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAC3B,IAAI,CAAC;QACH,OAAO,CAAC,EAAE,CAAC;IACb,CAAC;YAAS,CAAC;QACT,IAAI,CAAC,QAAQ,EAAE,CAAC;IAClB,CAAC;AACH,CAAC"}
2
lib/resolve-environment-action.js
generated
@@ -81,7 +81,7 @@ async function runWrapper() {
         await run();
     }
     catch (error) {
-        core.setFailed(`${status_report_1.ActionName.ResolveEnvironment} action failed: ${(0, util_1.wrapError)(error).message}`);
+        core.setFailed(`${status_report_1.ActionName.ResolveEnvironment} action failed: ${(0, util_1.getErrorMessage)(error)}`);
     }
     await (0, util_1.checkForTimeout)();
 }
@@ -1 +1 @@
{"version":3,"file":"resolve-environment-action.js","sourceRoot":"","sources":["../src/resolve-environment-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,6CAAgD;AAChD,6CAAsD;AACtD,iDAAmD;AACnD,uCAA6C;AAC7C,+DAAmE;AACnE,mDAKyB;AACzB,iCAMgB;AAEhB,MAAM,uBAAuB,GAAG,aAAa,CAAC;AAE9C,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,MAA0B,CAAC;IAE/B,IAAI,CAAC;QACH,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,UAAU,EACV,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;QACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;QAC3C,CAAC;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QACjD,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;QAEtD,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC1D,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;QACJ,CAAC;QAED,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG,MAAM,IAAA,gDAA0B,EAC7C,MAAM,CAAC,SAAS,EAChB,MAAM,EACN,gBAAgB,EAChB,IAAA,+BAAgB,EAAC,UAAU,CAAC,CAC7B,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,MAAM,CAAC,CAAC;IAClD,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAExC,IAAI,KAAK,YAAY,mCAAsB,EAAE,CAAC;YAC5C,6DAA6D;YAC7D,qEAAqE;YACrE,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,EAAE,CAAC,CAAC;YAC5C,MAAM,CAAC,OAAO,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,kFAAkF;YAClF,IAAI,CAAC,SAAS,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;YAEF,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CAAC;YACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;gBACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;YAC3C,CAAC;QACH,CAAC;QAED,OAAO;IACT,CAAC;IAED,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,SAAS,EACT,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;IAC3C,CAAC;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,GAAG,0BAAU,CAAC,kBAAkB,mBAC9B,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OACnB,EAAE,CACH,CAAC;IACJ,CAAC;IACD,MAAM,IAAA,sBAAe,GAAE,CAAC;AAC1B,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"resolve-environment-action.js","sourceRoot":"","sources":["../src/resolve-environment-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,6CAAgD;AAChD,6CAAsD;AACtD,iDAAmD;AACnD,uCAA6C;AAC7C,+DAAmE;AACnE,mDAKyB;AACzB,iCAOgB;AAEhB,MAAM,uBAAuB,GAAG,aAAa,CAAC;AAE9C,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,MAA0B,CAAC;IAE/B,IAAI,CAAC;QACH,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,UAAU,EACV,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;QACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;QAC3C,CAAC;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QACjD,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;QAEtD,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC1D,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;QACJ,CAAC;QAED,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG,MAAM,IAAA,gDAA0B,EAC7C,MAAM,CAAC,SAAS,EAChB,MAAM,EACN,gBAAgB,EAChB,IAAA,+BAAgB,EAAC,UAAU,CAAC,CAC7B,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,MAAM,CAAC,CAAC;IAClD,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAExC,IAAI,KAAK,YAAY,mCAAsB,EAAE,CAAC;YAC5C,6DAA6D;YAC7D,qEAAqE;YACrE,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,EAAE,CAAC,CAAC;YAC5C,MAAM,CAAC,OAAO,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,kFAAkF;YAClF,IAAI,CAAC,SAAS,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;YAEF,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CAAC;YACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;gBACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;YAC3C,CAAC;QACH,CAAC;QAED,OAAO;IACT,CAAC;IAED,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,SAAS,EACT,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;IAC3C,CAAC;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,GAAG,0BAAU,CAAC,kBAAkB,mBAAmB,IAAA,sBAAe,EAChE,KAAK,CACN,EAAE,CACJ,CAAC;IACJ,CAAC;IACD,MAAM,IAAA,sBAAe,GAAE,CAAC;AAC1B,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
61
lib/setup-codeql.js
generated
@@ -48,6 +48,7 @@ const api = __importStar(require("./api-client"));
 // creation scripts. Ensure that any changes to the format of this file are compatible with both of
 // these dependents.
 const defaults = __importStar(require("./defaults.json"));
 const feature_flags_1 = require("./feature-flags");
+const tar = __importStar(require("./tar"));
 const util = __importStar(require("./util"));
 const util_1 = require("./util");
@@ -60,7 +61,11 @@ var ToolsSource;
 })(ToolsSource || (exports.ToolsSource = ToolsSource = {}));
 exports.CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
 const CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"];
-function getCodeQLBundleName() {
+function getCodeQLBundleExtension(useZstd) {
+    return useZstd ? ".tar.zst" : ".tar.gz";
+}
+function getCodeQLBundleName(useZstd) {
+    const extension = getCodeQLBundleExtension(useZstd);
     let platform;
     if (process.platform === "win32") {
         platform = "win64";
@@ -72,9 +77,9 @@ function getCodeQLBundleName() {
         platform = "osx64";
     }
     else {
-        return "codeql-bundle.tar.gz";
+        return `codeql-bundle${extension}`;
     }
-    return `codeql-bundle-${platform}.tar.gz`;
+    return `codeql-bundle-${platform}${extension}`;
 }
 function getCodeQLActionRepository(logger) {
     if ((0, actions_util_1.isRunningLocalAction)()) {
@@ -86,7 +91,7 @@ function getCodeQLActionRepository(logger) {
     }
     return util.getRequiredEnvParam("GITHUB_ACTION_REPOSITORY");
 }
-async function getCodeQLBundleDownloadURL(tagName, apiDetails, logger) {
+async function getCodeQLBundleDownloadURL(tagName, apiDetails, useZstd, logger) {
     const codeQLActionRepository = getCodeQLActionRepository(logger);
     const potentialDownloadSources = [
         // This GitHub instance, and this Action.
@@ -101,7 +106,7 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, logger) {
     const uniqueDownloadSources = potentialDownloadSources.filter((source, index, self) => {
         return !self.slice(0, index).some((other) => (0, fast_deep_equal_1.default)(source, other));
     });
-    const codeQLBundleName = getCodeQLBundleName();
+    const codeQLBundleName = getCodeQLBundleName(useZstd);
     for (const downloadSource of uniqueDownloadSources) {
         const [apiURL, repository] = downloadSource;
         // If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
@@ -193,7 +198,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
     }
     return undefined;
 }
-async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, logger) {
+async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) {
     if (toolsInput &&
         !CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput) &&
         !toolsInput.startsWith("http")) {
@@ -335,7 +340,8 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
         }
     }
     if (!url) {
-        url = await getCodeQLBundleDownloadURL(tagName, apiDetails, logger);
+        url = await getCodeQLBundleDownloadURL(tagName, apiDetails, cliVersion !== undefined &&
+            (await useZstdBundle(cliVersion, features, tarSupportsZstd)), logger);
     }
     if (cliVersion) {
         logger.info(`Using CodeQL CLI version ${cliVersion} sourced from ${url}.`);
@@ -471,8 +477,32 @@ function getCanonicalToolcacheVersion(cliVersion, bundleVersion, logger) {
  *
  * @returns the path to the extracted bundle, and the version of the tools
  */
-async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
-    const source = await getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, logger);
+async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
+    const zstdAvailability = await tar.isZstdAvailable(logger);
+    let zstdFailureReason;
+    // If we think the installed version of tar supports zstd, try to use zstd,
+    // but be prepared to fall back to gzip in case we were wrong.
+    if (zstdAvailability.available) {
+        try {
+            // To facilitate testing the fallback, fail here if a testing environment variable is set.
+            if (process.env.CODEQL_ACTION_FORCE_ZSTD_FAILURE === "true") {
+                throw new Error("Failing since CODEQL_ACTION_FORCE_ZSTD_FAILURE is true.");
+            }
+            return await setupCodeQLBundleWithCompressionMethod(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, zstdAvailability, true);
+        }
+        catch (e) {
+            zstdFailureReason = util.getErrorMessage(e) || "unknown error";
+            logger.warning(`Failed to set up CodeQL tools with zstd. Falling back to gzipped version. Error: ${util.getErrorMessage(e)}`);
+        }
+    }
+    const result = await setupCodeQLBundleWithCompressionMethod(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, zstdAvailability, false);
+    if (result.toolsDownloadStatusReport && zstdFailureReason) {
+        result.toolsDownloadStatusReport.zstdFailureReason = zstdFailureReason;
+    }
+    return result;
+}
+async function setupCodeQLBundleWithCompressionMethod(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, zstdAvailability, useTarIfAvailable) {
+    const source = await getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, useTarIfAvailable, features, logger);
     let codeqlFolder;
     let toolsVersion = source.toolsVersion;
     let toolsDownloadStatusReport;
@@ -500,7 +530,13 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
         default:
             util.assertNever(source);
     }
-    return { codeqlFolder, toolsDownloadStatusReport, toolsSource, toolsVersion };
+    return {
+        codeqlFolder,
+        toolsDownloadStatusReport,
+        toolsSource,
+        toolsVersion,
+        zstdAvailability,
+    };
 }
 async function cleanUpGlob(glob, name, logger) {
     logger.debug(`Cleaning up ${name}.`);
@@ -525,4 +561,9 @@ function sanitizeUrlForStatusReport(url) {
         ? url
         : "sanitized-value";
 }
+async function useZstdBundle(cliVersion, features, tarSupportsZstd) {
+    return (tarSupportsZstd &&
+        semver.gte(cliVersion, feature_flags_1.CODEQL_VERSION_ZSTD_BUNDLE) &&
+        !!(await features.getValue(feature_flags_1.Feature.ZstdBundle)));
+}
 //# sourceMappingURL=setup-codeql.js.map
File diff suppressed because one or more lines are too long
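The lib/setup-codeql.js changes above make the Action prefer a Zstandard-compressed CodeQL bundle when the runner's tar supports it, gated on the CLI version and the ZstdBundle feature flag, and fall back to the gzip bundle if the zstd path fails, recording the failure reason for the status report. A rough TypeScript sketch of that try-zstd-then-fall-back shape; the helper names (isZstdAvailable, downloadAndExtract) are hypothetical stand-ins for the real tar and download plumbing:

    type Compression = "zstd" | "gzip";

    interface SetupResult {
      codeqlFolder: string;
      compression: Compression;
      zstdFailureReason?: string;
    }

    // Hypothetical probe for whether the installed tar can handle .tar.zst.
    async function isZstdAvailable(): Promise<boolean> {
      return process.platform !== "win32"; // placeholder heuristic only
    }

    // Hypothetical download step; the real code picks a bundle URL whose
    // extension is ".tar.zst" or ".tar.gz" depending on the compression.
    async function downloadAndExtract(compression: Compression): Promise<string> {
      return `/tmp/codeql-bundle-${compression}`;
    }

    export async function setupBundle(): Promise<SetupResult> {
      let zstdFailureReason: string | undefined;
      if (await isZstdAvailable()) {
        try {
          return {
            codeqlFolder: await downloadAndExtract("zstd"),
            compression: "zstd",
          };
        } catch (e) {
          // Remember why zstd failed so it can be attached to the status report.
          zstdFailureReason = e instanceof Error ? e.message : String(e);
        }
      }
      const codeqlFolder = await downloadAndExtract("gzip");
      return { codeqlFolder, compression: "gzip", zstdFailureReason };
    }

The real setupCodeQLBundle additionally lets the CODEQL_ACTION_FORCE_ZSTD_FAILURE environment variable force the fallback so the gzip path stays testable.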
12
lib/setup-codeql.test.js
generated
@@ -56,7 +56,7 @@ ava_1.default.beforeEach(() => {
             t.deepEqual(parsedVersion, expectedVersion);
         }
         catch (e) {
-            t.fail((0, util_1.wrapError)(e).message);
+            t.fail((0, util_1.getErrorMessage)(e));
         }
     }
 });
@@ -79,7 +79,7 @@ ava_1.default.beforeEach(() => {
         (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
         const tagName = "codeql-bundle-v1.2.3";
         (0, testing_utils_1.mockBundleDownloadApi)({ tagName });
-        const source = await setupCodeql.getCodeQLSource(`https://github.com/github/codeql-action/releases/download/${tagName}/codeql-bundle-linux64.tar.gz`, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, util_1.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true));
+        const source = await setupCodeql.getCodeQLSource(`https://github.com/github/codeql-action/releases/download/${tagName}/codeql-bundle-linux64.tar.gz`, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, util_1.GitHubVariant.DOTCOM, false, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
         t.is(source.sourceType, "download");
         t.is(source["cliVersion"], "1.2.3");
     });
@@ -87,7 +87,7 @@ ava_1.default.beforeEach(() => {
 (0, ava_1.default)("getCodeQLSource correctly returns bundled CLI version when tools == linked", async (t) => {
     await (0, util_1.withTmpDir)(async (tmpDir) => {
         (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const source = await setupCodeql.getCodeQLSource("linked", testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, util_1.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true));
+        const source = await setupCodeql.getCodeQLSource("linked", testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, util_1.GitHubVariant.DOTCOM, false, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
         t.is(source.toolsVersion, testing_utils_1.LINKED_CLI_VERSION.cliVersion);
         t.is(source.sourceType, "download");
     });
@@ -97,7 +97,7 @@ ava_1.default.beforeEach(() => {
     const logger = (0, testing_utils_1.getRecordingLogger)(loggedMessages);
     await (0, util_1.withTmpDir)(async (tmpDir) => {
         (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const source = await setupCodeql.getCodeQLSource("latest", testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, util_1.GitHubVariant.DOTCOM, logger);
+        const source = await setupCodeql.getCodeQLSource("latest", testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, util_1.GitHubVariant.DOTCOM, false, (0, testing_utils_1.createFeatures)([]), logger);
         // First, ensure that the CLI version is the linked version, so that backwards
         // compatibility is maintained.
         t.is(source.toolsVersion, testing_utils_1.LINKED_CLI_VERSION.cliVersion);
@@ -125,7 +125,7 @@ ava_1.default.beforeEach(() => {
     });
     await (0, util_1.withTmpDir)(async (tmpDir) => {
         (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const result = await setupCodeql.setupCodeQLBundle("linked", testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, "tmp/codeql_action_test/", util_1.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, logger);
+        const result = await setupCodeql.setupCodeQLBundle("linked", testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, "tmp/codeql_action_test/", util_1.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, testing_utils_1.createFeatures)([]), logger);
         // Basic sanity check that the version we got back is indeed
         // the linked (default) CLI version.
         t.is(result.toolsVersion, testing_utils_1.LINKED_CLI_VERSION.cliVersion);
@@ -154,7 +154,7 @@ ava_1.default.beforeEach(() => {
     });
     await (0, util_1.withTmpDir)(async (tmpDir) => {
         (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const result = await setupCodeql.setupCodeQLBundle(bundleUrl, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, "tmp/codeql_action_test/", util_1.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, logger);
+        const result = await setupCodeql.setupCodeQLBundle(bundleUrl, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, "tmp/codeql_action_test/", util_1.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, testing_utils_1.createFeatures)([]), logger);
         // Basic sanity check that the version we got back is indeed the version that the
         // bundle contains..
         t.is(result.toolsVersion, expectedVersion);
File diff suppressed because one or more lines are too long
2
lib/start-proxy-action-post.js
generated
@@ -41,7 +41,7 @@ async function runWrapper() {
         }
     }
     catch (error) {
-        core.setFailed(`start-proxy post-action step failed: ${(0, util_1.wrapError)(error).message}`);
+        core.setFailed(`start-proxy post-action step failed: ${(0, util_1.getErrorMessage)(error)}`);
     }
     const config = await configUtils.getConfig(actionsUtil.getTemporaryDirectory(), core);
     if ((config && config.debugMode) || core.isDebug()) {
@@ -1 +1 @@
{"version":3,"file":"start-proxy-action-post.js","sourceRoot":"","sources":["../src/start-proxy-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,4DAA8C;AAC9C,oDAAsC;AAEtC,4DAA8C;AAC9C,4DAA8C;AAC9C,iCAAmC;AAEnC,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;QAC/C,IAAI,GAAG,EAAE,CAAC;YACR,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;QAC5B,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,wCAAwC,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CACnE,CAAC;IACJ,CAAC;IACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CACxC,WAAW,CAAC,qBAAqB,EAAE,EACnC,IAAI,CACL,CAAC;IAEF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE,CAAC;QACnD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;QACpD,IAAI,CAAC,IAAI,CACP,wEAAwE,CACzE,CAAC;QACF,IAAI,CAAC;YACH,MAAM,QAAQ;iBACX,MAAM,EAAE;iBACR,cAAc,CACb,gBAAgB,EAChB,CAAC,WAAW,CAAC,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC;gBACE,eAAe,EAAE,IAAI;gBACrB,aAAa,EAAE,CAAC;aACjB,CACF,CAAC;QACN,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,yEAAyE;YACzE,IAAI,CAAC,OAAO,CAAC,qCAAqC,CAAC,EAAE,CAAC,CAAC;QACzD,CAAC;IACH,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"start-proxy-action-post.js","sourceRoot":"","sources":["../src/start-proxy-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,4DAA8C;AAC9C,oDAAsC;AAEtC,4DAA8C;AAC9C,4DAA8C;AAC9C,iCAAyC;AAEzC,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;QAC/C,IAAI,GAAG,EAAE,CAAC;YACR,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;QAC5B,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,wCAAwC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CACjE,CAAC;IACJ,CAAC;IACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CACxC,WAAW,CAAC,qBAAqB,EAAE,EACnC,IAAI,CACL,CAAC;IAEF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE,CAAC;QACnD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;QACpD,IAAI,CAAC,IAAI,CACP,wEAAwE,CACzE,CAAC;QACF,IAAI,CAAC;YACH,MAAM,QAAQ;iBACX,MAAM,EAAE;iBACR,cAAc,CACb,gBAAgB,EAChB,CAAC,WAAW,CAAC,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC;gBACE,eAAe,EAAE,IAAI;gBACrB,aAAa,EAAE,CAAC;aACjB,CACF,CAAC;QACN,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,yEAAyE;YACzE,IAAI,CAAC,OAAO,CAAC,qCAAqC,CAAC,EAAE,CAAC,CAAC;QACzD,CAAC;IACH,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
2
lib/start-proxy-action.js
generated
@@ -142,7 +142,7 @@ async function startProxy(binPath, config, logFilePath, logger) {
         core.setOutput("proxy_ca_certificate", config.ca.cert);
     }
     catch (error) {
-        core.setFailed(`start-proxy action failed: ${util.wrapError(error).message}`);
+        core.setFailed(`start-proxy action failed: ${util.getErrorMessage(error)}`);
     }
 }
 // getCredentials returns registry credentials from action inputs.
File diff suppressed because one or more lines are too long
2
lib/status-report.js
generated
@@ -278,7 +278,7 @@ async function sendStatusReport(statusReport) {
         }
         // something else has gone wrong and the request/response will be logged by octokit
         // it's possible this is a transient error and we should continue scanning
-        core.warning(`An unexpected error occurred when sending code scanning status report: ${(0, util_1.wrapError)(e).message}`);
+        core.warning(`An unexpected error occurred when sending code scanning status report: ${(0, util_1.getErrorMessage)(e)}`);
     }
 }
 //# sourceMappingURL=status-report.js.map
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
{"version":3,"file":"tar.js","sourceRoot":"","sources":["../src/tar.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAgDA,0CA4BC;AAID,0BAeC;AAED,wDAKC;AAtGD,6DAA0D;AAC1D,+DAAiD;AACjD,uDAAmD;AAGnD,iCAAqC;AAErC,MAAM,4BAA4B,GAAG,OAAO,CAAC;AAC7C,MAAM,4BAA4B,GAAG,MAAM,CAAC;AAO5C,KAAK,UAAU,aAAa;IAC1B,MAAM,GAAG,GAAG,MAAM,IAAA,sBAAS,EAAC,KAAK,CAAC,CAAC;IACnC,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,QAAQ,GAAG,MAAM,IAAI,uBAAU,CAAC,GAAG,EAAE,CAAC,WAAW,CAAC,EAAE;QACxD,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC,CAAC,IAAI,EAAE,CAAC;IACV,IAAI,QAAQ,KAAK,CAAC,EAAE,CAAC;QACnB,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;IAClD,CAAC;IACD,oEAAoE;IACpE,IAAI,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAC/B,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAC;QACxD,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9D,CAAC;QAED,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC5C,CAAC;SAAM,IAAI,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;QACrC,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;QAC/C,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9D,CAAC;QAED,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC5C,CAAC;SAAM,CAAC;QACN,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;IACzC,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,eAAe,CACnC,MAAc;IAEd,IAAI,CAAC;QACH,MAAM,UAAU,GAAG,MAAM,aAAa,EAAE,CAAC;QACzC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,UAAU,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,SAAS,IAAI,gBAAgB,OAAO,GAAG,CAAC,CAAC;QACrD,QAAQ,IAAI,EAAE,CAAC;YACb,KAAK,KAAK;gBACR,OAAO;oBACL,SAAS,EAAE,OAAO,IAAI,4BAA4B;oBAClD,OAAO,EAAE,UAAU;iBACpB,CAAC;YACJ,KAAK,KAAK;gBACR,OAAO;oBACL,SAAS,EAAE,OAAO,IAAI,4BAA4B;oBAClD,OAAO,EAAE,UAAU;iBACpB,CAAC;YACJ;gBACE,IAAA,kBAAW,EAAC,IAAI,CAAC,CAAC;QACtB,CAAC;IACH,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,CAAC,KAAK,CACV,oFAAoF;YAClF,6BAA6B,CAAC,EAAE,CACnC,CAAC;QACF,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,CAAC;IAC9B,CAAC;AACH,CAAC;AAIM,KAAK,UAAU,OAAO,CAC3B,IAAY,EACZ,iBAAoC;IAEpC,QAAQ,iBAAiB,EAAE,CAAC;QAC1B,KAAK,MAAM;YACT,oEAAoE;YACpE,sEAAsE;YACtE,yCAAyC;YACzC,OAAO,MAAM,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAC1C,KAAK,MAAM;YACT,gEAAgE;YAChE,sBAAsB;YACtB,OAAO,MAAM,SAAS,CAAC,UAAU,CAAC,IAAI,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;IAC5D,CAAC;AACH,CAAC;AAED,SAAgB,sBAAsB,CAAC,IAAY;IACjD,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAC7B,OAAO,MAAM,CAAC;IAChB,CAAC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"}
{"version":3,"file":"tar.js","sourceRoot":"","sources":["../src/tar.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAqDA,0CA4BC;AAID,0BAeC;AAED,wDAKC;AA3GD,6DAA0D;AAC1D,+DAAiD;AACjD,uDAAmD;AAGnD,iCAAqC;AAErC,MAAM,4BAA4B,GAAG,OAAO,CAAC;AAC7C,MAAM,4BAA4B,GAAG,MAAM,CAAC;AAO5C,KAAK,UAAU,aAAa;IAC1B,MAAM,GAAG,GAAG,MAAM,IAAA,sBAAS,EAAC,KAAK,CAAC,CAAC;IACnC,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,QAAQ,GAAG,MAAM,IAAI,uBAAU,CAAC,GAAG,EAAE,CAAC,WAAW,CAAC,EAAE;QACxD,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC,CAAC,IAAI,EAAE,CAAC;IACV,IAAI,QAAQ,KAAK,CAAC,EAAE,CAAC;QACnB,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;IAClD,CAAC;IACD,oEAAoE;IACpE,IAAI,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAC/B,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAC;QACxD,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9D,CAAC;QAED,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC5C,CAAC;SAAM,IAAI,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;QACrC,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;QAC/C,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9D,CAAC;QAED,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC5C,CAAC;SAAM,CAAC;QACN,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;IACzC,CAAC;AACH,CAAC;AAOM,KAAK,UAAU,eAAe,CACnC,MAAc;IAEd,IAAI,CAAC;QACH,MAAM,UAAU,GAAG,MAAM,aAAa,EAAE,CAAC;QACzC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,UAAU,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,SAAS,IAAI,gBAAgB,OAAO,GAAG,CAAC,CAAC;QACrD,QAAQ,IAAI,EAAE,CAAC;YACb,KAAK,KAAK;gBACR,OAAO;oBACL,SAAS,EAAE,OAAO,IAAI,4BAA4B;oBAClD,OAAO,EAAE,UAAU;iBACpB,CAAC;YACJ,KAAK,KAAK;gBACR,OAAO;oBACL,SAAS,EAAE,OAAO,IAAI,4BAA4B;oBAClD,OAAO,EAAE,UAAU;iBACpB,CAAC;YACJ;gBACE,IAAA,kBAAW,EAAC,IAAI,CAAC,CAAC;QACtB,CAAC;IACH,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,CAAC,KAAK,CACV,oFAAoF;YAClF,6BAA6B,CAAC,EAAE,CACnC,CAAC;QACF,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,CAAC;IAC9B,CAAC;AACH,CAAC;AAIM,KAAK,UAAU,OAAO,CAC3B,IAAY,EACZ,iBAAoC;IAEpC,QAAQ,iBAAiB,EAAE,CAAC;QAC1B,KAAK,MAAM;YACT,oEAAoE;YACpE,sEAAsE;YACtE,yCAAyC;YACzC,OAAO,MAAM,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAC1C,KAAK,MAAM;YACT,gEAAgE;YAChE,sBAAsB;YACtB,OAAO,MAAM,SAAS,CAAC,UAAU,CAAC,IAAI,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;IAC5D,CAAC;AACH,CAAC;AAED,SAAgB,sBAAsB,CAAC,IAAY;IACjD,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAC7B,OAAO,MAAM,CAAC;IAChB,CAAC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"}
2
lib/trap-caching.js
generated
@@ -183,7 +183,7 @@ async function cleanupTrapCaches(config, features, logger) {
         else {
             logger.info(`Failed to cleanup TRAP caches, continuing. Details: ${e}`);
         }
-        return { trap_cache_cleanup_error: (0, util_1.wrapError)(e).message };
+        return { trap_cache_cleanup_error: (0, util_1.getErrorMessage)(e) };
     }
 }
 async function getTrapCachesForLanguage(allCaches, language, logger) {
File diff suppressed because one or more lines are too long
13
lib/upload-lib.js
generated
@@ -173,7 +173,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
         };
         const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(gitHubVersion.type);
         const initCodeQLResult = await (0, init_1.initCodeQL)(undefined, // There is no tools input on the upload action
-        apiDetails, tempDir, gitHubVersion.type, codeQLDefaultVersionInfo, logger);
+        apiDetails, tempDir, gitHubVersion.type, codeQLDefaultVersionInfo, features, logger);
         codeQL = initCodeQLResult.codeql;
     }
     if (!(await codeQL.supportsFeature(tools_features_1.ToolsFeature.SarifMergeRunsFromEqualCategory))) {
@@ -319,15 +319,20 @@ function validateSarifFileSchema(sarifFilePath, logger) {
         sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
     }
     catch (e) {
-        throw new InvalidSarifUploadError(`Invalid SARIF. JSON syntax error: ${(0, util_1.wrapError)(e).message}`);
+        throw new InvalidSarifUploadError(`Invalid SARIF. JSON syntax error: ${(0, util_1.getErrorMessage)(e)}`);
     }
     // eslint-disable-next-line @typescript-eslint/no-require-imports
     const schema = require("../src/sarif-schema-2.1.0.json");
     const result = new jsonschema.Validator().validate(sarif, schema);
     // Filter errors related to invalid URIs in the artifactLocation field as this
     // is a breaking change. See https://github.com/github/codeql-action/issues/1703
-    const errors = (result.errors || []).filter((err) => err.argument !== "uri-reference");
-    const warnings = (result.errors || []).filter((err) => err.argument === "uri-reference");
+    const warningAttributes = ["uri-reference", "uri"];
+    const errors = (result.errors ?? []).filter((err) => !(err.name === "format" &&
+        typeof err.argument === "string" &&
+        warningAttributes.includes(err.argument)));
+    const warnings = (result.errors ?? []).filter((err) => err.name === "format" &&
+        typeof err.argument === "string" &&
+        warningAttributes.includes(err.argument));
     for (const warning of warnings) {
         logger.info(`Warning: '${warning.instance}' is not a valid URI in '${warning.property}'.`);
     }
File diff suppressed because one or more lines are too long
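The validateSarifFileSchema hunk above widens which jsonschema findings are downgraded to warnings: instead of matching only err.argument against "uri-reference", it now treats format errors whose argument is "uri-reference" or "uri" as warnings and keeps everything else as hard errors. A small TypeScript sketch of that partitioning against the jsonschema package; the sarif and schema arguments are placeholders rather than the Action's real inputs:

    import { Validator, ValidationError } from "jsonschema";

    const warningAttributes = ["uri-reference", "uri"];

    // True for findings that should only be logged, not fail the upload.
    function isUriFormatWarning(err: ValidationError): boolean {
      return (
        err.name === "format" &&
        typeof err.argument === "string" &&
        warningAttributes.includes(err.argument)
      );
    }

    export function partitionFindings(sarif: unknown, schema: object) {
      const result = new Validator().validate(sarif, schema);
      const errors = (result.errors ?? []).filter((err) => !isUriFormatWarning(err));
      const warnings = (result.errors ?? []).filter(isUriFormatWarning);
      return { errors, warnings };
    }

The matching upload-lib test below now expects one extra logged warning, since an invalid helpUri is also reported under the "uri" format.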
4
lib/upload-lib.test.js
generated
@@ -192,8 +192,8 @@ ava_1.default.beforeEach(() => {
     };
     const sarifFile = `${__dirname}/../src/testdata/with-invalid-uri.sarif`;
     uploadLib.validateSarifFileSchema(sarifFile, mockLogger);
-    t.deepEqual(loggedMessages.length, 2);
-    t.deepEqual(loggedMessages[1], "Warning: 'not a valid URI' is not a valid URI in 'instance.runs[0].results[0].locations[0].physicalLocation.artifactLocation.uri'.");
+    t.deepEqual(loggedMessages.length, 3);
+    t.deepEqual(loggedMessages[1], "Warning: 'not a valid URI' is not a valid URI in 'instance.runs[0].tool.driver.rules[0].helpUri'.", "Warning: 'not a valid URI' is not a valid URI in 'instance.runs[0].results[0].locations[0].physicalLocation.artifactLocation.uri'.");
 });
 (0, ava_1.default)("shouldShowCombineSarifFilesDeprecationWarning when on dotcom", async (t) => {
     t.true(await uploadLib.shouldShowCombineSarifFilesDeprecationWarning([createMockSarif("abc", "def"), createMockSarif("abc", "def")], {
File diff suppressed because one or more lines are too long
54
lib/upload-sarif-action-post-helper.js
generated
@@ -1,54 +0,0 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.uploadArtifacts = uploadArtifacts;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
async function uploadArtifacts(uploadDebugArtifacts) {
|
||||
const tempDir = actionsUtil.getTemporaryDirectory();
|
||||
// Upload Actions SARIF artifacts for debugging when environment variable is set
|
||||
if (process.env["CODEQL_ACTION_DEBUG_COMBINED_SARIF"] === "true") {
|
||||
core.info("Uploading available combined SARIF files as Actions debugging artifact...");
|
||||
const baseTempDir = path.resolve(tempDir, "combined-sarif");
|
||||
const toUpload = [];
|
||||
if (fs.existsSync(baseTempDir)) {
|
||||
const outputDirs = fs.readdirSync(baseTempDir);
|
||||
for (const outputDir of outputDirs) {
|
||||
const sarifFiles = fs
|
||||
.readdirSync(path.resolve(baseTempDir, outputDir))
|
||||
.filter((f) => f.endsWith(".sarif"));
|
||||
for (const sarifFile of sarifFiles) {
|
||||
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
|
||||
}
|
||||
}
|
||||
}
|
||||
if (toUpload.length > 0) {
|
||||
await uploadDebugArtifacts(toUpload, baseTempDir, "upload-debug-artifacts");
|
||||
}
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=upload-sarif-action-post-helper.js.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"upload-sarif-action-post-helper.js","sourceRoot":"","sources":["../src/upload-sarif-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAOA,0CAyCC;AAhDD,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AAEtC,4DAA8C;AAEvC,KAAK,UAAU,eAAe,CACnC,oBAIkB;IAElB,MAAM,OAAO,GAAG,WAAW,CAAC,qBAAqB,EAAE,CAAC;IAEpD,gFAAgF;IAChF,IAAI,OAAO,CAAC,GAAG,CAAC,oCAAoC,CAAC,KAAK,MAAM,EAAE,CAAC;QACjE,IAAI,CAAC,IAAI,CACP,2EAA2E,CAC5E,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,gBAAgB,CAAC,CAAC;QAE5D,MAAM,QAAQ,GAAa,EAAE,CAAC;QAE9B,IAAI,EAAE,CAAC,UAAU,CAAC,WAAW,CAAC,EAAE,CAAC;YAC/B,MAAM,UAAU,GAAG,EAAE,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;YAE/C,KAAK,MAAM,SAAS,IAAI,UAAU,EAAE,CAAC;gBACnC,MAAM,UAAU,GAAG,EAAE;qBAClB,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;qBACjD,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAEvC,KAAK,MAAM,SAAS,IAAI,UAAU,EAAE,CAAC;oBACnC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC,CAAC;gBACjE,CAAC;YACH,CAAC;QACH,CAAC;QAED,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACxB,MAAM,oBAAoB,CACxB,QAAQ,EACR,WAAW,EACX,wBAAwB,CACzB,CAAC;QACJ,CAAC;IACH,CAAC;AACH,CAAC"}
|
||||
12
lib/upload-sarif-action-post.js
generated
@@ -30,14 +30,20 @@ Object.defineProperty(exports, "__esModule", { value: true });
  */
 const core = __importStar(require("@actions/core"));
 const debugArtifacts = __importStar(require("./debug-artifacts"));
-const uploadSarifActionPostHelper = __importStar(require("./upload-sarif-action-post-helper"));
+const environment_1 = require("./environment");
+const logging_1 = require("./logging");
 const util_1 = require("./util");
 async function runWrapper() {
     try {
-        await uploadSarifActionPostHelper.uploadArtifacts(debugArtifacts.uploadDebugArtifacts);
+        const logger = (0, logging_1.getActionsLogger)();
+        // Upload SARIF artifacts if we determine that this is a third-party analysis run.
+        // For first-party runs, this artifact will be uploaded in the `analyze-post` step.
+        if (process.env[environment_1.EnvVar.INIT_ACTION_HAS_RUN] !== "true") {
+            await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger));
+        }
     }
     catch (error) {
-        core.setFailed(`upload-sarif post-action step failed: ${(0, util_1.wrapError)(error).message}`);
+        core.setFailed(`upload-sarif post-action step failed: ${(0, util_1.getErrorMessage)(error)}`);
     }
 }
 void runWrapper();
@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action-post.js","sourceRoot":"","sources":["../src/upload-sarif-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,kEAAoD;AACpD,+FAAiF;AACjF,iCAAmC;AAEnC,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,2BAA2B,CAAC,eAAe,CAC/C,cAAc,CAAC,oBAAoB,CACpC,CAAC;IACJ,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,yCAAyC,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CACpE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action-post.js","sourceRoot":"","sources":["../src/upload-sarif-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,kEAAoD;AACpD,+CAAuC;AACvC,uCAAwD;AACxD,iCAAyC;AAEzC,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,kFAAkF;QAClF,mFAAmF;QACnF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,MAAM,IAAA,mBAAS,EAAC,yCAAyC,EAAE,GAAG,EAAE,CAC9D,cAAc,CAAC,4BAA4B,CAAC,MAAM,CAAC,CACpD,CAAC;QACJ,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,yCAAyC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAClE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
2
lib/upload-sarif-action.js
generated
@@ -86,7 +86,7 @@ async function runWrapper() {
         await run();
     }
     catch (error) {
-        core.setFailed(`codeql/upload-sarif action failed: ${(0, util_1.wrapError)(error).message}`);
+        core.setFailed(`codeql/upload-sarif action failed: ${(0, util_1.getErrorMessage)(error)}`);
     }
 }
 void runWrapper();
@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAyE;AACzE,6CAAgD;AAChD,mDAA2C;AAC3C,uCAAqD;AACrD,6CAAkD;AAClD,mDAOyB;AACzB,yDAA2C;AAC3C,iCAQgB;AAMhB,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C,EAC1C,MAAc;IAEd,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,WAAW,EACtB,SAAS,EACT,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAA4B;YAC5C,GAAG,gBAAgB;YACnB,GAAG,WAAW;SACf,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;IAEtD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IAEF,MAAM,wBAAwB,GAAG,MAAM,IAAA,sCAAsB,EAC3D,0BAAU,CAAC,WAAW,EACtB,UAAU,EACV,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,wBAAwB,KAAK,SAAS,EAAE,CAAC;QAC3C,MAAM,IAAA,gCAAgB,EAAC,wBAAwB,CAAC,CAAC;IACnD,CAAC;IAED,IAAI,CAAC;QACH,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,WAAW,CAC/C,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,QAAQ,EACR,MAAM,CACP,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE,CAAC;YACnB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;QAClE,CAAC;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE,CAAC;YAC1E,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,MAAM,CACP,CAAC;QACJ,CAAC;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IAC9E,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GACT,CAAC,IAAA,oCAAoB,EAAC,0BAAU,CAAC,WAAW,CAAC;YAC7C,cAAc,YAAY,UAAU,CAAC,uBAAuB;YAC1D,CAAC,CAAC,IAAI,yBAAkB,CAAC,cAAc,CAAC,OAAO,CAAC;YAChD,CAAC,CAAC,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAChC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QAExB,MAAM,qBAAqB,GAAG,MAAM,IAAA,sCAAsB,EACxD,0BAAU,CAAC,WAAW,EACtB,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,OAAO,EACP,KAAK,CAAC,KAAK,CACZ,CAAC;QACF,IAAI,qBAAqB,KAAK,SAAS,EAAE,CAAC;YACxC,MAAM,IAAA,gCAAgB,EAAC,qBAAqB,CAAC,CAAC;QAChD,CAAC;QACD,OAAO;IACT,CAAC;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,sCAAsC,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CACjE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAyE;AACzE,6CAAgD;AAChD,mDAA2C;AAC3C,uCAAqD;AACrD,6CAAkD;AAClD,mDAOyB;AACzB,yDAA2C;AAC3C,iCASgB;AAMhB,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C,EAC1C,MAAc;IAEd,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,WAAW,EACtB,SAAS,EACT,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAA4B;YAC5C,GAAG,gBAAgB;YACnB,GAAG,WAAW;SACf,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;IAEtD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IAEF,MAAM,wBAAwB,GAAG,MAAM,IAAA,sCAAsB,EAC3D,0BAAU,CAAC,WAAW,EACtB,UAAU,EACV,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,wBAAwB,KAAK,SAAS,EAAE,CAAC;QAC3C,MAAM,IAAA,gCAAgB,EAAC,wBAAwB,CAAC,CAAC;IACnD,CAAC;IAED,IAAI,CAAC;QACH,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,WAAW,CAC/C,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,QAAQ,EACR,MAAM,CACP,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE,CAAC;YACnB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;QAClE,CAAC;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE,CAAC;YAC1E,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,MAAM,CACP,CAAC;QACJ,CAAC;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IAC9E,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GACT,CAAC,IAAA,oCAAoB,EAAC,0BAAU,CAAC,WAAW,CAAC;YAC7C,cAAc,YAAY,UAAU,CAAC,uBAAuB;YAC1D,CAAC,CAAC,IAAI,yBAAkB,CAAC,cAAc,CAAC,OAAO,CAAC;YAChD,CAAC,CAAC,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAChC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QAExB,MAAM,qBAAqB,GAAG,MAAM,IAAA,sCAAsB,EACxD,0BAAU,CAAC,WAAW,EACtB,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,OAAO,EACP,KAAK,CAAC,KAAK,CACZ,CAAC;QACF,IAAI,qBAAqB,KAAK,SAAS,EAAE,CAAC;YACxC,MAAM,IAAA,gCAAgB,EAAC,qBAAqB,CAAC,CAAC;QAChD,CAAC;QACD,OAAO;IACT,CAAC;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,sCAAsC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC/D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
8
lib/util.js
generated
@@ -784,8 +784,14 @@ function fixInvalidNotificationsInFile(inputPath, outputPath, logger) {
 function wrapError(error) {
     return error instanceof Error ? error : new Error(String(error));
 }
+/**
+ * Returns an appropriate message for the error.
+ *
+ * If the error is an `Error` instance, this returns the error message without
+ * an `Error: ` prefix.
+ */
 function getErrorMessage(error) {
-    return error instanceof Error ? error.toString() : String(error);
+    return error instanceof Error ? error.message : String(error);
 }
 function prettyPrintPack(pack) {
     return `${pack.name}${pack.version ? `@${pack.version}` : ""}${pack.path ? `:${pack.path}` : ""}`;
File diff suppressed because one or more lines are too long
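The util.js hunk above changes getErrorMessage to return error.message instead of error.toString(), so it drops the "Error: " prefix and matches what the old wrapError(e).message call sites produced before they were migrated. A one-function TypeScript sketch with a small usage comparison:

    // Returns a human-readable message for any thrown value.
    // For Error instances this is `message`, i.e. without the "Error: " prefix.
    export function getErrorMessage(error: unknown): string {
      return error instanceof Error ? error.message : String(error);
    }

    const e = new Error("tar exited with code 2");
    console.log(e.toString());        // "Error: tar exited with code 2"
    console.log(getErrorMessage(e));  // "tar exited with code 2"
    console.log(getErrorMessage(42)); // "42"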
1141
node_modules/.package-lock.json
generated
vendored
File diff suppressed because it is too large
2
node_modules/@eslint/js/package.json
generated
vendored
@@ -1,6 +1,6 @@
 {
   "name": "@eslint/js",
-  "version": "9.9.1",
+  "version": "9.11.0",
   "description": "ESLint JavaScript language implementation",
   "main": "./src/index.js",
   "scripts": {},
130
node_modules/@humanwhocodes/config-array/api.js
generated
vendored
@@ -154,8 +154,82 @@ const MINIMATCH_OPTIONS = {
|
||||
|
||||
const CONFIG_TYPES = new Set(['array', 'function']);
|
||||
|
||||
/**
|
||||
* Fields that are considered metadata and not part of the config object.
|
||||
*/
|
||||
const META_FIELDS = new Set(['name']);
|
||||
|
||||
const FILES_AND_IGNORES_SCHEMA = new objectSchema.ObjectSchema(filesAndIgnoresSchema);
|
||||
|
||||
/**
|
||||
* Wrapper error for config validation errors that adds a name to the front of the
|
||||
* error message.
|
||||
*/
|
||||
class ConfigError extends Error {
|
||||
|
||||
/**
|
||||
* Creates a new instance.
|
||||
* @param {string} name The config object name causing the error.
|
||||
* @param {number} index The index of the config object in the array.
|
||||
* @param {Error} source The source error.
|
||||
*/
|
||||
constructor(name, index, { cause, message }) {
|
||||
|
||||
|
||||
const finalMessage = message || cause.message;
|
||||
|
||||
super(`Config ${name}: ${finalMessage}`, { cause });
|
||||
|
||||
// copy over custom properties that aren't represented
|
||||
if (cause) {
|
||||
for (const key of Object.keys(cause)) {
|
||||
if (!(key in this)) {
|
||||
this[key] = cause[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The name of the error.
|
||||
* @type {string}
|
||||
* @readonly
|
||||
*/
|
||||
this.name = 'ConfigError';
|
||||
|
||||
/**
|
||||
* The index of the config object in the array.
|
||||
* @type {number}
|
||||
* @readonly
|
||||
*/
|
||||
this.index = index;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the name of a config object.
|
||||
* @param {object} config The config object to get the name of.
|
||||
* @returns {string} The name of the config object.
|
||||
*/
|
||||
function getConfigName(config) {
|
||||
if (config && typeof config.name === 'string' && config.name) {
|
||||
return `"${config.name}"`;
|
||||
}
|
||||
|
||||
return '(unnamed)';
|
||||
}
|
||||
|
||||
/**
|
||||
* Rethrows a config error with additional information about the config object.
|
||||
* @param {object} config The config object to get the name of.
|
||||
* @param {number} index The index of the config object in the array.
|
||||
* @param {Error} error The error to rethrow.
|
||||
* @throws {ConfigError} When the error is rethrown for a config.
|
||||
*/
|
||||
function rethrowConfigError(config, index, error) {
|
||||
const configName = getConfigName(config);
|
||||
throw new ConfigError(configName, index, error);
|
||||
}
|
||||
|
||||
/**
|
||||
* Shorthand for checking if a value is a string.
|
||||
* @param {any} value The value to check.
|
||||
@@ -166,23 +240,43 @@ function isString(value) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the files and ignores keys of a config object are valid as per base schema.
|
||||
* @param {object} config The config object to check.
|
||||
* Creates a function that asserts that the config is valid
|
||||
* during normalization. This checks that the config is not nullish
|
||||
* and that files and ignores keys of a config object are valid as per base schema.
|
||||
* @param {Object} config The config object to check.
|
||||
* @param {number} index The index of the config object in the array.
|
||||
* @returns {void}
|
||||
* @throws {TypeError} If the files and ignores keys of a config object are not valid.
|
||||
* @throws {ConfigError} If the files and ignores keys of a config object are not valid.
|
||||
*/
|
||||
function assertValidFilesAndIgnores(config) {
|
||||
if (!config || typeof config !== 'object') {
|
||||
return;
|
||||
function assertValidBaseConfig(config, index) {
|
||||
|
||||
if (config === null) {
|
||||
throw new ConfigError(getConfigName(config), index, { message: 'Unexpected null config.' });
|
||||
}
|
||||
|
||||
if (config === undefined) {
|
||||
throw new ConfigError(getConfigName(config), index, { message: 'Unexpected undefined config.' });
|
||||
}
|
||||
|
||||
if (typeof config !== 'object') {
|
||||
throw new ConfigError(getConfigName(config), index, { message: 'Unexpected non-object config.' });
|
||||
}
|
||||
|
||||
const validateConfig = { };
|
||||
|
||||
if ('files' in config) {
|
||||
validateConfig.files = config.files;
|
||||
}
|
||||
|
||||
if ('ignores' in config) {
|
||||
validateConfig.ignores = config.ignores;
|
||||
}
|
||||
FILES_AND_IGNORES_SCHEMA.validate(validateConfig);
|
||||
|
||||
try {
|
||||
FILES_AND_IGNORES_SCHEMA.validate(validateConfig);
|
||||
} catch (validationError) {
|
||||
rethrowConfigError(config, index, { cause: validationError });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -377,7 +471,7 @@ function pathMatchesIgnores(filePath, basePath, config) {
|
||||
*/
|
||||
const relativeFilePath = path.relative(basePath, filePath);
|
||||
|
||||
return Object.keys(config).length > 1 &&
|
||||
return Object.keys(config).filter(key => !META_FIELDS.has(key)).length > 1 &&
|
||||
!shouldIgnorePath(config.ignores, filePath, relativeFilePath);
|
||||
}
|
||||
|
||||
@@ -511,7 +605,7 @@ class ConfigArray extends Array {
|
||||
/**
|
||||
* Tracks if the array has been normalized.
|
||||
* @property isNormalized
|
||||
* @type boolean
|
||||
* @type {boolean}
|
||||
* @private
|
||||
*/
|
||||
this[ConfigArraySymbol.isNormalized] = normalized;
|
||||
@@ -530,7 +624,7 @@ class ConfigArray extends Array {
|
||||
* The path of the config file that this array was loaded from.
|
||||
* This is used to calculate filename matches.
|
||||
* @property basePath
|
||||
* @type string
|
||||
* @type {string}
|
||||
*/
|
||||
this.basePath = basePath;
|
||||
|
||||
@@ -539,14 +633,14 @@ class ConfigArray extends Array {
|
||||
/**
|
||||
* The supported config types.
|
||||
* @property configTypes
|
||||
* @type Array<string>
|
||||
* @type {Array<string>}
|
||||
*/
|
||||
this.extraConfigTypes = Object.freeze([...extraConfigTypes]);
|
||||
|
||||
/**
|
||||
* A cache to store calculated configs for faster repeat lookup.
|
||||
* @property configCache
|
||||
* @type Map
|
||||
* @type {Map<string, Object>}
|
||||
* @private
|
||||
*/
|
||||
this[ConfigArraySymbol.configCache] = new Map();
|
||||
@@ -645,7 +739,7 @@ class ConfigArray extends Array {
|
||||
* In this case, it acts list a globally ignored pattern. If there
|
||||
* are additional keys, then ignores act like exclusions.
|
||||
*/
|
||||
if (config.ignores && Object.keys(config).length === 1) {
|
||||
if (config.ignores && Object.keys(config).filter(key => !META_FIELDS.has(key)).length === 1) {
|
||||
result.push(...config.ignores);
|
||||
}
|
||||
}
|
||||
@@ -677,7 +771,7 @@ class ConfigArray extends Array {
|
||||
const normalizedConfigs = await normalize(this, context, this.extraConfigTypes);
|
||||
this.length = 0;
|
||||
this.push(...normalizedConfigs.map(this[ConfigArraySymbol.preprocessConfig].bind(this)));
|
||||
this.forEach(assertValidFilesAndIgnores);
|
||||
this.forEach(assertValidBaseConfig);
|
||||
this[ConfigArraySymbol.isNormalized] = true;
|
||||
|
||||
// prevent further changes
|
||||
@@ -699,7 +793,7 @@ class ConfigArray extends Array {
|
||||
const normalizedConfigs = normalizeSync(this, context, this.extraConfigTypes);
|
||||
this.length = 0;
|
||||
this.push(...normalizedConfigs.map(this[ConfigArraySymbol.preprocessConfig].bind(this)));
|
||||
this.forEach(assertValidFilesAndIgnores);
|
||||
this.forEach(assertValidBaseConfig);
|
||||
this[ConfigArraySymbol.isNormalized] = true;
|
||||
|
||||
// prevent further changes
|
||||
@@ -932,7 +1026,11 @@ class ConfigArray extends Array {
|
||||
// otherwise construct the config
|
||||
|
||||
finalConfig = matchingConfigIndices.reduce((result, index) => {
|
||||
return this[ConfigArraySymbol.schema].merge(result, this[index]);
|
||||
try {
|
||||
return this[ConfigArraySymbol.schema].merge(result, this[index]);
|
||||
} catch (validationError) {
|
||||
rethrowConfigError(this[index], index, { cause: validationError});
|
||||
}
|
||||
}, {}, this);
|
||||
|
||||
finalConfig = this[ConfigArraySymbol.finalizeConfig](finalConfig);
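The vendored @humanwhocodes/config-array update above wraps schema validation failures in a ConfigError that prefixes the failing config object's name, or "(unnamed)", and records its index in the array. A simplified TypeScript sketch of that wrapping pattern; the validate function is a placeholder, not the library's real ObjectSchema API:

    class ConfigError extends Error {
      readonly index: number;
      constructor(name: string, index: number, cause: Error) {
        super(`Config ${name}: ${cause.message}`, { cause });
        this.name = "ConfigError";
        this.index = index;
      }
    }

    function getConfigName(config: { name?: unknown }): string {
      return typeof config.name === "string" && config.name
        ? `"${config.name}"`
        : "(unnamed)";
    }

    // Placeholder check standing in for FILES_AND_IGNORES_SCHEMA.validate.
    function validate(config: object): void {
      if ("files" in config && !Array.isArray((config as { files?: unknown }).files)) {
        throw new TypeError('Key "files": Expected an array.');
      }
    }

    export function assertValidBaseConfig(config: { name?: unknown }, index: number): void {
      try {
        validate(config);
      } catch (validationError) {
        throw new ConfigError(getConfigName(config), index, validationError as Error);
      }
    }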
8
node_modules/@humanwhocodes/config-array/package.json
generated
vendored
@@ -1,11 +1,13 @@
 {
   "name": "@humanwhocodes/config-array",
-  "version": "0.11.14",
+  "version": "0.13.0",
   "description": "Glob-based configuration matching.",
   "author": "Nicholas C. Zakas",
   "main": "api.js",
   "files": [
-    "api.js"
+    "api.js",
+    "LICENSE",
+    "README.md"
   ],
   "repository": {
     "type": "git",
@@ -42,7 +44,7 @@
     "node": ">=10.10.0"
   },
   "dependencies": {
-    "@humanwhocodes/object-schema": "^2.0.2",
+    "@humanwhocodes/object-schema": "^2.0.3",
     "debug": "^4.3.1",
     "minimatch": "^3.0.5"
   },
29
node_modules/@humanwhocodes/object-schema/.eslintrc.js
generated
vendored
@@ -1,29 +0,0 @@
|
||||
module.exports = {
|
||||
"env": {
|
||||
"commonjs": true,
|
||||
"es6": true,
|
||||
"node": true
|
||||
},
|
||||
"extends": "eslint:recommended",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 2018
|
||||
},
|
||||
"rules": {
|
||||
"indent": [
|
||||
"error",
|
||||
4
|
||||
],
|
||||
"linebreak-style": [
|
||||
"error",
|
||||
"unix"
|
||||
],
|
||||
"quotes": [
|
||||
"error",
|
||||
"double"
|
||||
],
|
||||
"semi": [
|
||||
"error",
|
||||
"always"
|
||||
]
|
||||
}
|
||||
};
|
||||
27
node_modules/@humanwhocodes/object-schema/.github/workflows/nodejs-test.yml
generated
vendored
@@ -1,27 +0,0 @@
|
||||
name: Node CI
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
os: [windows-latest, macOS-latest, ubuntu-latest]
|
||||
node: [18.x, 19.x, 20.x]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
- name: npm install, build, and test
|
||||
run: |
|
||||
npm install
|
||||
npm run build --if-present
|
||||
npm test
|
||||
env:
|
||||
CI: true
|
||||
39
node_modules/@humanwhocodes/object-schema/.github/workflows/release-please.yml
generated
vendored
@@ -1,39 +0,0 @@
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
name: release-please
|
||||
jobs:
|
||||
release-please:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: GoogleCloudPlatform/release-please-action@v3
|
||||
id: release
|
||||
with:
|
||||
release-type: node
|
||||
package-name: object-schema
|
||||
# The logic below handles the npm publication:
|
||||
- uses: actions/checkout@v4
|
||||
# these if statements ensure that a publication only occurs when
|
||||
# a new release is created:
|
||||
if: ${{ steps.release.outputs.release_created }}
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 12
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
if: ${{ steps.release.outputs.release_created }}
|
||||
- run: npm ci
|
||||
if: ${{ steps.release.outputs.release_created }}
|
||||
- run: npm publish
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
|
||||
if: ${{ steps.release.outputs.release_created }}
|
||||
|
||||
# Tweets out release announcement
|
||||
- run: 'npx @humanwhocodes/tweet "Object Schema v${{ steps.release.outputs.major }}.${{ steps.release.outputs.minor }}.${{ steps.release.outputs.patch }} has been released!\n\n${{ github.event.release.html_url }}"'
|
||||
if: ${{ steps.release.outputs.release_created }}
|
||||
env:
|
||||
TWITTER_CONSUMER_KEY: ${{ secrets.TWITTER_CONSUMER_KEY }}
|
||||
TWITTER_CONSUMER_SECRET: ${{ secrets.TWITTER_CONSUMER_SECRET }}
|
||||
TWITTER_ACCESS_TOKEN_KEY: ${{ secrets.TWITTER_ACCESS_TOKEN_KEY }}
|
||||
TWITTER_ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}
|
||||
7  node_modules/@humanwhocodes/object-schema/CHANGELOG.md  (generated, vendored)
@@ -1,5 +1,12 @@
# Changelog

## [2.0.3](https://github.com/humanwhocodes/object-schema/compare/v2.0.2...v2.0.3) (2024-04-01)


### Bug Fixes

* Ensure test files are not including in package ([6eeb32c](https://github.com/humanwhocodes/object-schema/commit/6eeb32cc76a3e37d76b2990bd603d72061c816e0)), closes [#19](https://github.com/humanwhocodes/object-schema/issues/19)

## [2.0.2](https://github.com/humanwhocodes/object-schema/compare/v2.0.1...v2.0.2) (2024-01-10)
7  node_modules/@humanwhocodes/object-schema/package.json  (generated, vendored)
@@ -1,8 +1,13 @@
{
  "name": "@humanwhocodes/object-schema",
  "version": "2.0.2",
  "version": "2.0.3",
  "description": "An object schema merger/validator",
  "main": "src/index.js",
  "files": [
    "src",
    "LICENSE",
    "README.md"
  ],
  "directories": {
    "test": "tests"
  },
66  node_modules/@humanwhocodes/object-schema/tests/merge-strategy.js  (generated, vendored)
@@ -1,66 +0,0 @@
|
||||
/**
|
||||
* @filedescription Merge Strategy Tests
|
||||
*/
|
||||
/* global it, describe, beforeEach */
|
||||
|
||||
"use strict";
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
const assert = require("chai").assert;
|
||||
const { MergeStrategy } = require("../src/");
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Class
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
describe("MergeStrategy", () => {
|
||||
|
||||
|
||||
describe("overwrite()", () => {
|
||||
|
||||
it("should overwrite the first value with the second when the second is defined", () => {
|
||||
const result = MergeStrategy.overwrite(1, 2);
|
||||
assert.strictEqual(result, 2);
|
||||
});
|
||||
|
||||
it("should overwrite the first value with the second when the second is undefined", () => {
|
||||
const result = MergeStrategy.overwrite(1, undefined);
|
||||
assert.strictEqual(result, undefined);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe("replace()", () => {
|
||||
|
||||
it("should overwrite the first value with the second when the second is defined", () => {
|
||||
const result = MergeStrategy.replace(1, 2);
|
||||
assert.strictEqual(result, 2);
|
||||
});
|
||||
|
||||
it("should return the first value when the second is undefined", () => {
|
||||
const result = MergeStrategy.replace(1, undefined);
|
||||
assert.strictEqual(result, 1);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe("assign()", () => {
|
||||
|
||||
it("should merge properties from two objects when called", () => {
|
||||
|
||||
const object1 = { foo: 1, bar: 3 };
|
||||
const object2 = { foo: 2 };
|
||||
|
||||
const result = MergeStrategy.assign(object1, object2);
|
||||
assert.deepStrictEqual(result, {
|
||||
foo: 2,
|
||||
bar: 3
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
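The deleted test file above is the clearest record of what the three built-in merge strategies do. As a quick reference, a minimal sketch of that behaviour, assuming the package's public entry point re-exports `MergeStrategy` (as the tests' `require("../src/")` implies):

```js
// Behaviour of the MergeStrategy helpers, as exercised by the removed tests.
const { MergeStrategy } = require("@humanwhocodes/object-schema");

MergeStrategy.overwrite(1, 2);         // 2 – always takes the second value
MergeStrategy.overwrite(1, undefined); // undefined – even when the second value is undefined
MergeStrategy.replace(1, undefined);   // 1 – keeps the first value when the second is undefined
MergeStrategy.assign({ foo: 1, bar: 3 }, { foo: 2 }); // { foo: 2, bar: 3 }
```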
659  node_modules/@humanwhocodes/object-schema/tests/object-schema.js  (generated, vendored)
@@ -1,659 +0,0 @@
|
||||
/**
|
||||
* @filedescription Object Schema Tests
|
||||
*/
|
||||
/* global it, describe, beforeEach */
|
||||
|
||||
"use strict";
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
const assert = require("chai").assert;
|
||||
const { ObjectSchema } = require("../src/");
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Class
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
describe("ObjectSchema", () => {
|
||||
|
||||
let schema;
|
||||
|
||||
describe("new ObjectSchema()", () => {
|
||||
|
||||
it("should add a new key when a strategy is passed", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {},
|
||||
validate() {}
|
||||
}
|
||||
});
|
||||
|
||||
assert.isTrue(schema.hasKey("foo"));
|
||||
});
|
||||
|
||||
it("should throw an error when a strategy is missing a merge() method", () => {
|
||||
assert.throws(() => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
validate() { }
|
||||
}
|
||||
});
|
||||
}, /Definition for key "foo" must have a merge property/);
|
||||
});
|
||||
|
||||
it("should throw an error when a strategy is missing a merge() method", () => {
|
||||
assert.throws(() => {
|
||||
schema = new ObjectSchema();
|
||||
}, /Schema definitions missing/);
|
||||
});
|
||||
|
||||
it("should throw an error when a strategy is missing a validate() method", () => {
|
||||
assert.throws(() => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() { },
|
||||
}
|
||||
});
|
||||
}, /Definition for key "foo" must have a validate\(\) method/);
|
||||
});
|
||||
|
||||
it("should throw an error when merge is an invalid string", () => {
|
||||
assert.throws(() => {
|
||||
new ObjectSchema({
|
||||
foo: {
|
||||
merge: "bar",
|
||||
validate() { }
|
||||
}
|
||||
});
|
||||
}, /key "foo" missing valid merge strategy/);
|
||||
});
|
||||
|
||||
it("should throw an error when validate is an invalid string", () => {
|
||||
assert.throws(() => {
|
||||
new ObjectSchema({
|
||||
foo: {
|
||||
merge: "assign",
|
||||
validate: "s"
|
||||
}
|
||||
});
|
||||
}, /key "foo" missing valid validation strategy/);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
describe("merge()", () => {
|
||||
|
||||
it("should throw an error when an unexpected key is found", () => {
|
||||
let schema = new ObjectSchema({});
|
||||
|
||||
assert.throws(() => {
|
||||
schema.merge({ foo: true }, { foo: true });
|
||||
}, /Unexpected key "foo"/);
|
||||
});
|
||||
|
||||
it("should throw an error when merge() throws an error", () => {
|
||||
let schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
throw new Error("Boom!");
|
||||
},
|
||||
validate() {}
|
||||
}
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
schema.merge({ foo: true }, { foo: true });
|
||||
}, /Key "foo": Boom!/);
|
||||
|
||||
});
|
||||
|
||||
it("should throw an error when merge() throws an error with a readonly message", () => {
|
||||
let schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
throw {
|
||||
get message() {
|
||||
return "Boom!";
|
||||
}
|
||||
};
|
||||
},
|
||||
validate() {}
|
||||
}
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
schema.merge({ foo: true }, { foo: true });
|
||||
}, /Key "foo": Boom!/);
|
||||
|
||||
});
|
||||
|
||||
it("should throw an error with custom properties when merge() throws an error with custom properties", () => {
|
||||
let schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
throw {
|
||||
get message() {
|
||||
return "Boom!";
|
||||
},
|
||||
booya: true
|
||||
};
|
||||
},
|
||||
validate() {}
|
||||
}
|
||||
});
|
||||
|
||||
let errorThrown = false;
|
||||
|
||||
try {
|
||||
schema.merge({ foo: true }, { foo: true });
|
||||
} catch (ex) {
|
||||
errorThrown = true;
|
||||
assert.isTrue(ex.booya);
|
||||
}
|
||||
|
||||
assert.isTrue(errorThrown);
|
||||
|
||||
});
|
||||
|
||||
it("should call the merge() strategy for one key when called", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate() {}
|
||||
}
|
||||
});
|
||||
|
||||
const result = schema.merge({ foo: true }, { foo: false });
|
||||
assert.propertyVal(result, "foo", "bar");
|
||||
});
|
||||
|
||||
it("should not call the merge() strategy when both objects don't contain the key", () => {
|
||||
|
||||
let called = false;
|
||||
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
called = true;
|
||||
},
|
||||
validate() {}
|
||||
}
|
||||
});
|
||||
|
||||
schema.merge({}, {});
|
||||
assert.isFalse(called, "The merge() strategy should not have been called.");
|
||||
});
|
||||
|
||||
it("should omit returning the key when the merge() strategy returns undefined", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return undefined;
|
||||
},
|
||||
validate() { }
|
||||
}
|
||||
});
|
||||
|
||||
const result = schema.merge({ foo: true }, { foo: false });
|
||||
assert.notProperty(result, "foo");
|
||||
});
|
||||
|
||||
it("should call the merge() strategy for two keys when called", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate() { }
|
||||
},
|
||||
bar: {
|
||||
merge() {
|
||||
return "baz";
|
||||
},
|
||||
validate() {}
|
||||
}
|
||||
});
|
||||
|
||||
const result = schema.merge({ foo: true, bar: 1 }, { foo: true, bar: 2 });
|
||||
assert.propertyVal(result, "foo", "bar");
|
||||
assert.propertyVal(result, "bar", "baz");
|
||||
});
|
||||
|
||||
it("should call the merge() strategy for two keys when called on three objects", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate() { }
|
||||
},
|
||||
bar: {
|
||||
merge() {
|
||||
return "baz";
|
||||
},
|
||||
validate() { }
|
||||
}
|
||||
});
|
||||
|
||||
const result = schema.merge(
|
||||
{ foo: true, bar: 1 },
|
||||
{ foo: true, bar: 3 },
|
||||
{ foo: false, bar: 2 }
|
||||
);
|
||||
assert.propertyVal(result, "foo", "bar");
|
||||
assert.propertyVal(result, "bar", "baz");
|
||||
});
|
||||
|
||||
it("should call the merge() strategy when defined as 'overwrite'", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge: "overwrite",
|
||||
validate() { }
|
||||
}
|
||||
});
|
||||
|
||||
const result = schema.merge(
|
||||
{ foo: true },
|
||||
{ foo: false }
|
||||
);
|
||||
assert.propertyVal(result, "foo", false);
|
||||
});
|
||||
|
||||
it("should call the merge() strategy when defined as 'assign'", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge: "assign",
|
||||
validate() { }
|
||||
}
|
||||
});
|
||||
|
||||
const result = schema.merge(
|
||||
{ foo: { bar: true } },
|
||||
{ foo: { baz: false } }
|
||||
);
|
||||
|
||||
assert.strictEqual(result.foo.bar, true);
|
||||
assert.strictEqual(result.foo.baz, false);
|
||||
});
|
||||
|
||||
it("should call the merge strategy when there's a subschema", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
name: {
|
||||
schema: {
|
||||
first: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
},
|
||||
last: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const result = schema.merge({
|
||||
name: {
|
||||
first: "n",
|
||||
last: "z"
|
||||
}
|
||||
}, {
|
||||
name: {
|
||||
first: "g"
|
||||
}
|
||||
});
|
||||
|
||||
assert.strictEqual(result.name.first, "g");
|
||||
assert.strictEqual(result.name.last, "z");
|
||||
});
|
||||
|
||||
it("should return separate objects when using subschema", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
age: {
|
||||
merge: "replace",
|
||||
validate: "number"
|
||||
},
|
||||
address: {
|
||||
schema: {
|
||||
street: {
|
||||
schema: {
|
||||
number: {
|
||||
merge: "replace",
|
||||
validate: "number"
|
||||
},
|
||||
streetName: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
state: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const baseObject = {
|
||||
address: {
|
||||
street: {
|
||||
number: 100,
|
||||
streetName: "Foo St"
|
||||
},
|
||||
state: "HA"
|
||||
}
|
||||
};
|
||||
|
||||
const result = schema.merge(baseObject, {
|
||||
age: 29
|
||||
});
|
||||
|
||||
assert.notStrictEqual(result.address.street, baseObject.address.street);
|
||||
assert.deepStrictEqual(result.address, baseObject.address);
|
||||
});
|
||||
|
||||
it("should not error when calling the merge strategy when there's a subschema and no matching key in second object", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
name: {
|
||||
schema: {
|
||||
first: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
},
|
||||
last: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const result = schema.merge({
|
||||
name: {
|
||||
first: "n",
|
||||
last: "z"
|
||||
}
|
||||
}, {
|
||||
});
|
||||
|
||||
assert.strictEqual(result.name.first, "n");
|
||||
assert.strictEqual(result.name.last, "z");
|
||||
});
|
||||
|
||||
it("should not error when calling the merge strategy when there's multiple subschemas and no matching key in second object", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
user: {
|
||||
schema: {
|
||||
name: {
|
||||
schema: {
|
||||
first: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
},
|
||||
last: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const result = schema.merge({
|
||||
user: {
|
||||
name: {
|
||||
first: "n",
|
||||
last: "z"
|
||||
}
|
||||
}
|
||||
}, {
|
||||
});
|
||||
|
||||
assert.strictEqual(result.user.name.first, "n");
|
||||
assert.strictEqual(result.user.name.last, "z");
|
||||
});
|
||||
|
||||
|
||||
});
|
||||
|
||||
describe("validate()", () => {
|
||||
|
||||
it("should throw an error when an unexpected key is found", () => {
|
||||
let schema = new ObjectSchema({});
|
||||
assert.throws(() => {
|
||||
schema.validate({ foo: true });
|
||||
}, /Unexpected key "foo"/);
|
||||
});
|
||||
|
||||
it("should not throw an error when an expected key is found", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate() {}
|
||||
}
|
||||
});
|
||||
|
||||
schema.validate({ foo: true });
|
||||
});
|
||||
|
||||
it("should pass the property value into validate() when key is found", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate(value) {
|
||||
assert.isTrue(value);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
schema.validate({ foo: true });
|
||||
});
|
||||
|
||||
it("should not throw an error when expected keys are found", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate() {}
|
||||
},
|
||||
bar: {
|
||||
merge() {
|
||||
return "baz";
|
||||
},
|
||||
validate() {}
|
||||
}
|
||||
});
|
||||
|
||||
schema.validate({ foo: true, bar: true });
|
||||
});
|
||||
|
||||
it("should not throw an error when expected keys are found with required keys", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate() { }
|
||||
},
|
||||
bar: {
|
||||
requires: ["foo"],
|
||||
merge() {
|
||||
return "baz";
|
||||
},
|
||||
validate() { }
|
||||
}
|
||||
});
|
||||
|
||||
schema.validate({ foo: true, bar: true });
|
||||
});
|
||||
|
||||
it("should throw an error when expected keys are found without required keys", () => {
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate() { }
|
||||
},
|
||||
baz: {
|
||||
merge() {
|
||||
return "baz";
|
||||
},
|
||||
validate() { }
|
||||
},
|
||||
bar: {
|
||||
name: "bar",
|
||||
requires: ["foo", "baz"],
|
||||
merge() { },
|
||||
validate() { }
|
||||
}
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
schema.validate({ bar: true });
|
||||
}, /Key "bar" requires keys "foo", "baz"./);
|
||||
});
|
||||
|
||||
|
||||
it("should throw an error when an expected key is found but is invalid", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate() {
|
||||
throw new Error("Invalid key.");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
schema.validate({ foo: true });
|
||||
}, /Key "foo": Invalid key/);
|
||||
});
|
||||
|
||||
it("should throw an error when an expected key is found but is invalid with a string validator", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate: "string"
|
||||
}
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
schema.validate({ foo: true });
|
||||
}, /Key "foo": Expected a string/);
|
||||
});
|
||||
|
||||
it("should throw an error when an expected key is found but is invalid with a number validator", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate: "number"
|
||||
}
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
schema.validate({ foo: true });
|
||||
}, /Key "foo": Expected a number/);
|
||||
});
|
||||
|
||||
it("should throw an error when a required key is missing", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
foo: {
|
||||
required: true,
|
||||
merge() {
|
||||
return "bar";
|
||||
},
|
||||
validate() {}
|
||||
}
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
schema.validate({});
|
||||
}, /Missing required key "foo"/);
|
||||
});
|
||||
|
||||
it("should throw an error when a subschema is provided and the value doesn't validate", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
name: {
|
||||
schema: {
|
||||
first: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
},
|
||||
last: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
schema.validate({
|
||||
name: {
|
||||
first: 123,
|
||||
last: "z"
|
||||
}
|
||||
});
|
||||
|
||||
}, /Key "name": Key "first": Expected a string/);
|
||||
});
|
||||
|
||||
it("should not throw an error when a subschema is provided and the value validates", () => {
|
||||
|
||||
schema = new ObjectSchema({
|
||||
name: {
|
||||
schema: {
|
||||
first: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
},
|
||||
last: {
|
||||
merge: "replace",
|
||||
validate: "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
schema.validate({
|
||||
name: {
|
||||
first: "n",
|
||||
last: "z"
|
||||
}
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
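The 659 removed lines above were the `ObjectSchema` unit tests, which remain the best documentation of the API consumed by `@humanwhocodes/config-array`. A condensed, hedged sketch of that API (per-key `merge`/`validate` functions or string shorthands, `required` keys, and nested `schema` definitions), assuming the package's public entry point:

```js
const { ObjectSchema } = require("@humanwhocodes/object-schema");

const schema = new ObjectSchema({
    name: {
        required: true,
        merge: "replace",    // string shorthand for a built-in merge strategy
        validate: "string"   // string shorthand for a built-in validator
    },
    count: {
        merge: "overwrite",
        validate: "number"
    }
});

schema.validate({ name: "example", count: 1 });   // passes
// schema.validate({ count: 1 });                 // would throw: Missing required key "name"

const merged = schema.merge({ name: "a", count: 1 }, { count: 2 });
// merged => { name: "a", count: 2 }  ("replace" keeps "a"; "overwrite" takes 2)
```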
186  node_modules/@humanwhocodes/object-schema/tests/validation-strategy.js  (generated, vendored)
@@ -1,186 +0,0 @@
|
||||
/**
|
||||
* @filedescription Merge Strategy Tests
|
||||
*/
|
||||
/* global it, describe, beforeEach */
|
||||
|
||||
"use strict";
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
const assert = require("chai").assert;
|
||||
const { ValidationStrategy } = require("../src/");
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Class
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
describe("ValidationStrategy", () => {
|
||||
|
||||
describe("boolean", () => {
|
||||
it("should not throw an error when the value is a boolean", () => {
|
||||
ValidationStrategy.boolean(true);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is null", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.boolean(null);
|
||||
}, /Expected a Boolean/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is a string", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.boolean("foo");
|
||||
}, /Expected a Boolean/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is a number", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.boolean(123);
|
||||
}, /Expected a Boolean/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is an object", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.boolean({});
|
||||
}, /Expected a Boolean/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("number", () => {
|
||||
it("should not throw an error when the value is a number", () => {
|
||||
ValidationStrategy.number(25);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is null", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.number(null);
|
||||
}, /Expected a number/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is a string", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.number("foo");
|
||||
}, /Expected a number/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is a boolean", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.number(true);
|
||||
}, /Expected a number/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is an object", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.number({});
|
||||
}, /Expected a number/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("object", () => {
|
||||
it("should not throw an error when the value is an object", () => {
|
||||
ValidationStrategy.object({});
|
||||
});
|
||||
|
||||
it("should throw an error when the value is null", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.object(null);
|
||||
}, /Expected an object/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is a string", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.object("");
|
||||
}, /Expected an object/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("array", () => {
|
||||
it("should not throw an error when the value is an array", () => {
|
||||
ValidationStrategy.array([]);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is null", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.array(null);
|
||||
}, /Expected an array/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is a string", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.array("");
|
||||
}, /Expected an array/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is an object", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.array({});
|
||||
}, /Expected an array/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("object?", () => {
|
||||
it("should not throw an error when the value is an object", () => {
|
||||
ValidationStrategy["object?"]({});
|
||||
});
|
||||
|
||||
it("should not throw an error when the value is null", () => {
|
||||
ValidationStrategy["object?"](null);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is a string", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy["object?"]("");
|
||||
}, /Expected an object/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("string", () => {
|
||||
it("should not throw an error when the value is a string", () => {
|
||||
ValidationStrategy.string("foo");
|
||||
});
|
||||
|
||||
it("should not throw an error when the value is an empty string", () => {
|
||||
ValidationStrategy.string("");
|
||||
});
|
||||
|
||||
it("should throw an error when the value is null", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.string(null);
|
||||
}, /Expected a string/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is an object", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy.string({});
|
||||
}, /Expected a string/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("string!", () => {
|
||||
it("should not throw an error when the value is an string", () => {
|
||||
ValidationStrategy["string!"]("foo");
|
||||
});
|
||||
|
||||
it("should throw an error when the value is an empty string", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy["string!"]("");
|
||||
}, /Expected a non-empty string/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is null", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy["string!"](null);
|
||||
}, /Expected a non-empty string/);
|
||||
});
|
||||
|
||||
it("should throw an error when the value is an object", () => {
|
||||
assert.throws(() => {
|
||||
ValidationStrategy["string!"]({});
|
||||
}, /Expected a non-empty string/);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
});
|
||||
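Similarly, the removed `ValidationStrategy` tests above spell out the built-in validators, including the `"string!"` (non-empty string) and `"object?"` (object or null) variants. A small sketch, again assuming the package's public entry point:

```js
const assert = require("assert");
const { ValidationStrategy } = require("@humanwhocodes/object-schema");

ValidationStrategy.string("");        // ok – plain "string" accepts the empty string
ValidationStrategy["object?"](null);  // ok – the "?" variant also accepts null
assert.throws(() => ValidationStrategy["string!"](""));  // "string!" rejects empty strings
assert.throws(() => ValidationStrategy.number("foo"));   // throws: Expected a number
```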
43  node_modules/@microsoft/eslint-formatter-sarif/README.md  (generated, vendored, new file)
@@ -0,0 +1,43 @@
# SARIF formatter for ESLint

`eslint-formatter-sarif` is a formatter for [ESLint](https://www.npmjs.com/package/eslint) that produces output in the SARIF (Static Analysis Results Interchange Format) v2.1.0 format.

It is available as an npm module [@microsoft/eslint-formatter-sarif](https://www.npmjs.com/package/@microsoft/eslint-formatter-sarif).

# Installation and usage

1. To install ESLint, follow the instructions at [Getting Started with ESLint](https://eslint.org/docs/3.0.0/user-guide/getting-started).

2. To install the ESLint SARIF formatter:

```
npm install @microsoft/eslint-formatter-sarif --save-dev
```

3. To run ESLint with the SARIF formatter:

```
./node-modules/.bin/eslint -f @microsoft/eslint-formatter-sarif -o yourfile.sarif yourfile.js
```

Note that you *cannot* use the abbreviated form `-f sarif`, because that only works when the npm module name is of the form `eslint-formatter-example`, and the ESLint SARIF formatter module is not `eslint-formatter-sarif`; it's `@microsoft/eslint-formatter-sarif`. Alternatively, you can use the form `-f @microsoft/sarif`.

# Developer details

To embed the contents of the analyzed source files in the resulting SARIF file:

```bat
set SARIF_ESLINT_EMBED=true
```

To disable content embedding:

```bat
set SARIF_ESLINT_EMBED=
```

To run unit tests:

```bat
RunTests.cmd
```
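The README above covers CLI usage only; the formatter can also be loaded through ESLint's Node API. A hedged sketch (the glob and output path are illustrative placeholders, and `loadFormatter` accepts the same package name the `-f` flag does):

```js
// Programmatic equivalent of `eslint -f @microsoft/eslint-formatter-sarif -o results.sarif`.
// "src/**/*.js" and "results.sarif" are placeholders.
const fs = require("fs");
const { ESLint } = require("eslint");

async function main() {
    const eslint = new ESLint();
    const results = await eslint.lintFiles(["src/**/*.js"]);
    const formatter = await eslint.loadFormatter("@microsoft/eslint-formatter-sarif");
    fs.writeFileSync("results.sarif", await formatter.format(results));
}

main();
```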
63  node_modules/@microsoft/eslint-formatter-sarif/package.json  (generated, vendored, new file)
@@ -0,0 +1,63 @@
|
||||
{
|
||||
"name": "@microsoft/eslint-formatter-sarif",
|
||||
"version": "3.1.0",
|
||||
"description": "ESLint formatter for the SARIF (Static Analysis Results Interchange Format) v2.1.0 file format",
|
||||
"main": "sarif.js",
|
||||
"directories": {
|
||||
"test": "test"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest"
|
||||
},
|
||||
"repository": {
|
||||
"type": "github",
|
||||
"url": "git+https://github.com/Microsoft/sarif-js-sdk.git"
|
||||
},
|
||||
"keywords": [
|
||||
"eslint",
|
||||
"formatter",
|
||||
"eslint formatter",
|
||||
"sarif",
|
||||
"sarif formatter",
|
||||
"sarif eslint",
|
||||
"eslint sarif"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
},
|
||||
"files": [
|
||||
"sarif.js"
|
||||
],
|
||||
"author": "Microsoft Corporation",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/Microsoft/sarif-js-sdk/issues"
|
||||
},
|
||||
"homepage": "https://github.com/microsoft/sarif-js-sdk/tree/main/packages/eslint-formatter-sarif#readme",
|
||||
"dependencies": {
|
||||
"eslint": "^8.9.0",
|
||||
"jschardet": "latest",
|
||||
"lodash": "^4.17.14",
|
||||
"utf8": "^3.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"rewire": "^6.0.0",
|
||||
"semver-regex": "^3.1.4"
|
||||
},
|
||||
"release-it": {
|
||||
"plugins": {
|
||||
"release-it-lerna-changelog": {
|
||||
"infile": "CHANGELOG.md",
|
||||
"launchEditor": true
|
||||
}
|
||||
},
|
||||
"git": {
|
||||
"tagName": "eslint-formatter-sarif@${version}"
|
||||
},
|
||||
"github": {
|
||||
"release": true,
|
||||
"releaseName": "eslint-formatter-sarif@${version}",
|
||||
"tokenRef": "GITHUB_AUTH"
|
||||
}
|
||||
}
|
||||
}
|
||||
298  node_modules/@microsoft/eslint-formatter-sarif/sarif.js  (generated, vendored, new file)
@@ -0,0 +1,298 @@
|
||||
/* eslint-disable unicorn/no-null */
|
||||
/**
|
||||
* @fileoverview SARIF v2.1 formatter
|
||||
* @author Microsoft
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const url = require('url');
|
||||
const utf8 = require('utf8');
|
||||
const lodash = require('lodash');
|
||||
const jschardet = require('jschardet');
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helper Functions
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Returns the version of used eslint package
|
||||
* @returns {string} eslint version or undefined
|
||||
* @private
|
||||
*/
|
||||
function getESLintVersion() {
|
||||
try {
|
||||
// Resolve ESLint relative to main entry script, not the formatter
|
||||
const { ESLint } = require.main.require('eslint');
|
||||
return ESLint.version;
|
||||
} catch {
|
||||
// Formatter was not called from eslint, return undefined
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the severity of warning or error
|
||||
* @param {Object} message message object to examine
|
||||
* @returns {string} severity level
|
||||
* @private
|
||||
*/
|
||||
function getResultLevel(message) {
|
||||
if (message.fatal || message.severity === 2) {
|
||||
return 'error';
|
||||
}
|
||||
return 'warning';
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Public Interface
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
module.exports = function (results, data) {
|
||||
const rulesMeta = lodash.get(data, 'rulesMeta', null);
|
||||
|
||||
const sarifLog = {
|
||||
version: '2.1.0',
|
||||
$schema: 'http://json.schemastore.org/sarif-2.1.0-rtm.5',
|
||||
runs: [
|
||||
{
|
||||
tool: {
|
||||
driver: {
|
||||
name: 'ESLint',
|
||||
informationUri: 'https://eslint.org',
|
||||
rules: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const eslintVersion = getESLintVersion();
|
||||
if (typeof eslintVersion !== 'undefined') {
|
||||
sarifLog.runs[0].tool.driver.version = eslintVersion;
|
||||
}
|
||||
|
||||
const sarifFiles = {};
|
||||
const sarifArtifactIndices = {};
|
||||
let nextArtifactIndex = 0;
|
||||
const sarifRules = {};
|
||||
const sarifRuleIndices = {};
|
||||
let nextRuleIndex = 0;
|
||||
const sarifResults = [];
|
||||
const embedFileContents = process.env.SARIF_ESLINT_EMBED === 'true';
|
||||
const ignoreSuppressed = process.env.SARIF_ESLINT_IGNORE_SUPPRESSED === 'true';
|
||||
|
||||
// Emit a tool configuration notification with this id if ESLint emits a message with
|
||||
// no ruleId (which indicates an internal error in ESLint).
|
||||
//
|
||||
// It is not clear whether we should treat these messages tool configuration notifications,
|
||||
// tool execution notifications, or a mixture of the two, based on the properties of the
|
||||
// message. https://github.com/microsoft/sarif-sdk/issues/1798, "ESLint formatter can't
|
||||
// distinguish between an internal error and a misconfiguration", tracks this issue.
|
||||
const internalErrorId = 'ESL0999';
|
||||
|
||||
const toolConfigurationNotifications = [];
|
||||
let executionSuccessful = true;
|
||||
|
||||
for (const result of results) {
|
||||
// Only add it if not already there.
|
||||
if (typeof sarifFiles[result.filePath] === 'undefined') {
|
||||
sarifArtifactIndices[result.filePath] = nextArtifactIndex++;
|
||||
|
||||
let contentsUtf8;
|
||||
|
||||
// Create a new entry in the files dictionary.
|
||||
sarifFiles[result.filePath] = {
|
||||
location: {
|
||||
uri: url.pathToFileURL(result.filePath),
|
||||
},
|
||||
};
|
||||
|
||||
if (embedFileContents) {
|
||||
try {
|
||||
// Try to get the file contents and encoding.
|
||||
const contents = fs.readFileSync(result.filePath);
|
||||
const encoding = jschardet.detect(contents);
|
||||
|
||||
// Encoding will be null if it could not be determined.
|
||||
if (encoding) {
|
||||
// Convert the content bytes to a UTF-8 string.
|
||||
contentsUtf8 = utf8.encode(contents.toString(encoding.encoding));
|
||||
|
||||
sarifFiles[result.filePath].contents = {
|
||||
text: contentsUtf8,
|
||||
};
|
||||
sarifFiles[result.filePath].encoding = encoding.encoding;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
}
|
||||
|
||||
const containsSuppressedMessages =
|
||||
result.suppressedMessages && result.suppressedMessages.length > 0;
|
||||
const messages =
|
||||
containsSuppressedMessages && !ignoreSuppressed
|
||||
? [...result.messages, ...result.suppressedMessages]
|
||||
: result.messages;
|
||||
|
||||
if (messages.length > 0) {
|
||||
for (const message of messages) {
|
||||
const sarifRepresentation = {
|
||||
level: getResultLevel(message),
|
||||
message: {
|
||||
text: message.message,
|
||||
},
|
||||
locations: [
|
||||
{
|
||||
physicalLocation: {
|
||||
artifactLocation: {
|
||||
uri: url.pathToFileURL(result.filePath),
|
||||
index: sarifArtifactIndices[result.filePath],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
if (message.ruleId) {
|
||||
sarifRepresentation.ruleId = message.ruleId;
|
||||
|
||||
if (rulesMeta && typeof sarifRules[message.ruleId] === 'undefined') {
|
||||
const meta = rulesMeta[message.ruleId];
|
||||
|
||||
// An unknown ruleId will return null. This check prevents unit test failure.
|
||||
if (meta) {
|
||||
sarifRuleIndices[message.ruleId] = nextRuleIndex++;
|
||||
|
||||
if (meta.docs) {
|
||||
// Create a new entry in the rules dictionary.
|
||||
sarifRules[message.ruleId] = {
|
||||
id: message.ruleId,
|
||||
helpUri: meta.docs.url,
|
||||
properties: {
|
||||
category: meta.docs.category,
|
||||
},
|
||||
};
|
||||
if (meta.docs.description) {
|
||||
sarifRules[message.ruleId].shortDescription = {
|
||||
text: meta.docs.description,
|
||||
};
|
||||
}
|
||||
// Some rulesMetas do not have docs property
|
||||
} else {
|
||||
sarifRules[message.ruleId] = {
|
||||
id: message.ruleId,
|
||||
helpUri: 'Please see details in message',
|
||||
properties: {
|
||||
category: 'No category provided',
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (sarifRuleIndices[message.ruleId] !== 'undefined') {
|
||||
sarifRepresentation.ruleIndex = sarifRuleIndices[message.ruleId];
|
||||
}
|
||||
|
||||
if (containsSuppressedMessages && !ignoreSuppressed) {
|
||||
sarifRepresentation.suppressions = message.suppressions
|
||||
? message.suppressions.map((suppression) => {
|
||||
return {
|
||||
kind: suppression.kind === 'directive' ? 'inSource' : 'external',
|
||||
justification: suppression.justification,
|
||||
};
|
||||
})
|
||||
: [];
|
||||
}
|
||||
} else {
|
||||
// ESLint produces a message with no ruleId when it encounters an internal
|
||||
// error. SARIF represents this as a tool execution notification rather
|
||||
// than as a result, and a notification has a descriptor.id property rather
|
||||
// than a ruleId property.
|
||||
sarifRepresentation.descriptor = {
|
||||
id: internalErrorId,
|
||||
};
|
||||
|
||||
// As far as we know, whenever ESLint produces a message with no rule id,
|
||||
// it has severity: 2 which corresponds to a SARIF error. But check here
|
||||
// anyway.
|
||||
if (sarifRepresentation.level === 'error') {
|
||||
// An error-level notification means that the tool failed to complete
|
||||
// its task.
|
||||
executionSuccessful = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (message.line > 0 || message.column > 0) {
|
||||
sarifRepresentation.locations[0].physicalLocation.region = {};
|
||||
if (message.line > 0) {
|
||||
sarifRepresentation.locations[0].physicalLocation.region.startLine = message.line;
|
||||
}
|
||||
if (message.column > 0) {
|
||||
sarifRepresentation.locations[0].physicalLocation.region.startColumn = message.column;
|
||||
}
|
||||
if (message.endLine > 0) {
|
||||
sarifRepresentation.locations[0].physicalLocation.region.endLine = message.endLine;
|
||||
}
|
||||
if (message.endColumn > 0) {
|
||||
sarifRepresentation.locations[0].physicalLocation.region.endColumn =
|
||||
message.endColumn;
|
||||
}
|
||||
}
|
||||
|
||||
if (message.source) {
|
||||
// Create an empty region if we don't already have one from the line / column block above.
|
||||
sarifRepresentation.locations[0].physicalLocation.region =
|
||||
sarifRepresentation.locations[0].physicalLocation.region || {};
|
||||
sarifRepresentation.locations[0].physicalLocation.region.snippet = {
|
||||
text: message.source,
|
||||
};
|
||||
}
|
||||
|
||||
if (message.ruleId) {
|
||||
sarifResults.push(sarifRepresentation);
|
||||
} else {
|
||||
toolConfigurationNotifications.push(sarifRepresentation);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(sarifFiles).length > 0) {
|
||||
sarifLog.runs[0].artifacts = [];
|
||||
|
||||
for (const path of Object.keys(sarifFiles)) {
|
||||
sarifLog.runs[0].artifacts.push(sarifFiles[path]);
|
||||
}
|
||||
}
|
||||
|
||||
// Per the SARIF spec §3.14.23, run.results must be present even if there are no results.
|
||||
// This provides a positive indication that the run completed and no results were found.
|
||||
sarifLog.runs[0].results = sarifResults;
|
||||
|
||||
if (toolConfigurationNotifications.length > 0) {
|
||||
sarifLog.runs[0].invocations = [
|
||||
{
|
||||
toolConfigurationNotifications: toolConfigurationNotifications,
|
||||
executionSuccessful: executionSuccessful,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
if (Object.keys(sarifRules).length > 0) {
|
||||
for (const ruleId of Object.keys(sarifRules)) {
|
||||
const rule = sarifRules[ruleId];
|
||||
sarifLog.runs[0].tool.driver.rules.push(rule);
|
||||
}
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
sarifLog,
|
||||
null, // replacer function
|
||||
2 // # of spaces for indents
|
||||
);
|
||||
};
|
||||
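To make the output of the formatter above easier to picture, here is a trimmed, illustrative sketch (values invented) of the SARIF log it assembles: one run with the ESLint driver, an `artifacts` array, and one result per lint message.

```js
// Illustrative shape of the SARIF log built by sarif.js above (values are made up).
const exampleSarifLog = {
    version: "2.1.0",
    $schema: "http://json.schemastore.org/sarif-2.1.0-rtm.5",
    runs: [{
        tool: { driver: { name: "ESLint", informationUri: "https://eslint.org", rules: [/* rule metadata */] } },
        artifacts: [{ location: { uri: "file:///project/src/app.js" } }],
        results: [{
            ruleId: "semi",
            ruleIndex: 0,
            level: "error",                      // severity 2 or fatal => "error", otherwise "warning"
            message: { text: "Missing semicolon." },
            locations: [{
                physicalLocation: {
                    artifactLocation: { uri: "file:///project/src/app.js", index: 0 },
                    region: { startLine: 1, startColumn: 10 }
                }
            }]
        }]
    }]
};
```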
85  node_modules/@sinonjs/fake-timers/README.md  (generated, vendored)
@@ -3,19 +3,27 @@
|
||||
[](https://codecov.io/gh/sinonjs/fake-timers)
|
||||
<a href="CODE_OF_CONDUCT.md"><img src="https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg" alt="Contributor Covenant" /></a>
|
||||
|
||||
JavaScript implementation of the timer APIs; `setTimeout`, `clearTimeout`, `setImmediate`, `clearImmediate`, `setInterval`, `clearInterval`, `requestAnimationFrame`, `cancelAnimationFrame`, `requestIdleCallback`, and `cancelIdleCallback`, along with a clock instance that controls the flow of time. FakeTimers also provides a `Date` implementation that gets its time from the clock.
|
||||
JavaScript implementation of the timer
|
||||
APIs; `setTimeout`, `clearTimeout`, `setImmediate`, `clearImmediate`, `setInterval`, `clearInterval`, `requestAnimationFrame`, `cancelAnimationFrame`, `requestIdleCallback`,
|
||||
and `cancelIdleCallback`, along with a clock instance that controls the flow of time. FakeTimers also provides a `Date`
|
||||
implementation that gets its time from the clock.
|
||||
|
||||
In addition in browser environment `@sinonjs/fake-timers` provides a `performance` implementation that gets its time from the clock. In Node environments FakeTimers provides a `nextTick` implementation that is synchronized with the clock - and a `process.hrtime` shim that works with the clock.
|
||||
In addition in browser environment `@sinonjs/fake-timers` provides a `performance` implementation that gets its time
|
||||
from the clock. In Node environments FakeTimers provides a `nextTick` implementation that is synchronized with the
|
||||
clock - and a `process.hrtime` shim that works with the clock.
|
||||
|
||||
`@sinonjs/fake-timers` can be used to simulate passing time in automated tests and other
|
||||
situations where you want the scheduling semantics, but don't want to actually
|
||||
wait.
|
||||
|
||||
`@sinonjs/fake-timers` is extracted from [Sinon.JS](https://github.com/sinonjs/sinon.js) and targets the [same runtimes](https://sinonjs.org/releases/latest/#supported-runtimes).
|
||||
`@sinonjs/fake-timers` is extracted from [Sinon.JS](https://github.com/sinonjs/sinon.js) and targets
|
||||
the [same runtimes](https://sinonjs.org/releases/latest/#supported-runtimes).
|
||||
|
||||
## Autocomplete, IntelliSense and TypeScript definitions
|
||||
|
||||
Version 7 introduced JSDoc to the codebase. This should provide autocomplete and type suggestions in supporting IDEs. If you need more elaborate type support, TypeScript definitions for the Sinon projects are independently maintained by the Definitely Types community:
|
||||
Version 7 introduced JSDoc to the codebase. This should provide autocomplete and type suggestions in supporting IDEs. If
|
||||
you need more elaborate type support, TypeScript definitions for the Sinon projects are independently maintained by the
|
||||
Definitely Types community:
|
||||
|
||||
```
|
||||
npm install -D @types/sinonjs__fake-timers
|
||||
@@ -29,7 +37,8 @@ npm install -D @types/sinonjs__fake-timers
|
||||
npm install @sinonjs/fake-timers
|
||||
```
|
||||
|
||||
If you want to use `@sinonjs/fake-timers` in a browser you can either build your own bundle or use [Skypack](https://www.skypack.dev).
|
||||
If you want to use `@sinonjs/fake-timers` in a browser you can either build your own bundle or
|
||||
use [Skypack](https://www.skypack.dev).
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -54,7 +63,8 @@ clock.tick(15);
|
||||
|
||||
Upon executing the last line, an interesting fact about the
|
||||
[Poblano](https://en.wikipedia.org/wiki/Poblano) will be printed synchronously to
|
||||
the screen. If you want to simulate asynchronous behavior, please see the `async` function variants (eg `clock.tick(time)` vs `await clock.tickAsync(time)`).
|
||||
the screen. If you want to simulate asynchronous behavior, please see the `async` function variants (
|
||||
eg `clock.tick(time)` vs `await clock.tickAsync(time)`).
|
||||
|
||||
The `next`, `runAll`, `runToFrame`, and `runToLast` methods are available to advance the clock. See the
|
||||
API Reference for more details.
|
||||
@@ -67,7 +77,9 @@ clock instance, not the browser's internals.
|
||||
|
||||
Calling `install` with no arguments achieves this. You can call `uninstall`
|
||||
later to restore things as they were again.
|
||||
Note that in NodeJS also the [timers](https://nodejs.org/api/timers.html) module will receive fake timers when using global scope.
|
||||
Note that in NodeJS the [timers](https://nodejs.org/api/timers.html)
|
||||
and [timers/promises](https://nodejs.org/api/timers.html#timers-promises-api) modules will also receive fake timers when
|
||||
using global scope.
|
||||
|
||||
```js
|
||||
// In the browser distribution, a global `FakeTimers` is already available
|
||||
@@ -143,22 +155,26 @@ Creates a clock. The default
|
||||
|
||||
The `now` argument may be a number (in milliseconds) or a Date object.
|
||||
|
||||
The `loopLimit` argument sets the maximum number of timers that will be run when calling `runAll()` before assuming that we have an infinite loop and throwing an error. The default is `1000`.
|
||||
The `loopLimit` argument sets the maximum number of timers that will be run when calling `runAll()` before assuming that
|
||||
we have an infinite loop and throwing an error. The default is `1000`.
|
||||
|
||||
### `var clock = FakeTimers.install([config])`
|
||||
|
||||
Installs FakeTimers using the specified config (otherwise with epoch `0` on the global scope).
|
||||
Note that in NodeJS also the [timers](https://nodejs.org/api/timers.html) module will receive fake timers when using global scope.
|
||||
Note that in NodeJS the [timers](https://nodejs.org/api/timers.html)
|
||||
and [timers/promises](https://nodejs.org/api/timers.html#timers-promises-api) modules will also receive fake timers when
|
||||
using global scope.
|
||||
The following configuration options are available
|
||||
|
||||
| Parameter | Type | Default | Description |
|
||||
| -------------------------------- | ----------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `config.now` | Number/Date | 0 | installs FakeTimers with the specified unix epoch |
|
||||
| `config.toFake` | String[] | ["setTimeout", "clearTimeout", "setImmediate", "clearImmediate","setInterval", "clearInterval", "Date", "requestAnimationFrame", "cancelAnimationFrame", "requestIdleCallback", "cancelIdleCallback", "hrtime", "performance"] | an array with explicit function names (or objects, in the case of "performance") to hijack. _When not set, FakeTimers will automatically fake all methods **except** `nextTick`_ e.g., `FakeTimers.install({ toFake: ["setTimeout","nextTick"]})` will fake only `setTimeout` and `nextTick` |
|
||||
| `config.loopLimit` | Number | 1000 | the maximum number of timers that will be run when calling runAll() |
|
||||
| `config.shouldAdvanceTime` | Boolean | false | tells FakeTimers to increment mocked time automatically based on the real system time shift (e.g. the mocked time will be incremented by 20ms for every 20ms change in the real system time) |
|
||||
| `config.advanceTimeDelta` | Number | 20 | relevant only when using with `shouldAdvanceTime: true`. increment mocked time by `advanceTimeDelta` ms every `advanceTimeDelta` ms change in the real system time. |
|
||||
| `config.shouldClearNativeTimers` | Boolean | false | tells FakeTimers to clear 'native' (i.e. not fake) timers by delegating to their respective handlers. These are not cleared by default, leading to potentially unexpected behavior if timers existed prior to installing FakeTimers. |
|
||||
| Parameter | Type | Default | Description |
|
||||
| -------------------------------- | ----------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `config.now` | Number/Date | 0 | installs FakeTimers with the specified unix epoch |
|
||||
| `config.toFake` | String[] | ["setTimeout", "clearTimeout", "setImmediate", "clearImmediate","setInterval", "clearInterval", "Date", "requestAnimationFrame", "cancelAnimationFrame", "requestIdleCallback", "cancelIdleCallback", "hrtime", "performance"] | an array with explicit function names (or objects, in the case of "performance") to hijack. \_When not set, FakeTimers will automatically fake all methods e.g., `FakeTimers.install({ toFake: ["setTimeout","nextTick"]})` will fake only `setTimeout` and `nextTick` |
|
||||
| `config.loopLimit` | Number | 1000 | the maximum number of timers that will be run when calling runAll() |
|
||||
| `config.shouldAdvanceTime` | Boolean | false | tells FakeTimers to increment mocked time automatically based on the real system time shift (e.g. the mocked time will be incremented by 20ms for every 20ms change in the real system time) |
|
||||
| `config.advanceTimeDelta` | Number | 20 | relevant only when using with `shouldAdvanceTime: true`. increment mocked time by `advanceTimeDelta` ms every `advanceTimeDelta` ms change in the real system time. |
|
||||
| `config.shouldClearNativeTimers` | Boolean | false | tells FakeTimers to clear 'native' (i.e. not fake) timers by delegating to their respective handlers. These are not cleared by default, leading to potentially unexpected behavior if timers existed prior to installing FakeTimers. |
|
||||
| `config.ignoreMissingTimers` | Boolean | false | tells FakeTimers to ignore missing timers that might not exist in the given environment |
|
||||
|
||||
### `var id = clock.setTimeout(callback, timeout)`
|
||||
|
||||
@@ -218,7 +234,9 @@ Cancels the callback scheduled by the provided id.
|
||||
|
||||
### `clock.requestIdleCallback(callback[, timeout])`
|
||||
|
||||
Queued the callback to be fired during idle periods to perform background and low priority work on the main event loop. Callbacks which have a timeout option will be fired no later than time in milliseconds. Returns an `id` which can be used to cancel the callback.
|
||||
Queued the callback to be fired during idle periods to perform background and low priority work on the main event loop.
|
||||
Callbacks which have a timeout option will be fired no later than time in milliseconds. Returns an `id` which can be
|
||||
used to cancel the callback.
|
||||
|
||||
### `clock.cancelIdleCallback(id)`
|
||||
|
||||
@@ -263,7 +281,8 @@ callbacks to execute _before_ running the timers.
|
||||
Advance the clock by jumping forward in time, firing callbacks at most once.
|
||||
`time` takes the same formats as [`clock.tick`](#clockticktime--await-clocktickasynctime).
|
||||
|
||||
This can be used to simulate the JS engine (such as a browser) being put to sleep and resumed later, skipping intermediary timers.
|
||||
This can be used to simulate the JS engine (such as a browser) being put to sleep and resumed later, skipping
|
||||
intermediary timers.
|
||||
|
||||
### `clock.reset()`
|
||||
|
||||
@@ -273,9 +292,11 @@ Useful to reset the state of the clock without having to `uninstall` and `instal
|
||||
|
||||
### `clock.runAll()` / `await clock.runAllAsync()`
|
||||
|
||||
This runs all pending timers until there are none remaining. If new timers are added while it is executing they will be run as well.
|
||||
This runs all pending timers until there are none remaining. If new timers are added while it is executing they will be
|
||||
run as well.
|
||||
|
||||
This makes it easier to run asynchronous tests to completion without worrying about the number of timers they use, or the delays in those timers.
|
||||
This makes it easier to run asynchronous tests to completion without worrying about the number of timers they use, or
|
||||
the delays in those timers.
|
||||
|
||||
It runs a maximum of `loopLimit` times after which it assumes there is an infinite loop of timers and throws an error.
|
||||
|
||||
@@ -284,7 +305,8 @@ callbacks to execute _before_ running the timers.
|
||||
|
||||
### `clock.runMicrotasks()`
|
||||
|
||||
This runs all pending microtasks scheduled with `nextTick` but none of the timers and is mostly useful for libraries using FakeTimers underneath and for running `nextTick` items without any timers.
|
||||
This runs all pending microtasks scheduled with `nextTick` but none of the timers and is mostly useful for libraries
|
||||
using FakeTimers underneath and for running `nextTick` items without any timers.
|
||||
|
||||
### `clock.runToFrame()`
|
||||
|
||||
@@ -323,11 +345,22 @@ Implements the `Date` object but using the clock to provide the correct time.
|
||||
|
||||
### `Performance`
|
||||
|
||||
Implements the `now` method of the [`Performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance/now) object but using the clock to provide the correct time. Only available in environments that support the Performance object (browsers mostly).
|
||||
Implements the `now` method of the [`Performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance/now)
|
||||
object but using the clock to provide the correct time. Only available in environments that support the Performance
|
||||
object (browsers mostly).
|
||||
|
||||
### `FakeTimers.withGlobal`
|
||||
|
||||
In order to support creating clocks based on separate or sandboxed environments (such as JSDOM), FakeTimers exports a factory method which takes single argument `global`, which it inspects to figure out what to mock and what features to support. When invoking this function with a global, you will get back an object with `timers`, `createClock` and `install` - same as the regular FakeTimers exports only based on the passed in global instead of the global environment.
|
||||
In order to support creating clocks based on separate or sandboxed environments (such as JSDOM), FakeTimers exports a
|
||||
factory method which takes single argument `global`, which it inspects to figure out what to mock and what features to
|
||||
support. When invoking this function with a global, you will get back an object with `timers`, `createClock`
|
||||
and `install` - same as the regular FakeTimers exports only based on the passed in global instead of the global
|
||||
environment.
|
||||
|
||||
## Promises and fake time
|
||||
|
||||
If you use a Promise library like Bluebird, note that you should either call `clock.runMicrotasks()` or make sure to
|
||||
_not_ mock `nextTick`.
|
||||
|
||||
## Running tests
|
||||
|
||||
@@ -349,8 +382,8 @@ $(npm bin)/mocha ./test/fake-timers-test.js
|
||||
|
||||
### In the browser
|
||||
|
||||
[Mochify](https://github.com/mantoni/mochify.js) is used to run the tests in
|
||||
PhantomJS. Make sure you have `phantomjs` installed. Then:
|
||||
[Mochify](https://github.com/mochify-js) is used to run the tests in headless
|
||||
Chrome.
|
||||
|
||||
```sh
|
||||
npm test-headless
|
||||
|
||||
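The README changes above are mostly re-wrapping, and the documented workflow is unchanged; a minimal usage sketch grounded in the API it describes (`install`, `tick`, `uninstall`):

```js
const FakeTimers = require("@sinonjs/fake-timers");

// Install fake timers on the global scope. In Node this also patches the
// `timers` and `timers/promises` modules, as the updated README notes.
const clock = FakeTimers.install({ now: 0, toFake: ["setTimeout", "Date"] });

setTimeout(() => console.log("fired at", Date.now()), 100);

clock.tick(100);   // advances mocked time; the callback fires synchronously and logs "fired at 100"
clock.uninstall(); // restores the real timers
```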
45  node_modules/@sinonjs/fake-timers/package.json  (generated, vendored)
```diff
@@ -1,12 +1,12 @@
{
"name": "@sinonjs/fake-timers",
"description": "Fake JavaScript timers",
"version": "11.2.2",
"version": "13.0.2",
"homepage": "https://github.com/sinonjs/fake-timers",
"author": "Christian Johansen",
"repository": {
"type": "git",
"url": "https://github.com/sinonjs/fake-timers.git"
"url": "git+https://github.com/sinonjs/fake-timers.git"
},
"bugs": {
"mail": "christian@cjohansen.no",
@@ -16,39 +16,52 @@
"scripts": {
"lint": "eslint .",
"test-node": "mocha --timeout 200 test/ integration-test/ -R dot --check-leaks",
"test-headless": "mochify --no-detect-globals --timeout=10000",
"test-headless": "mochify --driver puppeteer",
"test-check-coverage": "npm run test-coverage && nyc check-coverage",
"test-cloud": "mochify --wd --no-detect-globals --timeout=10000",
"test-coverage": "nyc --all --reporter text --reporter html --reporter lcovonly npm run test-node",
"test-cloud": "npm run test-edge && npm run test-firefox && npm run test-safari",
"test-edge": "BROWSER_NAME=MicrosoftEdge mochify --config mochify.webdriver.js",
"test-firefox": "BROWSER_NAME=firefox mochify --config mochify.webdriver.js",
"test-safari": "BROWSER_NAME=safari mochify --config mochify.webdriver.js",
"test-coverage": "nyc -x mochify.webdriver.js -x coverage --all --reporter text --reporter html --reporter lcovonly npm run test-node",
"test": "npm run test-node && npm run test-headless",
"prettier:check": "prettier --check '**/*.{js,css,md}'",
"prettier:write": "prettier --write '**/*.{js,css,md}'",
"preversion": "./scripts/preversion.sh",
"version": "./scripts/version.sh",
"postversion": "./scripts/postversion.sh",
"prepare": "husky install"
"prepare": "husky"
},
"lint-staged": {
"*.{js,css,md}": "prettier --check",
"*.js": "eslint"
},
"mochify": {
"reporter": "dot",
"timeout": 10000,
"bundle": "esbuild --bundle --sourcemap=inline --define:process.env.NODE_DEBUG=\"\"",
"bundle_stdin": "require",
"spec": "test/**/*-test.js"
},
"files": [
"src/"
],
"devDependencies": {
"@sinonjs/eslint-config": "^4.1.0",
"@sinonjs/referee-sinon": "11.0.0",
"husky": "^8.0.3",
"jsdom": "22.1.0",
"lint-staged": "15.0.1",
"mocha": "10.2.0",
"mochify": "9.2.0",
"nyc": "15.1.0",
"prettier": "3.0.3"
"@mochify/cli": "^0.4.1",
"@mochify/driver-puppeteer": "^0.4.0",
"@mochify/driver-webdriver": "^0.2.1",
"@sinonjs/eslint-config": "^5.0.3",
"@sinonjs/referee-sinon": "12.0.0",
"esbuild": "^0.23.1",
"husky": "^9.1.5",
"jsdom": "24.1.1",
"lint-staged": "15.2.9",
"mocha": "10.7.3",
"nyc": "17.0.0",
"prettier": "3.3.3"
},
"main": "./src/fake-timers-src.js",
"dependencies": {
"@sinonjs/commons": "^3.0.0"
"@sinonjs/commons": "^3.0.1"
},
"nyc": {
"branches": 85,
```

node_modules/@sinonjs/fake-timers/src/fake-timers-src.js (generated, vendored, 574 changes)

```diff
@@ -1,13 +1,18 @@
"use strict";

const globalObject = require("@sinonjs/commons").global;
let timersModule;
let timersModule, timersPromisesModule;
if (typeof require === "function" && typeof module === "object") {
try {
timersModule = require("timers");
} catch (e) {
// ignored
}
try {
timersPromisesModule = require("timers/promises");
} catch (e) {
// ignored
}
}

/**
@@ -94,6 +99,8 @@ if (typeof require === "function" && typeof module === "object") {
* @property {Function[]} methods - the methods that are faked
* @property {boolean} [shouldClearNativeTimers] inherited from config
* @property {{methodName:string, original:any}[] | undefined} timersModuleMethods
* @property {{methodName:string, original:any}[] | undefined} timersPromisesModuleMethods
* @property {Map<function(): void, AbortSignal>} abortListenerMap
*/
/* eslint-enable jsdoc/require-property-description */

@@ -107,6 +114,7 @@ if (typeof require === "function" && typeof module === "object") {
* @property {boolean} [shouldAdvanceTime] tells FakeTimers to increment mocked time automatically (default false)
* @property {number} [advanceTimeDelta] increment mocked time every <<advanceTimeDelta>> ms (default: 20ms)
* @property {boolean} [shouldClearNativeTimers] forwards clear timer calls to native functions if they are not fakes (default: false)
* @property {boolean} [ignoreMissingTimers] default is false, meaning asking to fake timers that are not present will throw an error
*/

/* eslint-disable jsdoc/require-property-description */
@@ -151,16 +159,26 @@ function withGlobal(_global) {
const NOOP_ARRAY = function () {
return [];
};
const timeoutResult = _global.setTimeout(NOOP, 0);
const addTimerReturnsObject = typeof timeoutResult === "object";
const hrtimePresent =
const isPresent = {};
let timeoutResult,
addTimerReturnsObject = false;

if (_global.setTimeout) {
isPresent.setTimeout = true;
timeoutResult = _global.setTimeout(NOOP, 0);
addTimerReturnsObject = typeof timeoutResult === "object";
}
isPresent.clearTimeout = Boolean(_global.clearTimeout);
isPresent.setInterval = Boolean(_global.setInterval);
isPresent.clearInterval = Boolean(_global.clearInterval);
isPresent.hrtime =
_global.process && typeof _global.process.hrtime === "function";
const hrtimeBigintPresent =
hrtimePresent && typeof _global.process.hrtime.bigint === "function";
const nextTickPresent =
isPresent.hrtimeBigint =
isPresent.hrtime && typeof _global.process.hrtime.bigint === "function";
isPresent.nextTick =
_global.process && typeof _global.process.nextTick === "function";
const utilPromisify = _global.process && require("util").promisify;
const performancePresent =
isPresent.performance =
_global.performance && typeof _global.performance.now === "function";
const hasPerformancePrototype =
_global.Performance &&
@@ -169,29 +187,60 @@ function withGlobal(_global) {
_global.performance &&
_global.performance.constructor &&
_global.performance.constructor.prototype;
const queueMicrotaskPresent = _global.hasOwnProperty("queueMicrotask");
const requestAnimationFramePresent =
isPresent.queueMicrotask = _global.hasOwnProperty("queueMicrotask");
isPresent.requestAnimationFrame =
_global.requestAnimationFrame &&
typeof _global.requestAnimationFrame === "function";
const cancelAnimationFramePresent =
isPresent.cancelAnimationFrame =
_global.cancelAnimationFrame &&
typeof _global.cancelAnimationFrame === "function";
const requestIdleCallbackPresent =
isPresent.requestIdleCallback =
_global.requestIdleCallback &&
typeof _global.requestIdleCallback === "function";
const cancelIdleCallbackPresent =
isPresent.cancelIdleCallbackPresent =
_global.cancelIdleCallback &&
typeof _global.cancelIdleCallback === "function";
const setImmediatePresent =
isPresent.setImmediate =
_global.setImmediate && typeof _global.setImmediate === "function";
const intlPresent = _global.Intl && typeof _global.Intl === "object";
isPresent.clearImmediate =
_global.clearImmediate && typeof _global.clearImmediate === "function";
isPresent.Intl = _global.Intl && typeof _global.Intl === "object";

_global.clearTimeout(timeoutResult);
if (_global.clearTimeout) {
_global.clearTimeout(timeoutResult);
}

const NativeDate = _global.Date;
const NativeIntl = _global.Intl;
let uniqueTimerId = idCounterStart;

if (NativeDate === undefined) {
throw new Error(
"The global scope doesn't have a `Date` object" +
" (see https://github.com/sinonjs/sinon/issues/1852#issuecomment-419622780)",
);
}
isPresent.Date = true;

/**
* The PerformanceEntry object encapsulates a single performance metric
* that is part of the browser's performance timeline.
*
* This is an object returned by the `mark` and `measure` methods on the Performance prototype
*/
class FakePerformanceEntry {
constructor(name, entryType, startTime, duration) {
this.name = name;
this.entryType = entryType;
this.startTime = startTime;
this.duration = duration;
}

toJSON() {
return JSON.stringify({ ...this });
}
}

/**
* @param {number} num
* @returns {boolean}
@@ -376,109 +425,76 @@ function withGlobal(_global) {
return infiniteLoopError;
}

/**
* @param {Date} target
* @param {Date} source
* @returns {Date} the target after modifications
*/
function mirrorDateProperties(target, source) {
let prop;
for (prop in source) {
if (source.hasOwnProperty(prop)) {
target[prop] = source[prop];
}
}

// set special now implementation
if (source.now) {
target.now = function now() {
return target.clock.now;
};
} else {
delete target.now;
}

// set special toSource implementation
if (source.toSource) {
target.toSource = function toSource() {
return source.toSource();
};
} else {
delete target.toSource;
}

// set special toString implementation
target.toString = function toString() {
return source.toString();
};

target.prototype = source.prototype;
target.parse = source.parse;
target.UTC = source.UTC;
target.prototype.toUTCString = source.prototype.toUTCString;
target.isFake = true;

return target;
}

//eslint-disable-next-line jsdoc/require-jsdoc
function createDate() {
/**
* @param {number} year
* @param {number} month
* @param {number} date
* @param {number} hour
* @param {number} minute
* @param {number} second
* @param {number} ms
* @returns {Date}
*/
function ClockDate(year, month, date, hour, minute, second, ms) {
// the Date constructor called as a function, ref Ecma-262 Edition 5.1, section 15.9.2.
// This remains so in the 10th edition of 2019 as well.
if (!(this instanceof ClockDate)) {
return new NativeDate(ClockDate.clock.now).toString();
class ClockDate extends NativeDate {
/**
* @param {number} year
* @param {number} month
* @param {number} date
* @param {number} hour
* @param {number} minute
* @param {number} second
* @param {number} ms
* @returns void
*/
// eslint-disable-next-line no-unused-vars
constructor(year, month, date, hour, minute, second, ms) {
// Defensive and verbose to avoid potential harm in passing
// explicit undefined when user does not pass argument
if (arguments.length === 0) {
super(ClockDate.clock.now);
} else {
super(...arguments);
}
}

// if Date is called as a constructor with 'new' keyword
// Defensive and verbose to avoid potential harm in passing
// explicit undefined when user does not pass argument
switch (arguments.length) {
case 0:
return new NativeDate(ClockDate.clock.now);
case 1:
return new NativeDate(year);
case 2:
return new NativeDate(year, month);
case 3:
return new NativeDate(year, month, date);
case 4:
return new NativeDate(year, month, date, hour);
case 5:
return new NativeDate(year, month, date, hour, minute);
case 6:
return new NativeDate(
year,
month,
date,
hour,
minute,
second,
);
default:
return new NativeDate(
year,
month,
date,
hour,
minute,
second,
ms,
);
static [Symbol.hasInstance](instance) {
return instance instanceof NativeDate;
}
}

return mirrorDateProperties(ClockDate, NativeDate);
ClockDate.isFake = true;

if (NativeDate.now) {
ClockDate.now = function now() {
return ClockDate.clock.now;
};
}

if (NativeDate.toSource) {
ClockDate.toSource = function toSource() {
return NativeDate.toSource();
};
}

ClockDate.toString = function toString() {
return NativeDate.toString();
};

// noinspection UnnecessaryLocalVariableJS
/**
* A normal Class constructor cannot be called without `new`, but Date can, so we need
* to wrap it in a Proxy in order to ensure this functionality of Date is kept intact
*
* @type {ClockDate}
*/
const ClockDateProxy = new Proxy(ClockDate, {
// handler for [[Call]] invocations (i.e. not using `new`)
apply() {
// the Date constructor called as a function, ref Ecma-262 Edition 5.1, section 15.9.2.
// This remains so in the 10th edition of 2019 as well.
if (this instanceof ClockDate) {
throw new TypeError(
"A Proxy should only capture `new` calls with the `construct` handler. This is not supposed to be possible, so check the logic.",
);
}

return new NativeDate(ClockDate.clock.now).toString();
},
});

return ClockDateProxy;
}

/**
```
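
The hunk above replaces the old `mirrorDateProperties`/switch-based fake `Date` with a class that extends the native `Date`, plus a `Proxy` whose `apply` trap preserves the quirk that `Date()` called without `new` returns a string. A standalone sketch of that pattern (a hypothetical `FrozenDate`, not the library's `ClockDate`), pinned at epoch 0:

```js
// Sketch of the pattern only: a class cannot be [[Call]]ed, so a Proxy "apply"
// trap restores Date's call-as-function behaviour.
const NativeDate = Date;

class FrozenDate extends NativeDate {
    constructor(...args) {
        // With no arguments, pin the date to a fixed "now" instead of the real clock.
        super(...(args.length === 0 ? [FrozenDate.now()] : args));
    }
    static now() {
        return 0; // the fake clock's notion of "now"
    }
    static [Symbol.hasInstance](instance) {
        return instance instanceof NativeDate;
    }
}

const FrozenDateProxy = new Proxy(FrozenDate, {
    // Date() called without `new` returns a string; mirror that here.
    apply() {
        return new NativeDate(FrozenDate.now()).toString();
    },
});

console.log(new FrozenDateProxy().getTime());        // 0
console.log(typeof FrozenDateProxy());                // "string"
console.log(new FrozenDateProxy() instanceof Date);   // true
```

The `Symbol.hasInstance` override keeps `instanceof` checks passing for native `Date` values created before the fake was installed, which appears to be why the diff adds it to `ClockDate`.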
```diff
@@ -931,6 +947,16 @@ function withGlobal(_global) {
timersModule[entry.methodName] = entry.original;
}
}
if (clock.timersPromisesModuleMethods !== undefined) {
for (
let j = 0;
j < clock.timersPromisesModuleMethods.length;
j++
) {
const entry = clock.timersPromisesModuleMethods[j];
timersPromisesModule[entry.methodName] = entry.original;
}
}
}

if (config.shouldAdvanceTime === true) {
@@ -940,6 +966,11 @@ function withGlobal(_global) {
// Prevent multiple executions which will completely remove these props
clock.methods = [];

for (const [listener, signal] of clock.abortListenerMap.entries()) {
signal.removeEventListener("abort", listener);
clock.abortListenerMap.delete(listener);
}

// return pending timers, to enable checking what timers remained on uninstall
if (!clock.timers) {
return [];
@@ -962,8 +993,7 @@ function withGlobal(_global) {
clock[`_${method}`] = target[method];

if (method === "Date") {
const date = mirrorDateProperties(clock[method], target[method]);
target[method] = date;
target[method] = clock[method];
} else if (method === "Intl") {
target[method] = clock[method];
} else if (method === "performance") {
@@ -1042,44 +1072,47 @@ function withGlobal(_global) {
Date: _global.Date,
};

if (setImmediatePresent) {
if (isPresent.setImmediate) {
timers.setImmediate = _global.setImmediate;
}

if (isPresent.clearImmediate) {
timers.clearImmediate = _global.clearImmediate;
}

if (hrtimePresent) {
if (isPresent.hrtime) {
timers.hrtime = _global.process.hrtime;
}

if (nextTickPresent) {
if (isPresent.nextTick) {
timers.nextTick = _global.process.nextTick;
}

if (performancePresent) {
if (isPresent.performance) {
timers.performance = _global.performance;
}

if (requestAnimationFramePresent) {
if (isPresent.requestAnimationFrame) {
timers.requestAnimationFrame = _global.requestAnimationFrame;
}

if (queueMicrotaskPresent) {
timers.queueMicrotask = true;
if (isPresent.queueMicrotask) {
timers.queueMicrotask = _global.queueMicrotask;
}

if (cancelAnimationFramePresent) {
if (isPresent.cancelAnimationFrame) {
timers.cancelAnimationFrame = _global.cancelAnimationFrame;
}

if (requestIdleCallbackPresent) {
if (isPresent.requestIdleCallback) {
timers.requestIdleCallback = _global.requestIdleCallback;
}

if (cancelIdleCallbackPresent) {
if (isPresent.cancelIdleCallback) {
timers.cancelIdleCallback = _global.cancelIdleCallback;
}

if (intlPresent) {
if (isPresent.Intl) {
timers.Intl = _global.Intl;
}

@@ -1098,13 +1131,6 @@ function withGlobal(_global) {
let nanos = 0;
const adjustedSystemTime = [0, 0]; // [millis, nanoremainder]

if (NativeDate === undefined) {
throw new Error(
"The global scope doesn't have a `Date` object" +
" (see https://github.com/sinonjs/sinon/issues/1852#issuecomment-419622780)",
);
}

const clock = {
now: start,
Date: createDate(),
@@ -1165,14 +1191,14 @@ function withGlobal(_global) {
return millis;
}

if (hrtimeBigintPresent) {
if (isPresent.hrtimeBigint) {
hrtime.bigint = function () {
const parts = hrtime();
return BigInt(parts[0]) * BigInt(1e9) + BigInt(parts[1]); // eslint-disable-line
};
}

if (intlPresent) {
if (isPresent.Intl) {
clock.Intl = createIntl();
clock.Intl.clock = clock;
}
@@ -1257,7 +1283,7 @@ function withGlobal(_global) {
return clearTimer(clock, timerId, "Interval");
};

if (setImmediatePresent) {
if (isPresent.setImmediate) {
clock.setImmediate = function setImmediate(func) {
return addTimer(clock, {
func: func,
@@ -1696,12 +1722,12 @@ function withGlobal(_global) {
clock.tick(ms);
};

if (performancePresent) {
if (isPresent.performance) {
clock.performance = Object.create(null);
clock.performance.now = fakePerformanceNow;
}

if (hrtimePresent) {
if (isPresent.hrtime) {
clock.hrtime = hrtime;
}

@@ -1749,6 +1775,20 @@ function withGlobal(_global) {
);
}

/**
* @param {string} timer/object the name of the thing that is not present
* @param timer
*/
function handleMissingTimer(timer) {
if (config.ignoreMissingTimers) {
return;
}

throw new ReferenceError(
`non-existent timers and/or objects cannot be faked: '${timer}'`,
);
}

let i, l;
const clock = createClock(config.now, config.loopLimit);
clock.shouldClearNativeTimers = config.shouldClearNativeTimers;
@@ -1757,13 +1797,12 @@ function withGlobal(_global) {
return uninstall(clock, config);
};

clock.abortListenerMap = new Map();

clock.methods = config.toFake || [];

if (clock.methods.length === 0) {
// do not fake nextTick by default - GitHub#126
clock.methods = Object.keys(timers).filter(function (key) {
return key !== "nextTick" && key !== "queueMicrotask";
});
clock.methods = Object.keys(timers);
}

if (config.shouldAdvanceTime === true) {
@@ -1797,18 +1836,30 @@ function withGlobal(_global) {
: NOOP;
}
});
// ensure `mark` returns a value that is valid
clock.performance.mark = (name) =>
new FakePerformanceEntry(name, "mark", 0, 0);
clock.performance.measure = (name) =>
new FakePerformanceEntry(name, "measure", 0, 100);
} else if ((config.toFake || []).includes("performance")) {
// user explicitly tried to fake performance when not present
throw new ReferenceError(
"non-existent performance object cannot be faked",
);
return handleMissingTimer("performance");
}
}
if (_global === globalObject && timersModule) {
clock.timersModuleMethods = [];
}
if (_global === globalObject && timersPromisesModule) {
clock.timersPromisesModuleMethods = [];
}
for (i = 0, l = clock.methods.length; i < l; i++) {
const nameOfMethodToReplace = clock.methods[i];

if (!isPresent[nameOfMethodToReplace]) {
handleMissingTimer(nameOfMethodToReplace);
// eslint-disable-next-line
continue;
}

if (nameOfMethodToReplace === "hrtime") {
if (
_global.process &&
```
|
||||
timersModule[nameOfMethodToReplace] =
|
||||
_global[nameOfMethodToReplace];
|
||||
}
|
||||
if (clock.timersPromisesModuleMethods !== undefined) {
|
||||
if (nameOfMethodToReplace === "setTimeout") {
|
||||
clock.timersPromisesModuleMethods.push({
|
||||
methodName: "setTimeout",
|
||||
original: timersPromisesModule.setTimeout,
|
||||
});
|
||||
|
||||
timersPromisesModule.setTimeout = (
|
||||
delay,
|
||||
value,
|
||||
options = {},
|
||||
) =>
|
||||
new Promise((resolve, reject) => {
|
||||
const abort = () => {
|
||||
options.signal.removeEventListener(
|
||||
"abort",
|
||||
abort,
|
||||
);
|
||||
clock.abortListenerMap.delete(abort);
|
||||
|
||||
// This is safe, there is no code path that leads to this function
|
||||
// being invoked before handle has been assigned.
|
||||
// eslint-disable-next-line no-use-before-define
|
||||
clock.clearTimeout(handle);
|
||||
reject(options.signal.reason);
|
||||
};
|
||||
|
||||
const handle = clock.setTimeout(() => {
|
||||
if (options.signal) {
|
||||
options.signal.removeEventListener(
|
||||
"abort",
|
||||
abort,
|
||||
);
|
||||
clock.abortListenerMap.delete(abort);
|
||||
}
|
||||
|
||||
resolve(value);
|
||||
}, delay);
|
||||
|
||||
if (options.signal) {
|
||||
if (options.signal.aborted) {
|
||||
abort();
|
||||
} else {
|
||||
options.signal.addEventListener(
|
||||
"abort",
|
||||
abort,
|
||||
);
|
||||
clock.abortListenerMap.set(
|
||||
abort,
|
||||
options.signal,
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (nameOfMethodToReplace === "setImmediate") {
|
||||
clock.timersPromisesModuleMethods.push({
|
||||
methodName: "setImmediate",
|
||||
original: timersPromisesModule.setImmediate,
|
||||
});
|
||||
|
||||
timersPromisesModule.setImmediate = (value, options = {}) =>
|
||||
new Promise((resolve, reject) => {
|
||||
const abort = () => {
|
||||
options.signal.removeEventListener(
|
||||
"abort",
|
||||
abort,
|
||||
);
|
||||
clock.abortListenerMap.delete(abort);
|
||||
|
||||
// This is safe, there is no code path that leads to this function
|
||||
// being invoked before handle has been assigned.
|
||||
// eslint-disable-next-line no-use-before-define
|
||||
clock.clearImmediate(handle);
|
||||
reject(options.signal.reason);
|
||||
};
|
||||
|
||||
const handle = clock.setImmediate(() => {
|
||||
if (options.signal) {
|
||||
options.signal.removeEventListener(
|
||||
"abort",
|
||||
abort,
|
||||
);
|
||||
clock.abortListenerMap.delete(abort);
|
||||
}
|
||||
|
||||
resolve(value);
|
||||
});
|
||||
|
||||
if (options.signal) {
|
||||
if (options.signal.aborted) {
|
||||
abort();
|
||||
} else {
|
||||
options.signal.addEventListener(
|
||||
"abort",
|
||||
abort,
|
||||
);
|
||||
clock.abortListenerMap.set(
|
||||
abort,
|
||||
options.signal,
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (nameOfMethodToReplace === "setInterval") {
|
||||
clock.timersPromisesModuleMethods.push({
|
||||
methodName: "setInterval",
|
||||
original: timersPromisesModule.setInterval,
|
||||
});
|
||||
|
||||
timersPromisesModule.setInterval = (
|
||||
delay,
|
||||
value,
|
||||
options = {},
|
||||
) => ({
|
||||
[Symbol.asyncIterator]: () => {
|
||||
const createResolvable = () => {
|
||||
let resolve, reject;
|
||||
const promise = new Promise((res, rej) => {
|
||||
resolve = res;
|
||||
reject = rej;
|
||||
});
|
||||
promise.resolve = resolve;
|
||||
promise.reject = reject;
|
||||
return promise;
|
||||
};
|
||||
|
||||
let done = false;
|
||||
let hasThrown = false;
|
||||
let returnCall;
|
||||
let nextAvailable = 0;
|
||||
const nextQueue = [];
|
||||
|
||||
const handle = clock.setInterval(() => {
|
||||
if (nextQueue.length > 0) {
|
||||
nextQueue.shift().resolve();
|
||||
} else {
|
||||
nextAvailable++;
|
||||
}
|
||||
}, delay);
|
||||
|
||||
const abort = () => {
|
||||
options.signal.removeEventListener(
|
||||
"abort",
|
||||
abort,
|
||||
);
|
||||
clock.abortListenerMap.delete(abort);
|
||||
|
||||
clock.clearInterval(handle);
|
||||
done = true;
|
||||
for (const resolvable of nextQueue) {
|
||||
resolvable.resolve();
|
||||
}
|
||||
};
|
||||
|
||||
if (options.signal) {
|
||||
if (options.signal.aborted) {
|
||||
done = true;
|
||||
} else {
|
||||
options.signal.addEventListener(
|
||||
"abort",
|
||||
abort,
|
||||
);
|
||||
clock.abortListenerMap.set(
|
||||
abort,
|
||||
options.signal,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
next: async () => {
|
||||
if (options.signal?.aborted && !hasThrown) {
|
||||
hasThrown = true;
|
||||
throw options.signal.reason;
|
||||
}
|
||||
|
||||
if (done) {
|
||||
return { done: true, value: undefined };
|
||||
}
|
||||
|
||||
if (nextAvailable > 0) {
|
||||
nextAvailable--;
|
||||
return { done: false, value: value };
|
||||
}
|
||||
|
||||
const resolvable = createResolvable();
|
||||
nextQueue.push(resolvable);
|
||||
|
||||
await resolvable;
|
||||
|
||||
if (returnCall && nextQueue.length === 0) {
|
||||
returnCall.resolve();
|
||||
}
|
||||
|
||||
if (options.signal?.aborted && !hasThrown) {
|
||||
hasThrown = true;
|
||||
throw options.signal.reason;
|
||||
}
|
||||
|
||||
if (done) {
|
||||
return { done: true, value: undefined };
|
||||
}
|
||||
|
||||
return { done: false, value: value };
|
||||
},
|
||||
return: async () => {
|
||||
if (done) {
|
||||
return { done: true, value: undefined };
|
||||
}
|
||||
|
||||
if (nextQueue.length > 0) {
|
||||
returnCall = createResolvable();
|
||||
await returnCall;
|
||||
}
|
||||
|
||||
clock.clearInterval(handle);
|
||||
done = true;
|
||||
|
||||
if (options.signal) {
|
||||
options.signal.removeEventListener(
|
||||
"abort",
|
||||
abort,
|
||||
);
|
||||
clock.abortListenerMap.delete(abort);
|
||||
}
|
||||
|
||||
return { done: true, value: undefined };
|
||||
},
|
||||
};
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return clock;
|
||||
|
||||
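
Beyond patching the callback-style globals, the hunk above also swaps out `timers/promises` when the real Node global is being faked, registering abort listeners in `clock.abortListenerMap` so they can be removed on uninstall. A rough usage sketch of what that enables (assuming Node's `timers/promises` module and the behaviour implemented above):

```js
// Sketch: the promisified setTimeout resolves once the fake clock is advanced.
const FakeTimers = require("@sinonjs/fake-timers");
const timersPromises = require("timers/promises");

async function main() {
    const clock = FakeTimers.install();

    const pending = timersPromises.setTimeout(1000, "done");
    clock.tick(1000);
    console.log(await pending); // "done"

    // An AbortSignal rejects the pending promise and unregisters the listener.
    const ac = new AbortController();
    const aborted = timersPromises.setTimeout(1000, "never", { signal: ac.signal });
    ac.abort();
    await aborted.catch((reason) => console.log("aborted:", reason.name)); // typically "AbortError"

    clock.uninstall();
}

main();
```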

node_modules/@sinonjs/samsam/docs/index.md (generated, vendored, 10 changes)

````diff
@@ -249,7 +249,7 @@ samsam.match(
return "yeah";
},
},
"Yeah!"
"Yeah!",
); // true
```

@@ -278,7 +278,7 @@ samsam.match(
return "yeah!";
},
},
/yeah/
/yeah/,
); // true
samsam.match(234, /[a-z]/); // false
```
@@ -296,7 +296,7 @@ samsam.match(
return "42";
},
},
42
42,
); // true
samsam.match(234, 1234); // false
```
@@ -328,7 +328,7 @@ samsam.match(
},
function () {
return true;
}
},
);

// false
@@ -367,7 +367,7 @@ samsam.match(
},
{
name: "Chris",
}
},
);

// false
````
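
For orientation, the `samsam.match(actual, matcher)` calls in these hunks compare a value against a matcher; a small sketch of the semantics the excerpts rely on (assuming the documented behaviour):

```js
// Sketch of the matcher semantics the docs excerpts above rely on.
const samsam = require("@sinonjs/samsam");

samsam.match("yeah!", /yeah/);   // true  - a regexp matcher is tested against the value
samsam.match(234, /[a-z]/);      // false - as in the excerpt above
samsam.match(234, 1234);         // false - primitive matchers compare for equality
```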

node_modules/@sinonjs/samsam/lib/create-matcher.js (generated, vendored, 219 changes)

```diff
@@ -44,7 +44,7 @@ function createMatcher(expectation, message) {

if (arguments.length > 2) {
throw new TypeError(
`Expected 1 or 2 arguments, received ${arguments.length}`
`Expected 1 or 2 arguments, received ${arguments.length}`,
);
}

@@ -89,9 +89,12 @@ createMatcher.falsy = createMatcher(function (actual) {
}, "falsy");

createMatcher.same = function (expectation) {
return createMatcher(function (actual) {
return expectation === actual;
}, `same(${valueToString(expectation)})`);
return createMatcher(
function (actual) {
return expectation === actual;
},
`same(${valueToString(expectation)})`,
);
};

createMatcher.in = function (arrayOfExpectations) {
@@ -99,11 +102,14 @@ createMatcher.in = function (arrayOfExpectations) {
throw new TypeError("array expected");
}

return createMatcher(function (actual) {
return some(arrayOfExpectations, function (expectation) {
return expectation === actual;
});
}, `in(${valueToString(arrayOfExpectations)})`);
return createMatcher(
function (actual) {
return some(arrayOfExpectations, function (expectation) {
return expectation === actual;
});
},
`in(${valueToString(arrayOfExpectations)})`,
);
};

createMatcher.typeOf = function (type) {
@@ -125,12 +131,15 @@ createMatcher.instanceOf = function (type) {
type,
Symbol.hasInstance,
"type",
"[Symbol.hasInstance]"
"[Symbol.hasInstance]",
);
}
return createMatcher(function (actual) {
return actual instanceof type;
}, `instanceOf(${functionName(type) || objectToString(type)})`);
return createMatcher(
function (actual) {
return actual instanceof type;
},
`instanceOf(${functionName(type) || objectToString(type)})`,
);
};

/**
@@ -259,111 +268,137 @@ createMatcher.some = function (predicate) {
createMatcher.array = createMatcher.typeOf("array");

createMatcher.array.deepEquals = function (expectation) {
return createMatcher(function (actual) {
// Comparing lengths is the fastest way to spot a difference before iterating through every item
var sameLength = actual.length === expectation.length;
return (
typeOf(actual) === "array" &&
sameLength &&
every(actual, function (element, index) {
var expected = expectation[index];
return typeOf(expected) === "array" &&
typeOf(element) === "array"
? createMatcher.array.deepEquals(expected).test(element)
: deepEqual(expected, element);
})
);
}, `deepEquals([${iterableToString(expectation)}])`);
return createMatcher(
function (actual) {
// Comparing lengths is the fastest way to spot a difference before iterating through every item
var sameLength = actual.length === expectation.length;
return (
typeOf(actual) === "array" &&
sameLength &&
every(actual, function (element, index) {
var expected = expectation[index];
return typeOf(expected) === "array" &&
typeOf(element) === "array"
? createMatcher.array.deepEquals(expected).test(element)
: deepEqual(expected, element);
})
);
},
`deepEquals([${iterableToString(expectation)}])`,
);
};

createMatcher.array.startsWith = function (expectation) {
return createMatcher(function (actual) {
return (
typeOf(actual) === "array" &&
every(expectation, function (expectedElement, index) {
return actual[index] === expectedElement;
})
);
}, `startsWith([${iterableToString(expectation)}])`);
return createMatcher(
function (actual) {
return (
typeOf(actual) === "array" &&
every(expectation, function (expectedElement, index) {
return actual[index] === expectedElement;
})
);
},
`startsWith([${iterableToString(expectation)}])`,
);
};

createMatcher.array.endsWith = function (expectation) {
return createMatcher(function (actual) {
// This indicates the index in which we should start matching
var offset = actual.length - expectation.length;
return createMatcher(
function (actual) {
// This indicates the index in which we should start matching
var offset = actual.length - expectation.length;

return (
typeOf(actual) === "array" &&
every(expectation, function (expectedElement, index) {
return actual[offset + index] === expectedElement;
})
);
}, `endsWith([${iterableToString(expectation)}])`);
return (
typeOf(actual) === "array" &&
every(expectation, function (expectedElement, index) {
return actual[offset + index] === expectedElement;
})
);
},
`endsWith([${iterableToString(expectation)}])`,
);
};

createMatcher.array.contains = function (expectation) {
return createMatcher(function (actual) {
return (
typeOf(actual) === "array" &&
every(expectation, function (expectedElement) {
return arrayIndexOf(actual, expectedElement) !== -1;
})
);
}, `contains([${iterableToString(expectation)}])`);
return createMatcher(
function (actual) {
return (
typeOf(actual) === "array" &&
every(expectation, function (expectedElement) {
return arrayIndexOf(actual, expectedElement) !== -1;
})
);
},
`contains([${iterableToString(expectation)}])`,
);
};

createMatcher.map = createMatcher.typeOf("map");

createMatcher.map.deepEquals = function mapDeepEquals(expectation) {
return createMatcher(function (actual) {
// Comparing lengths is the fastest way to spot a difference before iterating through every item
var sameLength = actual.size === expectation.size;
return (
typeOf(actual) === "map" &&
sameLength &&
every(actual, function (element, key) {
return expectation.has(key) && expectation.get(key) === element;
})
);
}, `deepEquals(Map[${iterableToString(expectation)}])`);
return createMatcher(
function (actual) {
// Comparing lengths is the fastest way to spot a difference before iterating through every item
var sameLength = actual.size === expectation.size;
return (
typeOf(actual) === "map" &&
sameLength &&
every(actual, function (element, key) {
return (
expectation.has(key) && expectation.get(key) === element
);
})
);
},
`deepEquals(Map[${iterableToString(expectation)}])`,
);
};

createMatcher.map.contains = function mapContains(expectation) {
return createMatcher(function (actual) {
return (
typeOf(actual) === "map" &&
every(expectation, function (element, key) {
return actual.has(key) && actual.get(key) === element;
})
);
}, `contains(Map[${iterableToString(expectation)}])`);
return createMatcher(
function (actual) {
return (
typeOf(actual) === "map" &&
every(expectation, function (element, key) {
return actual.has(key) && actual.get(key) === element;
})
);
},
`contains(Map[${iterableToString(expectation)}])`,
);
};

createMatcher.set = createMatcher.typeOf("set");

createMatcher.set.deepEquals = function setDeepEquals(expectation) {
return createMatcher(function (actual) {
// Comparing lengths is the fastest way to spot a difference before iterating through every item
var sameLength = actual.size === expectation.size;
return (
typeOf(actual) === "set" &&
sameLength &&
every(actual, function (element) {
return expectation.has(element);
})
);
}, `deepEquals(Set[${iterableToString(expectation)}])`);
return createMatcher(
function (actual) {
// Comparing lengths is the fastest way to spot a difference before iterating through every item
var sameLength = actual.size === expectation.size;
return (
typeOf(actual) === "set" &&
sameLength &&
every(actual, function (element) {
return expectation.has(element);
})
);
},
`deepEquals(Set[${iterableToString(expectation)}])`,
);
};

createMatcher.set.contains = function setContains(expectation) {
return createMatcher(function (actual) {
return (
typeOf(actual) === "set" &&
every(expectation, function (element) {
return actual.has(element);
})
);
}, `contains(Set[${iterableToString(expectation)}])`);
return createMatcher(
function (actual) {
return (
typeOf(actual) === "set" &&
every(expectation, function (element) {
return actual.has(element);
})
);
},
`contains(Set[${iterableToString(expectation)}])`,
);
};

createMatcher.bool = createMatcher.typeOf("boolean");
```
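
All of these reformatted calls use the two-argument `createMatcher(test, message)` form, and the matchers they build expose a `test` method (used above for nested `deepEquals`). A brief hedged sketch of how such a matcher is used:

```js
// Sketch: building and using a matcher via the createMatcher(test, message) form above.
const { createMatcher } = require("@sinonjs/samsam");

const isEven = createMatcher(function (actual) {
    return actual % 2 === 0;
}, "isEven"); // the message string labels the matcher in failure output

console.log(isEven.test(4)); // true
console.log(isEven.test(5)); // false

// The built-in array matcher shown in the diff works the same way.
console.log(createMatcher.array.deepEquals([1, [2, 3]]).test([1, [2, 3]])); // true
```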

node_modules/@sinonjs/samsam/lib/create-matcher/assert-type.js (generated, vendored, 2 changes)

```diff
@@ -16,7 +16,7 @@ function assertType(value, type, name) {
var actual = typeOf(value);
if (actual !== type) {
throw new TypeError(
`Expected type of ${name} to be ${type}, but was ${actual}`
`Expected type of ${name} to be ${type}, but was ${actual}`,
);
}
}
```
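
For context, the helper touched by this trailing-comma hunk simply throws a descriptive `TypeError` when an argument has the wrong type. A minimal sketch of its contract (using `typeof` in place of samsam's internal type-of helper, so this is an approximation):

```js
// Sketch of the assertType contract: throw a TypeError naming the argument and expected type.
function assertType(value, type, name) {
    const actual = typeof value; // samsam uses its own richer type-of; typeof suffices for the sketch
    if (actual !== type) {
        throw new TypeError(
            `Expected type of ${name} to be ${type}, but was ${actual}`,
        );
    }
}

assertType(42, "string", "message"); // throws: Expected type of message to be string, but was number
```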
Some files were not shown because too many files have changed in this diff.