Compare commits

...

169 Commits

Author SHA1 Message Date
Edoardo Pirovano
384cfc42b2 Merge pull request #879 from github/update-v1.0.29-67c0353a
Merge main into v1
2022-01-21 11:11:07 +00:00
github-actions[bot]
5a1e31dc6a 1.0.29 2022-01-21 10:47:33 +00:00
Edoardo Pirovano
67c0353a8c Merge pull request #878 from github/revert-wait-for-processing
Revert "Start waiting for processing by default."
2022-01-21 10:45:30 +00:00
Chris Gavin
7ec25e02e3 Add a changelog note. 2022-01-21 10:26:18 +00:00
Chris Gavin
713eacdf6c Revert "Start waiting for processing by default."
This reverts commit b661ef1697.
2022-01-21 10:25:57 +00:00
Edoardo Pirovano
c0b507e521 Merge pull request #875 from github/mergeback/v1.0.28-to-main-8a4b243f
Mergeback v1.0.28 refs/heads/v1 into main
2022-01-18 22:21:27 +00:00
github-actions[bot]
d563b098d7 Update checked-in dependencies 2022-01-18 21:54:31 +00:00
github-actions[bot]
fca047627b 1.0.29 2022-01-18 21:35:32 +00:00
github-actions[bot]
28fe8e7028 Update changelog and version after v1.0.28 2022-01-18 21:35:30 +00:00
Andrew Eisenberg
8a4b243fbf Merge pull request #874 from github/update-v1.0.28-b31df3ff
Merge main into v1
2022-01-18 13:33:47 -08:00
Andrew Eisenberg
19970ae6b5 Update changelog
Add an entry and move an entry.
2022-01-18 12:44:43 -08:00
github-actions[bot]
ec1b16574e 1.0.28 2022-01-18 19:13:38 +00:00
Henning Makholm
b31df3ff95 Merge pull request #866 from github/hmakholm/pr/2.7.5
Bump CodeQL version to 2.7.5
2022-01-17 19:45:58 +01:00
Henning Makholm
776db51d2e Merge remote-tracking branch 'origin/main' into hmakholm/pr/2.7.5 2022-01-17 18:27:39 +01:00
Andrew Eisenberg
b886234637 Merge pull request #872 from github/aeisenberg/category-with-tool
Change category uniqueness test
2022-01-17 09:19:39 -08:00
Henning Makholm
9913c9bfa5 Merge remote-tracking branch 'origin/main' into hmakholm/pr/2.7.5 2022-01-17 18:06:10 +01:00
Andrew Eisenberg
8de62beb50 Merge branch 'main' into aeisenberg/category-with-tool 2022-01-17 09:00:14 -08:00
Andrew Eisenberg
b6fbccaba1 Merge pull request #873 from github/nickrolfe/ruby
Update warning about interpreted languages to mention Ruby
2022-01-14 09:05:50 -08:00
Nick Rolfe
df0c306daf Update warning about interpreted languages to mention Ruby 2022-01-14 11:57:29 +00:00
Andrew Eisenberg
ab1f709732 Allow duplicate categories in the same validation step
A single SARIF file should be allowed to have duplicated
categories.
2022-01-13 10:35:03 -08:00
Andrew Eisenberg
8454e21c9c Change category uniqueness test
Turboscan only allows a single combination of tool name and automation
details id for testing category uniqueness.

Previously, the check in the action was not entirely correct since it
only looked at the _category_ and not the combination of the category
and the tool name.

It's even more precise now since it is looking at the actual, computed
value of the automation details id, rather than an inputted value of
the category.

This change also includes a refactoring where the action is now avoiding
multiple parsing/stringifying of the sarif files. Instead, sarif is
parsed once at the start of the process and stringified once, after
sarif processing is completely finished.
2022-01-12 15:26:34 -08:00
Henning Makholm
d85c3e58ec Bump CodeQL version to 2.7.5 2022-01-12 19:36:20 +01:00
Edoardo Pirovano
cbabe47a0b Merge pull request #871 from github/mergeback/v1.0.27-to-main-cd783c8a
Mergeback v1.0.27 refs/heads/v1 into main
2022-01-11 22:22:02 +00:00
github-actions[bot]
f8a48f464d Update checked-in dependencies 2022-01-11 21:57:25 +00:00
github-actions[bot]
f6f23f8671 1.0.28 2022-01-11 21:43:35 +00:00
github-actions[bot]
c2a7379048 Update changelog and version after v1.0.27 2022-01-11 21:43:33 +00:00
Edoardo Pirovano
cd783c8a29 Merge pull request #870 from github/update-v1.0.27-faa9ba73
Merge main into v1
2022-01-11 21:42:44 +00:00
github-actions[bot]
300c8b6dcb 1.0.27 2022-01-11 20:35:30 +00:00
Edoardo Pirovano
faa9ba7363 Merge pull request #869 from github/edoardo/windows-11-error
Refuse to run on Windows 11
2022-01-11 20:34:11 +00:00
Edoardo Pirovano
d2a0fc83dc Refuse to run on Windows 11 2022-01-11 18:34:33 +00:00
Edoardo Pirovano
71112ab35d Merge pull request #868 from edoardopirovano/debug-artifact-name
Make name of debugging artifact and DB within it configurable
2022-01-07 17:54:41 +00:00
Edoardo Pirovano
e677af3fd0 Make name of debugging artifact and DB within it configurable 2022-01-07 15:10:26 +00:00
Henry Mercer
848e5140d4 Merge pull request #857 from github/henrymercer/ml-powered-queries
Add support for running ML-powered queries for JS `security-extended` behind `ml_powered_queries` feature flag
2022-01-06 17:55:06 +00:00
Henry Mercer
e7fe6da378 Allow patch version of ML-powered queries pack to be bumped 2022-01-06 11:58:03 +00:00
Henry Mercer
2159631658 Only run ML-powered queries with v2.7.5 or newer of the CLI 2022-01-06 11:58:03 +00:00
Henry Mercer
9de1702400 Document use of redundant feature flag API call 2022-01-06 11:58:02 +00:00
Henry Mercer
efded22908 Bump the version of the ATM query pack to 0.0.2 2022-01-06 11:57:33 +00:00
Henry Mercer
5602bd50bf Test loading of ML-powered queries 2022-01-06 11:57:33 +00:00
Henry Mercer
2f4be8e34b Run ML-powered queries for JS security-extended behind feature flag 2022-01-06 11:57:33 +00:00
Edoardo Pirovano
9763bdd6ec Merge pull request #860 from edoardopirovano/always-upload-db
Always upload DB when in debug mode
2022-01-04 18:25:33 +00:00
Edoardo Pirovano
00d4d60204 Always upload DB when in debug mode 2022-01-04 16:49:31 +00:00
Edoardo Pirovano
e5d84de18b Merge pull request #861 from github/remove-debug-output
Remove debugging output
2022-01-02 10:05:46 +00:00
Edoardo Pirovano
ea1acc573a Merge branch 'main' into remove-debug-output 2022-01-02 09:41:49 +00:00
Edoardo Pirovano
79ea6d6a7c Merge pull request #862 from github/aeisenberg/fix-python-tests
Force virtualenv version
2022-01-02 09:41:38 +00:00
Andrew Eisenberg
3e50d096f8 Force virtualenv version
Force the virtualenv version to be 20.11 or less.
The 20.12 version is failing for python 2 right now.
2022-01-01 19:13:10 -08:00
Edoardo Pirovano
cca1cfdacf Remove debugging output 2021-12-31 16:32:08 +00:00
Edoardo Pirovano
cdea582765 Merge pull request #859 from github/update-supported-enterprise-server-versions
Update supported GitHub Enterprise Server versions.
2021-12-29 09:39:28 +00:00
GitHub
3e59dee9e2 Update supported GitHub Enterprise Server versions. 2021-12-29 00:07:19 +00:00
Henry Mercer
249c7ffce1 Merge pull request #856 from github/henrymercer/feature-flagging
Feature flagging via the GitHub API
2021-12-16 16:18:46 +00:00
Henry Mercer
254816c2d2 Stub feature flag API endpoint in tests 2021-12-16 13:39:18 +00:00
Henry Mercer
6d62c245ec Represent feature flags using an enum
Replaces the previous string literal type
2021-12-16 13:38:34 +00:00
Henry Mercer
5e87034b3b Explicitly pass repository to feature flags constructor
As suggested in review: The `GITHUB_REPOSITORY` environment variable is
only available on Actions. Passing it in explicitly avoids potentially
crashing if this code is called from the runner.
2021-12-15 17:03:43 +00:00
Henry Mercer
621e0794ac Throw an error if the feature flag API request errors 2021-12-15 16:34:26 +00:00
Henry Mercer
d6499fad61 Use new feature flag architecture when uploading databases 2021-12-15 13:17:05 +00:00
Henry Mercer
04671efa1d Add support for feature flagging via the GitHub API 2021-12-15 13:16:33 +00:00
Andrew Eisenberg
e1f05902cd Merge pull request #855 from github/correct-changelog
Move changelog entry into correct place
2021-12-14 11:06:40 -08:00
Thomas Horstmeyer
f9e96fa857 Move changelog entry into correct place 2021-12-14 13:49:52 +00:00
Andrew Eisenberg
14a5537e13 Merge pull request #853 from github/aeisenberg/remove-scheduled
Remove scheduled releases
2021-12-13 19:14:37 -08:00
Andrew Eisenberg
d3eb4974a3 Merge branch 'main' into aeisenberg/remove-scheduled 2021-12-13 18:37:38 -08:00
Edoardo Pirovano
39216d10d3 Merge pull request #854 from edoardopirovano/remove-dotnet-restore
Remove `dotnet restore` calls from CI checks
2021-12-13 23:29:58 +00:00
Edoardo Pirovano
265a7db16a Remove dotnet restore calls from CI checks 2021-12-13 18:04:46 +00:00
Andrew Eisenberg
f623d4cec3 Remove scheduled releases
The action will be released roughly every two weeks along
with the CodeQL CLI release process.
2021-12-13 09:34:53 -08:00
Chris Gavin
eacec3646a Merge pull request #843 from github/wait-for-processing-by-default
Start waiting for processing by default.
2021-12-13 12:19:11 +00:00
Chris Gavin
e0e2abc1a5 Merge branch 'main' into wait-for-processing-by-default 2021-12-13 12:04:22 +00:00
Chris Gavin
716b5980cd Merge pull request #844 from github/duplicated-output
Stop printing all output twice.
2021-12-13 11:57:11 +00:00
Chris Gavin
1d83f2a0bc Merge branch 'main' into duplicated-output 2021-12-13 11:44:22 +00:00
Andrew Eisenberg
ce77f88627 Merge pull request #849 from github/mergeback/v1.0.26-to-main-5f532563
Mergeback v1.0.26 refs/heads/v1 into main
2021-12-10 13:41:53 -08:00
github-actions[bot]
a777b51ef7 Update checked-in dependencies 2021-12-10 19:09:21 +00:00
github-actions[bot]
88fbabe21d 1.0.27 2021-12-10 18:40:13 +00:00
github-actions[bot]
eeb215b041 Update changelog and version after v1.0.26 2021-12-10 18:40:11 +00:00
Andrew Eisenberg
5f53256358 Merge pull request #848 from github/update-v1.0.26-07825549
Merge main into v1
2021-12-10 10:39:01 -08:00
github-actions[bot]
25a5103778 1.0.26 2021-12-10 18:20:01 +00:00
Edoardo Pirovano
0782554948 Merge pull request #845 from github/refuse-broken-versions
Refuse to use broken versions in the toolcache
2021-12-09 16:53:57 +00:00
Edoardo Pirovano
705f634a1d Refuse to use broken versions in the toolcache 2021-12-09 13:43:57 +00:00
Chris Gavin
b7b7607959 Stop printing all output twice. 2021-12-09 13:21:32 +00:00
Chris Gavin
7bcc6564d4 Add more context to a change note.
Co-authored-by: Thomas Horstmeyer <73262256+cannist@users.noreply.github.com>
2021-12-09 12:53:20 +00:00
Chris Gavin
b661ef1697 Start waiting for processing by default. 2021-12-09 10:21:34 +00:00
Andrew Eisenberg
6ad00fd084 Merge pull request #842 from github/aeisenberg/cli-2.7.3
Prepare for the CodeQL 2.7.3 release
2021-12-08 17:46:32 -08:00
Andrew Eisenberg
fccdee04ba Prepare for the CodeQL 2.7.3 release 2021-12-08 17:18:05 -08:00
Andrew Eisenberg
e694ca6192 Merge pull request #841 from github/aeisenberg/fix-del
Always use `force: true` for del
2021-12-08 16:01:45 -08:00
Andrew Eisenberg
67d11b5928 Always use force: true for del 2021-12-08 15:37:43 -08:00
Aditya Sharad
924a64d2e0 Merge pull request #840 from github/aeisenberg/fix-rm
Remove rmDir references
2021-12-08 15:08:58 -08:00
Andrew Eisenberg
45dc27d3c1 Remove rmDir references
`rmDir` is not available on the node version used by the actions runner.

Instead, use the `del` package. It is safe, well-tested, and
cross-platform.
2021-12-08 12:11:31 -08:00
Andrew Eisenberg
cbed0358c6 Merge pull request #839 from github/revert-837-aeisenberg/cli-2.7.3
Revert "Bump default CodeQL version to 2.7.3"
2021-12-08 10:58:53 -08:00
Andrew Eisenberg
a8cf6f42c2 Revert "Bump default CodeQL version to 2.7.3" 2021-12-08 10:07:10 -08:00
Andrew Eisenberg
eebe7c46f1 Merge pull request #837 from github/aeisenberg/cli-2.7.3
Bump default CodeQL version to 2.7.3
2021-12-08 09:02:37 -08:00
Edoardo Pirovano
dc32d5448f Add a workflow step to do dotnet restore 2021-12-08 14:47:42 +00:00
Andrew Eisenberg
fac22de4f9 Autobuild: Prefix invocations with CODEQL_RUNNER
Co-authored-by: Aditya Sharad <6874315+adityasharad@users.noreply.github.com>
2021-12-07 20:50:17 -08:00
Andrew Eisenberg
0a1efd7f45 Update changelog 2021-12-07 10:50:12 -08:00
Andrew Eisenberg
043e3deaeb Bump default CodeQL version to 2.7.3 2021-12-07 10:45:17 -08:00
Edoardo Pirovano
0dbcb55617 Merge pull request #835 from github/mergeback/v1.0.25-to-main-546b30f3
Mergeback v1.0.25 refs/heads/v1 into main
2021-12-06 16:23:57 +00:00
github-actions[bot]
00c59b98ce Update checked-in dependencies 2021-12-06 15:25:38 +00:00
github-actions[bot]
7069ada3ed 1.0.26 2021-12-06 15:14:23 +00:00
github-actions[bot]
dd1f9a96d8 Update changelog and version after v1.0.25 2021-12-06 15:14:20 +00:00
Robert
546b30f35a Merge pull request #834 from github/update-v1.0.25-f44219c9
Merge main into v1
2021-12-06 15:13:18 +00:00
github-actions[bot]
d1dde03d7a 1.0.25 2021-12-06 14:42:24 +00:00
Robert
f44219c94b Merge pull request #832 from github/robertbrignull/upload_domain
Upload using uploads.github.com if enabled for that repository
2021-12-06 10:24:27 +00:00
Robert
bdaac951f7 Merge branch 'main' into robertbrignull/upload_domain 2021-12-06 09:59:11 +00:00
Robert
a82f53a364 Merge pull request #833 from github/robertbrignull/fix-dotnet
Deal with new dotnet version
2021-12-06 09:58:58 +00:00
Robert
f721f011bf Add call to dotnet restore 2021-12-06 09:43:07 +00:00
Robert
c82e09aa41 Delete bundled db before recreating 2021-12-01 12:25:57 +00:00
Robert
460d053698 Upload using uploads.github.com if enabled for that repository 2021-12-01 12:13:46 +00:00
Edoardo Pirovano
3bf14e85d8 Merge pull request #829 from github/mergeback/v1.0.24-to-main-e095058b
Mergeback v1.0.24 refs/heads/v1 into main
2021-11-23 11:32:20 +00:00
github-actions[bot]
13a9d6c442 Update checked-in dependencies 2021-11-23 11:12:31 +00:00
github-actions[bot]
dd65833ab6 1.0.25 2021-11-23 10:59:43 +00:00
github-actions[bot]
c2d9e4b48f Update changelog and version after v1.0.24 2021-11-23 10:59:41 +00:00
Edoardo Pirovano
e095058bfa Merge pull request #828 from github/update-v1.0.24-0b242db7
Merge main into v1
2021-11-23 10:58:34 +00:00
Edoardo Pirovano
2c99f99c4a Merge branch 'v1' into update-v1.0.24-0b242db7 2021-11-23 09:54:57 +00:00
github-actions[bot]
bcd7e6896f 1.0.24 2021-11-23 09:52:25 +00:00
Edoardo Pirovano
0b242db78f Merge pull request #827 from github/2.7.2-release
Bump default CodeQL version to 2.7.2
2021-11-22 15:17:04 +00:00
Edoardo Pirovano
c897659213 Add CHANGELOG note for new bundle version 2021-11-22 13:11:20 +00:00
Edoardo Pirovano
8b902e1723 Bump default CodeQL version to 2.7.2 2021-11-22 13:09:42 +00:00
Edoardo Pirovano
26567f6a49 Merge pull request #819 from github/mergeback/v1.0.23-to-main-a627e9fa
Mergeback v1.0.23 refs/heads/v1 into main
2021-11-20 00:30:04 +00:00
github-actions[bot]
dbf7ac4b37 Update checked-in dependencies 2021-11-19 23:52:01 +00:00
github-actions[bot]
077f7b2532 1.0.24 2021-11-19 23:46:10 +00:00
github-actions[bot]
a392055010 Update changelog and version after v1.0.23 2021-11-19 23:46:10 +00:00
Henning Makholm
0aea878963 fix changelog 2021-11-19 23:46:10 +00:00
github-actions[bot]
bca71988d3 1.0.23 2021-11-19 23:46:10 +00:00
Edoardo Pirovano
02e1cdcd36 Merge pull request #823 from github/fix-update-workflow
Remove `persist-credentials: false` from workflow
2021-11-19 23:45:25 +00:00
Edoardo Pirovano
4860ed1ad4 Remove persist-credentials: false from workflow 2021-11-19 17:55:10 +00:00
Chris Gavin
3e36cddb07 Merge pull request #781 from github/wait-for-processing
Add an option to allow waiting until an analysis has been processed before finishing the Action.
2021-11-18 12:02:43 +00:00
Chris Gavin
b9bd459b70 Add a clarifying comment to a break. 2021-11-17 15:52:36 +00:00
Chris Gavin
215c4f5ff5 Move the delay to the end of the loop. 2021-11-17 15:51:50 +00:00
Chris Gavin
4eef7ef32c Split out waiting for processing. 2021-11-17 13:20:36 +00:00
Chris Gavin
e0b9b9a248 Check for errors in the analysis status response. 2021-11-17 12:33:42 +00:00
Chris Gavin
823bb21bbb Add a default value for wait-for-processing. 2021-11-17 12:33:38 +00:00
Chris Gavin
49fc4c9b40 Reduce log message level.
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2021-11-17 12:14:48 +00:00
Chris Gavin
21a786fda0 Improve a log message.
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2021-11-17 12:14:25 +00:00
Chris Gavin
316ad9d919 Add an option to allow waiting until an analysis has been processed before finishing the Action. 2021-11-17 12:14:13 +00:00
Henning Makholm
a627e9fa50 Merge pull request #818 from github/update-v1.0.23-2ecc17d7
Merge main into v1
2021-11-16 20:56:34 +01:00
Henning Makholm
160021fe53 fix changelog 2021-11-16 20:39:37 +01:00
github-actions[bot]
3f2269bf58 1.0.23 2021-11-16 18:48:44 +00:00
Edoardo Pirovano
2ecc17d74f Merge pull request #817 from edoardopirovano/respect-ld-preload
Respect value of `LD_PRELOAD` given by the CLI
2021-11-16 00:22:50 +00:00
Edoardo Pirovano
9b506fed7c Respect value of LD_PRELOAD given by the CLI 2021-11-15 22:16:59 +00:00
Henning Makholm
2803f4a792 Merge pull request #816 from github/hmakholm/pr/2.7.1
Update bundle to version 2.7.1
2021-11-15 20:27:55 +01:00
Henning Makholm
720bf9d157 Merge remote-tracking branch 'origin/main' into hmakholm/pr/2.7.1 2021-11-15 20:05:55 +01:00
Andrew Eisenberg
bbf0a22e84 Merge pull request #801 from github/aeisenberg/upload-by-category
Allow multiple uploads in a single job
2021-11-15 10:57:49 -08:00
Andrew Eisenberg
d7b5c618a4 Merge branch 'main' into aeisenberg/upload-by-category 2021-11-15 10:33:14 -08:00
Henning Makholm
37a4db94ad Update bundle to version 2.7.1 2021-11-15 19:32:53 +01:00
Andrew Eisenberg
6a98a4b500 Allow multiple uploads in a single job
They must all have a unique category. The category will be
converted into an environment variable.
2021-11-15 09:16:25 -08:00
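As an illustration of this change, one job can now run the upload step more than once, provided each run has its own category. A minimal sketch, assuming the `sarif_file` and `category` inputs of the `upload-sarif` action; the file paths and category names are made up:

    steps:
      # Two uploads in the same job; each must use a distinct category.
      - uses: github/codeql-action/upload-sarif@v1
        with:
          sarif_file: results/tool-a.sarif
          category: tool-a
      - uses: github/codeql-action/upload-sarif@v1
        with:
          sarif_file: results/tool-b.sarif
          category: tool-b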
Edoardo Pirovano
ea8a175a94 Merge pull request #812 from github/sanitize-artifact-name
Sanitize artifact name before using
2021-11-08 11:23:19 +00:00
Edoardo Pirovano
f360da772a Sanitize artifact name before using 2021-11-05 08:40:16 +00:00
Andrew Eisenberg
ea169430d2 Merge pull request #809 from edoardopirovano/include-run-id
Include matrix information in debug artifact name
2021-11-04 14:13:02 -07:00
Edoardo Pirovano
375c14729e Include matrix information in debug artifact name 2021-11-04 19:20:24 +00:00
Edoardo Pirovano
0442e71a2a Merge pull request #806 from github/mergeback/v1.0.22-to-main-5581e08a
Mergeback v1.0.22 refs/heads/v1 into main
2021-11-04 12:35:39 +00:00
Edoardo Pirovano
3832953584 Update version numbers in lockfiles 2021-11-04 12:21:48 +00:00
github-actions[bot]
3ce10aec2e 1.0.23 2021-11-04 11:49:59 +00:00
github-actions[bot]
c4b0d49ea3 Update changelog and version after v1.0.22 2021-11-04 11:49:57 +00:00
Edoardo Pirovano
5581e08a65 Merge pull request #805 from github/update-v1.0.22-ae284321
Merge main into v1
2021-11-04 11:48:43 +00:00
github-actions[bot]
df5cf240b0 1.0.22 2021-11-04 11:15:19 +00:00
Edoardo Pirovano
ae2843216b Merge pull request #770 from github/enable-new-tracing
Re-enable new style of tracing
2021-11-04 10:03:53 +00:00
Edoardo Pirovano
5156a89668 Re-enable new style of tracing 2021-11-04 09:47:56 +00:00
Chuan-kai Lin
8f0825e9c0 Merge pull request #804 from github/cklin/include-custom-query-help
Include custom query help in analysis results
2021-11-03 13:33:39 -07:00
Chuan-kai Lin
9a44540e25 Include custom query help in analysis results 2021-11-03 13:19:01 -07:00
Andrew Eisenberg
ff3272d4e1 Merge pull request #786 from github/aeisenberg/dependabot
Fix dependabot issues
2021-11-03 09:56:33 -07:00
Andrew Eisenberg
56c7489b94 Merge branch 'main' into aeisenberg/dependabot 2021-11-02 10:35:28 -07:00
Edoardo Pirovano
3ba4184b13 Merge pull request #798 from edoardopirovano/debug-mode
Add an option to upload some debugging artifacts
2021-11-01 16:44:51 +00:00
Edoardo Pirovano
bc31f604d3 Add an option to upload some debugging artifacts 2021-11-01 16:12:50 +00:00
Chuan-kai Lin
4293754ed2 Merge pull request #738 from github/cklin/extractor-ram-threads-options
Add RAM and threads options to init action
2021-10-28 15:38:33 -07:00
Chuan-kai Lin
70b730eb7d Add RAM and threads options to init action 2021-10-28 15:09:59 -07:00
Edoardo Pirovano
2905689d8a Merge pull request #800 from github/mergeback/v1.0.21-to-main-e891551d
Mergeback v1.0.21 refs/heads/v1 into main
2021-10-28 23:04:40 +01:00
github-actions[bot]
1d123b770b Update checked-in dependencies 2021-10-28 21:33:38 +00:00
github-actions[bot]
9661171991 1.0.22 2021-10-28 21:19:05 +00:00
github-actions[bot]
e04751618e Update changelog and version after v1.0.21 2021-10-28 21:19:04 +00:00
Chuan-kai Lin
e891551dd4 Merge pull request #799 from github/update-v1.0.21-a53b8d0e
Merge main into v1
2021-10-28 14:18:14 -07:00
github-actions[bot]
bd48dc5be5 1.0.21 2021-10-28 20:46:17 +00:00
Chuan-kai Lin
a53b8d0ed1 Merge pull request #795 from github/cklin/codeql-cli-2.7.0-update
Update CodeQL bundle to 20211025 / 2.7.0
2021-10-25 14:10:10 -07:00
Chuan-kai Lin
22747bcb77 Update CodeQL bundle to 20211025 / 2.7.0 2021-10-25 13:45:26 -07:00
Andrew Eisenberg
531c6ba7c8 Fix dependabot issues 2021-10-25 08:56:16 -07:00
881 changed files with 64832 additions and 49849 deletions


@@ -28,7 +28,6 @@ runs:
echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == *"stable"* ]]; then
export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
echo "Hello $VERSION"
echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == "latest" ]]; then
echo "::set-output name=tools-url::latest"

77 .github/workflows/__debug-artifacts.yml generated vendored Normal file

@@ -0,0 +1,77 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Debug artifact upload
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  debug-artifacts:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest]
    name: Debug artifact upload
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
          debug: true
          debug-artifact-name: my-debug-artifacts
          debug-database-name: my-db
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        id: analysis
      - uses: actions/download-artifact@v2
        with:
          name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
      - shell: bash
        run: |
          LANGUAGES="cpp csharp go java javascript python"
          for language in $LANGUAGES; do
            echo "Checking $language"
            if [[ ! -f "$language.sarif" ]] ; then
              echo "Missing a SARIF file for $language"
              exit 1
            fi
            if [[ ! -f "my-db-$language.zip" ]] ; then
              echo "Missing a database bundle for $language"
              exit 1
            fi
            if [[ ! -d "$language/log" ]] ; then
              echo "Missing logs for $language"
              exit 1
            fi
          done
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

63 .github/workflows/__extractor-ram-threads.yml generated vendored Normal file

@@ -0,0 +1,63 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Extractor ram and threads options test
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  extractor-ram-threads:
    strategy:
      matrix:
        version: [latest]
        os: [ubuntu-latest]
    name: Extractor ram and threads options test
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          languages: java
          ram: 230
          threads: 1
      - name: Assert Results
        shell: bash
        run: |
          if [ "${CODEQL_RAM}" != "230" ]; then
            echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
            exit 1
          fi
          if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
            echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
            exit 1
          fi
          if [ "${CODEQL_THREADS}" != "1" ]; then
            echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
            exit 1
          fi
          if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
            echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
            exit 1
          fi
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

88 .github/workflows/__unset-environment.yml generated vendored Normal file

@@ -0,0 +1,88 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Test unsetting environment variables
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  unset-environment:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest]
    name: Test unsetting environment variables
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          db-location: ${{ runner.temp }}/customDbLocation
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
      - uses: ./../action/analyze
        id: analysis
        env:
          TEST_MODE: true
      - shell: bash
        run: |
          CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
          if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for CPP, or created it in the wrong location."
            exit 1
          fi
          CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
          if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for C Sharp, or created it in the wrong location."
            exit 1
          fi
          GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
          if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Go, or created it in the wrong location."
            exit 1
          fi
          JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
          if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Java, or created it in the wrong location."
            exit 1
          fi
          JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
          if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Javascript, or created it in the wrong location."
            exit 1
          fi
          PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
          if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Python, or created it in the wrong location."
            exit 1
          fi
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -393,3 +393,42 @@ jobs:
        # Deliberately don't use TEST_MODE here. This is specifically testing
        # the compatibility with the API.
        runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
  runner-extractor-ram-threads-options:
    name: Runner ubuntu extractor RAM and threads options
    needs: [check-js, check-node-modules]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Build runner
        run: |
          cd runner
          npm install
          npm run build-runner
      - name: Run init
        run: |
          runner/dist/codeql-runner-linux init --ram=230 --threads=1 --repository $GITHUB_REPOSITORY --languages java --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
      - name: Assert Results
        shell: bash
        run: |
          . ./codeql-runner/codeql-env.sh
          if [ "${CODEQL_RAM}" != "230" ]; then
            echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
            exit 1
          fi
          if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
            echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
            exit 1
          fi
          if [ "${CODEQL_THREADS}" != "1" ]; then
            echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
            exit 1
          fi
          if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
            echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
            exit 1
          fi


@@ -1,7 +1,5 @@
name: Update release branch
on:
  schedule:
    - cron: 0 9 * * 1
  repository_dispatch:
  # Example of how to trigger this:
  # curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'


@@ -1,9 +1,44 @@
# CodeQL Action and CodeQL Runner Changelog
## [UNRELEASED]
## 1.0.29 - 21 Jan 2022
- The feature to wait for SARIF processing to complete after upload has been disabled by default due to a bug in its interaction with pull requests from forks.
## 1.0.28 - 18 Jan 2022
- Update default CodeQL bundle version to 2.7.5. [#866](https://github.com/github/codeql-action/pull/866)
- Fix a bug where SARIF files were failing upload due to an invalid test for unique categories. [#872](https://github.com/github/codeql-action/pull/872)
## 1.0.27 - 11 Jan 2022
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`.
## 1.0.26 - 10 Dec 2021
- Update default CodeQL bundle version to 2.7.3. [#842](https://github.com/github/codeql-action/pull/842)
## 1.0.25 - 06 Dec 2021
No user facing changes.
## 1.0.24 - 23 Nov 2021
- Update default CodeQL bundle version to 2.7.2. [#827](https://github.com/github/codeql-action/pull/827)
## 1.0.23 - 16 Nov 2021
- The `upload-sarif` action now allows multiple uploads in a single job, as long as they have different categories. [#801](https://github.com/github/codeql-action/pull/801)
- Update default CodeQL bundle version to 2.7.1. [#816](https://github.com/github/codeql-action/pull/816)
## 1.0.22 - 04 Nov 2021
- The `init` step of the Action now supports `ram` and `threads` inputs to limit resource use of CodeQL extractors. These inputs also serve as defaults to the subsequent `analyze` step, which finalizes the database and executes queries. [#738](https://github.com/github/codeql-action/pull/738)
- When used with CodeQL 2.7.1 or above, the Action now includes custom query help in the analysis results uploaded to GitHub code scanning, if available. To add help text for a custom query, create a Markdown file next to the `.ql` file containing the query, using the same base name but the file extension `.md`. [#804](https://github.com/github/codeql-action/pull/804)
## 1.0.21 - 28 Oct 2021
- Update default CodeQL bundle version to 2.7.0. [#795](https://github.com/github/codeql-action/pull/795)
## 1.0.20 - 25 Oct 2021
No user facing changes.
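Taken together, the inputs described in the 1.0.22 to 1.0.27 entries above could be combined roughly as follows. This is a minimal sketch: the numeric values and the explicit `wait-for-processing` setting are illustrative, not recommendations.

    steps:
      - uses: github/codeql-action/init@v1
        with:
          languages: java
          ram: 4096     # MB available to CodeQL extractors; also the default for the analyze step
          threads: 2    # extractor threads; also the default for the analyze step
      - uses: github/codeql-action/analyze@v1
        with:
          wait-for-processing: true   # wait up to 2 minutes for SARIF processing errors to be reported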


@@ -1,6 +1,6 @@
name: 'CodeQL: Finish'
description: 'Finalize CodeQL database'
author: 'GitHub'
name: "CodeQL: Finish"
description: "Finalize CodeQL database"
author: "GitHub"
inputs:
  check_name:
    description: The name of the check run to add text to.
@@ -8,9 +8,9 @@ inputs:
  output:
    description: The path of the directory in which to save the SARIF results
    required: false
    default: '../results'
    default: "../results"
  upload:
    description: Upload the SARIF file
    description: Upload the SARIF file to Code Scanning
    required: false
    default: "true"
  cleanup-level:
@@ -18,7 +18,12 @@ inputs:
    required: false
    default: "brutal"
  ram:
    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
    description: >-
      The amount of memory in MB that can be used by CodeQL for database finalization and query execution.
      By default, this action will use the same amount of memory as previously set in the "init" action.
      If the "init" action also does not have an explicit "ram" input, this action will use most of the
      memory available in the system (which for GitHub-hosted runners is 6GB for Linux, 5.5GB for Windows,
      and 13GB for macOS).
    required: false
  add-snippets:
    description: Specify whether or not to add code snippets to the output sarif file.
@@ -29,7 +34,12 @@ inputs:
    required: false
    default: "false"
  threads:
    description: The number of threads to be used by CodeQL.
    description: >-
      The number of threads that can be used by CodeQL for database finalization and query execution.
      By default, this action will use the same number of threads as previously set in the "init" action.
      If the "init" action also does not have an explicit "threads" input, this action will use all the
      hardware threads available in the system (which for GitHub-hosted runners is 2 for Linux and Windows
      and 3 for macOS).
    required: false
  checkout_path:
    description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
@@ -42,6 +52,10 @@ inputs:
    description: Whether to upload the resulting CodeQL database
    required: false
    default: "true"
  wait-for-processing:
    description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
    required: true
    default: "false"
  token:
    default: ${{ github.token }}
  matrix:
@@ -50,5 +64,5 @@ outputs:
  db-locations:
    description: A map from language to absolute path for each database created by CodeQL.
runs:
  using: 'node12'
  main: '../lib/analyze-action.js'
  using: "node12"
  main: "../lib/analyze-action.js"


@@ -41,6 +41,34 @@ inputs:
  source-root:
    description: Path of the root source code directory, relative to $GITHUB_WORKSPACE.
    required: false
  ram:
    description: >-
      The amount of memory in MB that can be used by CodeQL extractors.
      By default, CodeQL extractors will use most of the memory available in the system
      (which for GitHub-hosted runners is 6GB for Linux, 5.5GB for Windows, and 13GB for macOS).
      This input also sets the amount of memory that can later be used by the "analyze" action.
    required: false
  threads:
    description: >-
      The number of threads that can be used by CodeQL extractors.
      By default, CodeQL extractors will use all the hardware threads available in the system
      (which for GitHub-hosted runners is 2 for Linux and Windows and 3 for macOS).
      This input also sets the number of threads that can later be used by the "analyze" action.
    required: false
  debug:
    description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
    required: false
    default: 'false'
  debug-artifact-name:
    description: >-
      The name of the artifact to store debugging information in.
      This is only used when debug mode is enabled.
    required: false
  debug-database-name:
    description: >-
      The name of the database uploaded to the debugging artifact.
      This is only used when debug mode is enabled.
    required: false
outputs:
  codeql-path:
    description: The path of the CodeQL binary used for analysis

7 lib/actions-util.js generated

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
@@ -98,6 +98,7 @@ const getCommitOid = async function (ref = "HEAD") {
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
core.info(e.stack || "NO STACK");
return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
}
};
@@ -574,4 +575,8 @@ async function isAnalyzingDefaultBranch() {
return currentRef === defaultBranch;
}
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
function sanitizeArifactName(name) {
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
}
exports.sanitizeArifactName = sanitizeArifactName;
//# sourceMappingURL=actions-util.js.map

File diff suppressed because one or more lines are too long


@@ -440,4 +440,10 @@ on: ["push"]
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
});
});
(0, ava_1.default)("sanitizeArifactName", (t) => {
t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
t.deepEqual(actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
});
//# sourceMappingURL=actions-util.test.js.map

File diff suppressed because one or more lines are too long

4 lib/analysis-paths.js generated

@@ -37,11 +37,11 @@ function buildIncludeExcludeEnvVar(paths) {
return paths.join("\n");
}
function printPathFiltersWarning(config, logger) {
// Index include/exclude/filters only work in javascript and python.
// Index include/exclude/filters only work in javascript/python/ruby.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python');
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript, Python, and Ruby');
}
}
exports.printPathFiltersWarning = printPathFiltersWarning;


@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}


@@ -42,6 +42,9 @@ const util = __importStar(require("./util"));
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -63,6 +66,9 @@ const util = __importStar(require("./util"));
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -85,6 +91,9 @@ const util = __importStar(require("./util"));
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);


@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

77 lib/analyze-action-env.test.js generated Normal file

@@ -0,0 +1,77 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze = __importStar(require("./analyze"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
// This test needs to be in its own file so that ava would run it in its own
// nodejs process. The code being tested is in analyze-action.ts, which runs
// immediately on load. So the file needs to be loaded during part of the test,
// and that can happen only once per nodejs process. If multiple such tests are
// in the same test file, ava would run them in the same nodejs process, and all
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
languages: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
// When there are no action inputs for RAM and threads, the action uses
// environment variables (passed down from the init action) to set RAM and
// threads usage.
process.env["CODEQL_THREADS"] = "-1";
process.env["CODEQL_RAM"] = "4992";
const runFinalizeStub = sinon.stub(analyze, "runFinalize");
const runQueriesStub = sinon.stub(analyze, "runQueries");
const analyzeAction = require("./analyze-action");
// When analyze-action.ts loads, it runs an async function from the top
// level but does not wait for it to finish. To ensure that calls to
// runFinalize and runQueries are correctly captured by spies, we explicitly
// wait for the action promise to complete before starting verification.
await analyzeAction.runPromise;
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=4992");
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=4992");
});
});
//# sourceMappingURL=analyze-action-env.test.js.map


@@ -0,0 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

77
lib/analyze-action-input.test.js generated Normal file

@@ -0,0 +1,77 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze = __importStar(require("./analyze"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
// This test needs to be in its own file so that ava would run it in its own
// nodejs process. The code being tested is in analyze-action.ts, which runs
// immediately on load. So the file needs to be loaded during part of the test,
// and that can happen only once per nodejs process. If multiple such tests are
// in the same test file, ava would run them in the same nodejs process, and all
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
languages: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";
process.env["CODEQL_RAM"] = "4992";
// Action inputs have precedence over environment variables.
optionalInputStub.withArgs("threads").returns("-1");
optionalInputStub.withArgs("ram").returns("3012");
const runFinalizeStub = sinon.stub(analyze, "runFinalize");
const runQueriesStub = sinon.stub(analyze, "runQueries");
const analyzeAction = require("./analyze-action");
// When analyze-action.ts loads, it runs an async function from the top
// level but does not wait for it to finish. To ensure that calls to
// runFinalize and runQueries are correctly captured by spies, we explicitly
// wait for the action promise to complete before starting verification.
await analyzeAction.runPromise;
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=3012");
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=3012");
});
});
//# sourceMappingURL=analyze-action-input.test.js.map
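
Both of the test files above rely on analyze-action exporting the promise of its top-level run; the corresponding change appears later in this diff in lib/analyze-action.js as exports.runPromise = run(). A stripped-down sketch of that pattern, with the module and function names chosen purely for illustration:

// action-module.ts (illustrative): the work starts as soon as the module is
// loaded, but the promise is exported so callers and tests can await completion.
async function run(): Promise<void> {
  // ... real work would happen here ...
}

export const runPromise = run();

async function runWrapper(): Promise<void> {
  try {
    // Await the already-started promise instead of invoking run() a second time.
    await runPromise;
  } catch (error) {
    console.error(`action failed: ${error}`);
  }
}

void runWrapper();

// In a test: load the module (which kicks off run()) and then
//   await require("./action-module").runPromise;
// before asserting on any stubs.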


94
lib/analyze-action.js generated

@@ -19,18 +19,22 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.runPromise = exports.sendStatusReport = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const artifact = __importStar(require("@actions/artifact"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const database_upload_1 = require("./database-upload");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendStatusReport(startedAt, stats, error) {
@@ -44,9 +48,10 @@ async function sendStatusReport(startedAt, stats, error) {
};
await actionsUtil.sendStatusReport(statusReport);
}
exports.sendStatusReport = sendStatusReport;
async function run() {
const startedAt = new Date();
let uploadStats = undefined;
let uploadResult = undefined;
let runStats = undefined;
let config = undefined;
util.initializeEnvironment(util.Mode.actions, pkg.version);
@@ -65,11 +70,39 @@ async function run() {
url: util.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
const outputDir = actionsUtil.getRequiredInput("output");
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram"));
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
const featureFlags = new feature_flags_1.GitHubFeatureFlags(config.gitHubVersion, apiDetails, repositoryNwo, logger);
// We currently perform an API request in both the `init` and `analyze` Actions to determine
// what feature flags are enabled. At the time of writing, this redundant API call is acceptable
// to us, but if we wanted to avoid it, we could do so by serializing the feature flags as part
// of the config file.
void featureFlags.preloadFeatureFlags();
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
if (config.debugMode) {
// Upload the SARIF files as an Actions artifact for debugging
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir, config.debugArtifactName);
}
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (config.debugMode) {
// Upload the logs as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages) {
toUpload.push(...listFolder(path.resolve(util.getCodeQLDatabasePath(config, language), "log")));
}
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Multilanguage tracing: there are additional logs in the root of the cluster
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir, config.debugArtifactName);
}
}
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
@@ -80,13 +113,17 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
}
else {
logger.info("Not uploading results");
}
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
// Possibly upload the database bundles for remote queries
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, featureFlags, apiDetails, logger);
if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
}
catch (origError) {
const error = origError instanceof Error ? origError : new Error(String(origError));
@@ -102,6 +139,19 @@ async function run() {
return;
}
finally {
if (config !== undefined && config.debugMode) {
try {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages) {
toUpload.push(await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
}
catch (error) {
console.log(`Failed to upload database debug bundles: ${error}`);
}
}
if (core.isDebug() && config !== undefined) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
@@ -124,8 +174,11 @@ async function run() {
}
}
}
if (runStats && uploadStats) {
await sendStatusReport(startedAt, { ...runStats, ...uploadStats });
if (runStats && uploadResult) {
await sendStatusReport(startedAt, {
...runStats,
...uploadResult.statusReport,
});
}
else if (runStats) {
await sendStatusReport(startedAt, { ...runStats });
@@ -134,9 +187,32 @@ async function run() {
await sendStatusReport(startedAt, undefined);
}
}
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
let suffix = "";
const matrix = actionsUtil.getRequiredInput("matrix");
if (matrix !== undefined && matrix !== "null") {
for (const entry of Object.entries(JSON.parse(matrix)).sort())
suffix += `-${entry[1]}`;
}
await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
}
function listFolder(dir) {
const entries = fs.readdirSync(dir, { withFileTypes: true });
const files = [];
for (const entry of entries) {
if (entry.isFile()) {
files.push(path.resolve(dir, entry.name));
}
else if (entry.isDirectory()) {
files.push(...listFolder(path.resolve(dir, entry.name)));
}
}
return files;
}
exports.runPromise = run();
async function runWrapper() {
try {
await run();
await exports.runPromise;
}
catch (error) {
core.setFailed(`analyze action failed: ${error}`);

File diff suppressed because one or more lines are too long
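
The new uploadDebugArtifacts helper above derives an artifact name suffix from the matrix input by sorting the parsed entries and appending each value. A small self-contained sketch of just that naming step (buildArtifactName is a hypothetical name; the real code additionally sanitizes the name and hands the file list to the Actions artifact client):

// Sketch of the suffix logic: parse the JSON matrix, sort its entries by key,
// and append each value to the base artifact name.
function buildArtifactName(baseName: string, matrix: string | undefined): string {
  let suffix = "";
  if (matrix !== undefined && matrix !== "null") {
    for (const [, value] of Object.entries(JSON.parse(matrix) as Record<string, string>).sort()) {
      suffix += `-${value}`;
    }
  }
  return `${baseName}${suffix}`;
}

// For example, a matrix of {"language":"javascript","os":"ubuntu-latest"} yields
// "my-debug-artifacts-javascript-ubuntu-latest".
console.log(buildArtifactName("my-debug-artifacts", '{"language":"javascript","os":"ubuntu-latest"}'));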

3
lib/analyze.test.js generated

@@ -125,6 +125,9 @@ const util = __importStar(require("./util"));
},
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs,
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,

File diff suppressed because one or more lines are too long


@@ -1 +1 @@
{ "maximumVersion": "3.3", "minimumVersion": "3.0" }
{ "maximumVersion": "3.4", "minimumVersion": "3.0" }

41
lib/codeql.js generated

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getExtraOptions = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -37,6 +37,7 @@ const languages_1 = require("./languages");
const toolcache = __importStar(require("./toolcache"));
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
const util = __importStar(require("./util"));
const util_1 = require("./util");
class CommandInvocationError extends Error {
constructor(cmd, args, exitCode, error) {
super(`Failure invoking ${cmd} with arguments ${args}.\n
@@ -72,22 +73,18 @@ const CODEQL_VERSION_METRICS = "2.5.5";
const CODEQL_VERSION_GROUP_RULES = "2.5.5";
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
/**
* Version above which we use the CLI's indirect build tracing and
* multi-language tracing features.
* This variable controls using the new style of tracing from the CodeQL
* CLI. In particular, with versions above this we will use both indirect
* tracing, and multi-language tracing together with database clusters.
*
* There are currently three blockers on the CLI's side to enabling this:
* (1) The logs directory should be created for a DB cluster, as some
* autobuilders expect it to be present.
* (2) The SEMMLE_PRELOAD_libtrace{32,64}? env variables need to be set.
* (3) The .environment and .win32env files need to be created next to
* the DB spec.
*
* Once _all_ of these are fixed, we can enable this by setting the
* version flag below to the earliest version of the CLI that resolved
* the above issues.
* Note that there were bugs in both of these features that were fixed in
* release 2.7.0 of the CodeQL CLI, therefore this flag is only enabled for
* versions above that.
*/
exports.CODEQL_VERSION_NEW_TRACING = "99.99.99";
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
function getCodeQLBundleName() {
let platform;
if (process.platform === "win32") {
@@ -220,7 +217,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
// specified explicitly (in which case we always honor it).
if (!codeqlFolder && !codeqlURL && !forceLatest) {
const codeqlVersions = toolcache.findAllVersions("CodeQL", toolCacheDir, logger);
if (codeqlVersions.length === 1) {
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], toolCacheDir, logger);
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
@@ -358,6 +355,15 @@ function getCachedCodeQL() {
return cachedCodeQL;
}
exports.getCachedCodeQL = getCachedCodeQL;
/**
* Get a real, newly created CodeQL instance for testing. The instance refers to
* a non-existent placeholder codeql command, so tests that use this function
* should also stub the toolrunner.ToolRunner constructor.
*/
async function getCodeQLForTesting() {
return getCodeQLForCmd("codeql-for-testing", false);
}
exports.getCodeQLForTesting = getCodeQLForTesting;
async function getCodeQLForCmd(cmd, checkVersion) {
let cachedVersion = undefined;
const codeql = {
@@ -568,6 +574,8 @@ async function getCodeQLForCmd(cmd, checkVersion) {
codeqlArgs.push("--print-metrics-summary");
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_GROUP_RULES))
codeqlArgs.push("--sarif-group-rules-by-pack");
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_CUSTOM_QUERY_HELP))
codeqlArgs.push("--sarif-add-query-help");
if (automationDetailsId !== undefined &&
(await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP))) {
codeqlArgs.push("--sarif-category", automationDetailsId);
@@ -630,12 +638,13 @@ async function getCodeQLForCmd(cmd, checkVersion) {
];
await runTool(cmd, codeqlArgs);
},
async databaseBundle(databasePath, outputFilePath) {
async databaseBundle(databasePath, outputFilePath, databaseName) {
const args = [
"database",
"bundle",
databasePath,
`--output=${outputFilePath}`,
`--name=${databaseName}`,
];
await new toolrunner.ToolRunner(cmd, args).exec();
},

File diff suppressed because one or more lines are too long
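
The databaseInterpretResults change above adds --sarif-add-query-help only when the CLI is at least CODEQL_VERSION_CUSTOM_QUERY_HELP (2.7.1), and the new tests in lib/codeql.test.js below check both sides of that boundary. A standalone sketch of this style of version gating, assuming the semver package (the real helper, util.codeQlVersionAbove, is async and asks the CodeQL object for its version, which those tests stub):

import * as semver from "semver";

const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";

// Sketch: append a flag only when the resolved CLI version is at or above the
// version that introduced it.
function buildInterpretArgs(cliVersion: string): string[] {
  const args = ["database", "interpret-results"];
  if (semver.gte(cliVersion, CODEQL_VERSION_CUSTOM_QUERY_HELP)) {
    args.push("--sarif-add-query-help");
  }
  return args;
}

console.log(buildInterpretArgs("2.7.0")); // flag absent
console.log(buildInterpretArgs("2.7.1")); // flag present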

23
lib/codeql.test.js generated

@@ -23,9 +23,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const sinon = __importStar(require("sinon"));
const codeql = __importStar(require("./codeql"));
const defaults = __importStar(require("./defaults.json"));
const logging_1 = require("./logging");
@@ -217,4 +219,25 @@ ava_1.default.beforeEach(() => {
const repoEnv = codeql.getCodeQLActionRepository(logger);
t.deepEqual(repoEnv, "xxx/yyy");
});
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-query-help for 2.7.0", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
});
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
});
function stubToolRunnerConstructor() {
const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
runnerObjectStub.exec.resolves(0);
const runnerConstructorStub = sinon.stub(toolrunner, "ToolRunner");
runnerConstructorStub.returns(runnerObjectStub);
return runnerConstructorStub;
}
//# sourceMappingURL=codeql.test.js.map

File diff suppressed because one or more lines are too long

56
lib/config-utils.js generated

@@ -25,8 +25,11 @@ const path = __importStar(require("path"));
const yaml = __importStar(require("js-yaml"));
const semver = __importStar(require("semver"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const util_1 = require("./util");
// Property names from the user-supplied config file.
const NAME_PROPERTY = "name";
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
@@ -116,11 +119,26 @@ const builtinSuites = ["security-extended", "security-and-quality"];
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite.
*/
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
const found = builtinSuites.find((suite) => suite === suiteName);
if (!found) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
// If we're running the JavaScript security-extended analysis (or a superset of it) and the repo
// is opted into the ML-powered queries beta, then add the ML-powered query pack so that we run
// the ML-powered queries.
if (languages.includes("javascript") &&
(found === "security-extended" || found === "security-and-quality") &&
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
if (!packs.javascript) {
packs.javascript = [];
}
packs.javascript.push({
packName: "codeql/javascript-experimental-atm-queries",
version: "~0.0.2",
});
}
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
}
@@ -180,7 +198,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*/
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, workspacePath, apiDetails, logger, configFile) {
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
@@ -192,7 +210,7 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
}
// Check for one of the builtin suites
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
return;
}
// Otherwise, must be a reference to another repo
@@ -404,12 +422,12 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
}
return parsedLanguages;
}
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, workspacePath, apiDetails, logger) {
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, workspacePath, apiDetails, logger);
await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
}
// Returns true if either no queries were provided in the workflow.
@@ -425,7 +443,7 @@ function shouldAddConfigFileQueries(queriesInput) {
/**
* Get the default config for when the user has not supplied one.
*/
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a;
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
const queries = {};
@@ -436,10 +454,10 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
};
}
await addDefaultQueries(codeQL, languages, queries);
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
}
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
if (queriesInput) {
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
return {
languages,
queries,
@@ -452,13 +470,16 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
codeQLCmd: codeQL.getPath(),
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
};
}
exports.getDefaultConfig = getDefaultConfig;
/**
* Load the config from the given file.
*/
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a;
let parsedYAML;
if (isLocal(configFile)) {
@@ -499,12 +520,13 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
if (!disableDefaultQueries) {
await addDefaultQueries(codeQL, languages, queries);
}
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
@@ -517,7 +539,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, logger, configFile);
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureFlags, logger, configFile);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
@@ -542,7 +564,6 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
}
}
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
return {
languages,
queries,
@@ -555,6 +576,9 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
codeQLCmd: codeQL.getPath(),
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
};
}
/**
@@ -680,16 +704,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a, _b, _c;
let config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug("No configuration file was provided");
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
}
else {
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.

File diff suppressed because one or more lines are too long
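
The addBuiltinSuiteQueries change above only injects the ML-powered query pack when four conditions hold at once: JavaScript is among the analyzed languages, the suite is security-extended or security-and-quality, the MlPoweredQueriesEnabled feature flag is on, and the CLI is at least 2.7.5. A condensed, synchronous sketch of that decision (maybeAddMlPoweredPack is an illustrative name; the real checks are async and go through featureFlags.getValue and codeQlVersionAbove):

import * as semver from "semver";

type Packs = { [language: string]: Array<{ packName: string; version: string }> };

const CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";

// Sketch: add the experimental ATM pack only when every opt-in condition holds.
function maybeAddMlPoweredPack(
  packs: Packs,
  languages: string[],
  suiteName: string,
  mlPoweredQueriesFlagEnabled: boolean,
  cliVersion: string
): void {
  const eligibleSuite =
    suiteName === "security-extended" || suiteName === "security-and-quality";
  if (
    languages.includes("javascript") &&
    eligibleSuite &&
    mlPoweredQueriesFlagEnabled &&
    semver.gte(cliVersion, CODEQL_VERSION_ML_POWERED_QUERIES)
  ) {
    if (!packs.javascript) {
      packs.javascript = [];
    }
    packs.javascript.push({
      packName: "codeql/javascript-experimental-atm-queries",
      version: "~0.0.2",
    });
  }
}

The truth table of tests added to config-utils.test.js further down in this diff (CLI 2.7.4 vs 2.7.5, flag on or off, explicit suite vs default config) exercises exactly these branches.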


@@ -31,6 +31,7 @@ const sinon = __importStar(require("sinon"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
@@ -88,8 +89,8 @@ function mockListLanguages(languages) {
};
},
});
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
});
});
(0, ava_1.default)("loading config saves config", async (t) => {
@@ -111,7 +112,7 @@ function mockListLanguages(languages) {
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig returns undefined before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
@@ -122,7 +123,7 @@ function mockListLanguages(languages) {
(0, ava_1.default)("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
try {
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -135,7 +136,7 @@ function mockListLanguages(languages) {
// no filename given, just a repo
const configFile = "octo-org/codeql-config@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -149,7 +150,7 @@ function mockListLanguages(languages) {
const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile)));
try {
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -214,10 +215,13 @@ function mockListLanguages(languages) {
gitHubVersion,
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: "my-artifact",
debugDatabaseName: "my-db",
};
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
@@ -253,7 +257,7 @@ function mockListLanguages(languages) {
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, [
@@ -296,7 +300,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
@@ -329,7 +333,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file.
@@ -361,7 +365,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled
@@ -387,7 +391,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
@@ -426,7 +430,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries,
// once for each of additional1 and additional2,
@@ -465,7 +469,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
try {
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.fail("initConfig did not throw error");
}
catch (err) {
@@ -508,7 +512,7 @@ function queriesToResolvedQueryForm(queries) {
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript";
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.assert(spyGetContents.called);
});
});
@@ -518,7 +522,7 @@ function queriesToResolvedQueryForm(queries) {
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -534,7 +538,7 @@ function queriesToResolvedQueryForm(queries) {
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -551,7 +555,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
try {
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -563,7 +567,7 @@ function queriesToResolvedQueryForm(queries) {
return await util.withTmpDir(async (tmpDir) => {
const languages = "rubbish,english";
try {
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -591,7 +595,7 @@ function queriesToResolvedQueryForm(queries) {
const configFile = path.join(tmpDir, "codeql-config.yaml");
fs.writeFileSync(configFile, inputFileContents);
const languages = "javascript";
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
@@ -630,7 +634,7 @@ function queriesToResolvedQueryForm(queries) {
fs.writeFileSync(configFile, inputFileContents);
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript,python,cpp";
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
@@ -683,7 +687,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, "utf8");
try {
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -866,6 +870,50 @@ parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
// errors
// input w invalid pack name
async function mlPoweredQueriesMacro(t, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async getVersion() {
return codeQLVersion;
},
async resolveQueries() {
return {
byLanguage: {
javascript: { "fake-query.ql": {} },
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
const { packs } = await configUtils.initConfig("javascript", queriesInput, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
: []), (0, logging_1.getRunnerLogger)(true));
if (shouldRunMlPoweredQueries) {
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
packName: "codeql/javascript-experimental-atm-queries",
version: "~0.0.2",
},
],
});
}
else {
t.deepEqual(packs, {});
}
});
}
mlPoweredQueriesMacro.title = (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) => {
const queriesInputDescription = queriesInput
? `'queries: ${queriesInput}'`
: "default config";
return `ML-powered queries ${shouldRunMlPoweredQueries ? "are" : "aren't"} loaded for ${queriesInputDescription} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`;
};
// macro, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, "security-extended", false);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, "security-extended", false);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, false);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "security-extended", true);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "security-and-quality", true);
//# sourceMappingURL=config-utils.test.js.map

File diff suppressed because one or more lines are too long

60
lib/database-upload.js generated

@@ -24,8 +24,10 @@ const fs = __importStar(require("fs"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags");
const util = __importStar(require("./util"));
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
const util_1 = require("./util");
async function uploadDatabases(repositoryNwo, config, featureFlags, apiDetails, logger) {
if (actionsUtil.getRequiredInput("upload-database") !== "true") {
logger.debug("Database upload disabled in workflow. Skipping upload.");
return;
@@ -40,38 +42,40 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
logger.debug("Not analyzing default branch. Skipping upload.");
return;
}
const client = (0, api_client_1.getApiClient)(apiDetails);
try {
await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
});
}
catch (e) {
if (util.isHTTPError(e) && e.status === 404) {
logger.debug("Repository is not opted in to database uploads. Skipping upload.");
}
else {
console.log(e);
logger.info(`Skipping database upload due to unknown error: ${e}`);
}
if (!(await featureFlags.getValue(feature_flags_1.FeatureFlag.DatabaseUploadsEnabled))) {
logger.debug("Repository is not opted in to database uploads. Skipping upload.");
return;
}
const client = (0, api_client_1.getApiClient)(apiDetails);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const useUploadDomain = await featureFlags.getValue(feature_flags_1.FeatureFlag.UploadsDomainEnabled);
for (const language of config.languages) {
// Bundle the database up into a single zip file
const databasePath = util.getCodeQLDatabasePath(config, language);
const databaseBundlePath = `${databasePath}.zip`;
await codeql.databaseBundle(databasePath, databaseBundlePath);
// Upload the database bundle
const payload = fs.readFileSync(databaseBundlePath);
// Upload the database bundle.
// Although we are uploading arbitrary file contents to the API, it's worth
// noting that it's the API's job to validate that the contents is acceptable.
// This API method is available to anyone with write access to the repo.
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql, language));
try {
await client.request(`PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
language,
data: payload,
});
if (useUploadDomain) {
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
language,
name: `${language}-database`,
data: payload,
headers: {
authorization: `token ${apiDetails.auth}`,
},
});
}
else {
await client.request(`PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
language,
data: payload,
});
}
logger.debug(`Successfully uploaded database for ${language}`);
}
catch (e) {



@@ -30,6 +30,7 @@ const actionsUtil = __importStar(require("./actions-util"));
const apiClient = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const database_upload_1 = require("./database-upload");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
@@ -37,6 +38,10 @@ const util_1 = require("./util");
ava_1.default.beforeEach(() => {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
});
const uploadToUploadsDomainFlags = (0, feature_flags_1.createFeatureFlags)([
feature_flags_1.FeatureFlag.DatabaseUploadsEnabled,
feature_flags_1.FeatureFlag.UploadsDomainEnabled,
]);
const testRepoName = { owner: "github", repo: "example" };
const testApiDetails = {
auth: "1234",
@@ -55,50 +60,24 @@ function getTestConfig(tmpDir) {
gitHubVersion: { type: util_1.GitHubVariant.DOTCOM },
dbLocation: tmpDir,
packs: {},
debugMode: false,
debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
};
}
function getRecordingLogger(messages) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
async function mockHttpRequests(featureFlags, databaseUploadStatusCode) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request");
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql/databases");
if (optInStatusCode < 300) {
optInSpy.resolves(undefined);
const url = (await featureFlags.getValue(feature_flags_1.FeatureFlag.UploadsDomainEnabled))
? "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name"
: "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language";
const databaseUploadSpy = requestSpy.withArgs(url);
if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
}
else {
optInSpy.throws(new util_1.HTTPError("some error message", optInStatusCode));
}
if (databaseUploadStatusCode !== undefined) {
const databaseUploadSpy = requestSpy.withArgs("PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language");
if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
}
else {
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
}
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
}
sinon.stub(apiClient, "getApiClient").value(() => client);
}
@@ -111,7 +90,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
.returns("false");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined);
});
@@ -127,7 +106,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, config, (0, feature_flags_1.createFeatureFlags)([]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined);
});
@@ -143,7 +122,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHAE };
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, config, (0, feature_flags_1.createFeatureFlags)([]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined);
});
@@ -157,12 +136,12 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.") !== undefined);
});
});
(0, ava_1.default)("Abort database upload if opt-in request returns 404", async (t) => {
(0, ava_1.default)("Abort database upload if feature flag is disabled", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
@@ -170,40 +149,18 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(404);
(0, codeql_1.setCodeQL)({
async databaseBundle() {
return;
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.UploadsDomainEnabled]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Repository is not opted in to database uploads. Skipping upload.") !== undefined);
});
});
(0, ava_1.default)("Abort database upload if opt-in request fails with something other than 404", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(500);
(0, codeql_1.setCodeQL)({
async databaseBundle() {
return;
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "info" &&
v.message ===
"Skipping database upload due to unknown error: Error: some error message") !== undefined);
});
});
(0, ava_1.default)("Don't crash if uploading a database fails", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -212,20 +169,23 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(204, 500);
const featureFlags = (0, feature_flags_1.createFeatureFlags)([
feature_flags_1.FeatureFlag.DatabaseUploadsEnabled,
]);
await mockHttpRequests(featureFlags, 500);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), featureFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "warning" &&
v.message ===
"Failed to upload database for javascript: Error: some error message") !== undefined);
});
});
(0, ava_1.default)("Successfully uploading a database", async (t) => {
(0, ava_1.default)("Successfully uploading a database to api.github.com", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
@@ -233,14 +193,34 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(204, 201);
await mockHttpRequests(uploadToUploadsDomainFlags, 201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined);
});
});
(0, ava_1.default)("Successfully uploading a database to uploads.github.com", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(uploadToUploadsDomainFlags, 201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined);
});

File diff suppressed because one or more lines are too long


@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20211013"
"bundleVersion": "codeql-bundle-20220112"
}

91 lib/feature-flags.js generated Normal file

@@ -0,0 +1,91 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFeatureFlags = exports.GitHubFeatureFlags = exports.FeatureFlag = void 0;
const api_client_1 = require("./api-client");
const util = __importStar(require("./util"));
var FeatureFlag;
(function (FeatureFlag) {
FeatureFlag["DatabaseUploadsEnabled"] = "database_uploads_enabled";
FeatureFlag["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
FeatureFlag["UploadsDomainEnabled"] = "uploads_domain_enabled";
})(FeatureFlag = exports.FeatureFlag || (exports.FeatureFlag = {}));
class GitHubFeatureFlags {
constructor(gitHubVersion, apiDetails, repositoryNwo, logger) {
this.gitHubVersion = gitHubVersion;
this.apiDetails = apiDetails;
this.repositoryNwo = repositoryNwo;
this.logger = logger;
}
async getValue(flag) {
const response = (await this.getApiResponse())[flag];
if (response === undefined) {
this.logger.debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
return false;
}
return response;
}
async preloadFeatureFlags() {
await this.getApiResponse();
}
async getApiResponse() {
const loadApiResponse = async () => {
// Do nothing when not running against github.com
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
this.logger.debug("Not running against github.com. Disabling all feature flags.");
return {};
}
const client = (0, api_client_1.getApiClient)(this.apiDetails);
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql-action/features", {
owner: this.repositoryNwo.owner,
repo: this.repositoryNwo.repo,
});
return response.data;
}
catch (e) {
// Some feature flags, such as `ml_powered_queries_enabled` affect the produced alerts.
// Considering these feature flags disabled in the event of a transient error could
// therefore lead to alert churn. As a result, we crash if we cannot determine the value of
// the feature flags.
throw new Error(`Encountered an error while trying to load feature flags: ${e}`);
}
};
const apiResponse = this.cachedApiResponse || (await loadApiResponse());
this.cachedApiResponse = apiResponse;
return apiResponse;
}
}
exports.GitHubFeatureFlags = GitHubFeatureFlags;
/**
* Create a feature flags instance with the specified set of enabled flags.
*
* This should be only used within tests.
*/
function createFeatureFlags(enabledFlags) {
return {
getValue: async (flag) => {
return enabledFlags.includes(flag);
},
};
}
exports.createFeatureFlags = createFeatureFlags;
//# sourceMappingURL=feature-flags.js.map
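A short usage sketch of the new module, assembled only from the code above and the tests below: GitHubFeatureFlags is the production implementation backed by the code-scanning/codeql-action/features endpoint (its response is cached after the first request), while createFeatureFlags builds an in-memory stub for tests. The example function and its literal values are illustrative.

const { GitHubFeatureFlags, FeatureFlag, createFeatureFlags } = require("./feature-flags");
const { parseRepositoryNwo } = require("./repository");
const { getRunnerLogger } = require("./logging");
const { GitHubVariant } = require("./util");

async function example() {
  // Production path: flag values come from the features API and are cached.
  const featureFlags = new GitHubFeatureFlags(
    { type: GitHubVariant.DOTCOM },
    { auth: "1234", url: "https://github.com" }, // apiDetails shape as used in the tests
    parseRepositoryNwo("github/example"),
    getRunnerLogger(true)
  );
  const uploadsDomainEnabled = await featureFlags.getValue(FeatureFlag.UploadsDomainEnabled);

  // Test path: only the listed flags report true, everything else is false.
  const testFlags = createFeatureFlags([FeatureFlag.DatabaseUploadsEnabled]);
  const databaseUploadsEnabled = await testFlags.getValue(FeatureFlag.DatabaseUploadsEnabled);

  return { uploadsDomainEnabled, databaseUploadsEnabled };
}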

1 lib/feature-flags.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAIX;AAJD,WAAY,WAAW;IACrB,kEAAmD,CAAA;IACnD,qEAAsD,CAAA;IACtD,8DAA+C,CAAA;AACjD,CAAC,EAJW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAItB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,MAAM,QAAQ,GAAG,CAAC,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC;QACrD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,KAAK,CAAC,mBAAmB;QACvB,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;IAC9B,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,uFAAuF;gBACvF,mFAAmF;gBACnF,2FAA2F;gBAC3F,qBAAqB;gBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;aACH;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AA3DD,gDA2DC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}

98 lib/feature-flags.test.js generated Normal file

@@ -0,0 +1,98 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
ava_1.default.beforeEach(() => {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
});
const testApiDetails = {
auth: "1234",
url: "https://github.com",
};
const testRepositoryNwo = (0, repository_1.parseRepositoryNwo)("github/example");
const ALL_FEATURE_FLAGS_DISABLED_VARIANTS = [
{
description: "GHES",
gitHubVersion: { type: util_1.GitHubVariant.GHES, version: "3.0.0" },
},
{ description: "GHAE", gitHubVersion: { type: util_1.GitHubVariant.GHAE } },
];
for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
(0, ava_1.default)(`All feature flags are disabled if running against ${variant.description}`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags(variant.gitHubVersion, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
}
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Not running against github.com. Disabling all feature flags.") !== undefined);
});
});
}
(0, ava_1.default)("Feature flags are disabled if they're not returned in API response", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
}
for (const featureFlag of [
"database_uploads_enabled",
"ml_powered_queries_enabled",
"uploads_domain_enabled",
]) {
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
`Feature flag '${featureFlag}' undefined in API response, considering it disabled.`) !== undefined);
}
});
});
(0, ava_1.default)("Feature flags exception is propagated if the API request errors", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(500, {});
await t.throwsAsync(async () => featureFlags.preloadFeatureFlags(), {
message: "Encountered an error while trying to load feature flags: Error: some error message",
});
});
});
const FEATURE_FLAGS = [
"database_uploads_enabled",
"ml_powered_queries_enabled",
"uploads_domain_enabled",
];
for (const featureFlag of FEATURE_FLAGS) {
(0, ava_1.default)(`Feature flag '${featureFlag}' is enabled if enabled in the API response`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
const expectedFeatureFlags = {};
for (const f of FEATURE_FLAGS) {
expectedFeatureFlags[f] = false;
}
expectedFeatureFlags[featureFlag] = true;
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureFlags);
const actualFeatureFlags = {
database_uploads_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.DatabaseUploadsEnabled),
ml_powered_queries_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled),
uploads_domain_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.UploadsDomainEnabled),
};
t.deepEqual(actualFeatureFlags, expectedFeatureFlags);
});
});
}
//# sourceMappingURL=feature-flags.test.js.map
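The tests above treat the features endpoint's response as a flat map from flag name to boolean. A small sketch of how a test stubs that response, using the mockFeatureFlagApiEndpoint helper added to lib/testing-utils.js further down; the flag values here are illustrative.

const { mockFeatureFlagApiEndpoint } = require("./testing-utils");

// Stub GET /repos/:owner/:repo/code-scanning/codeql-action/features with a 200 response.
// Any flag missing from this object is treated as disabled by GitHubFeatureFlags.
mockFeatureFlagApiEndpoint(200, {
  database_uploads_enabled: false,
  ml_powered_queries_enabled: true,
  uploads_domain_enabled: false,
});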


@@ -0,0 +1 @@
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI;YACxB,0BAA0B;YAC1B,4BAA4B;YAC5B,wBAAwB;SACzB,EAAE;YACD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,mBAAmB,EAAE,EAAE;YAClE,OAAO,EACL,oFAAoF;SACvF,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,0BAA0B;IAC1B,4BAA4B;IAC5B,wBAAwB;CACzB,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAG,EAAE,CAAC;YAChC,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAG;gBACzB,wBAAwB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACnD,2BAAW,CAAC,sBAAsB,CACnC;gBACD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACp
C;gBACD,sBAAsB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACjD,2BAAW,CAAC,oBAAoB,CACjC;aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}

5 lib/fingerprints.js generated

@@ -226,9 +226,8 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
async function addFingerprints(sarifContents, sourceRoot, logger) {
async function addFingerprints(sarif, sourceRoot, logger) {
var _a, _b, _c;
const sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile = {};
@@ -266,7 +265,7 @@ async function addFingerprints(sarifContents, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return JSON.stringify(sarif);
return sarif;
}
exports.addFingerprints = addFingerprints;
//# sourceMappingURL=fingerprints.js.map

File diff suppressed because one or more lines are too long
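The fingerprints change above alters the addFingerprints contract: it now receives an already-parsed SARIF object and returns the updated object, so the caller parses and stringifies the file once instead of the function doing it internally. A minimal sketch of the new call pattern; the helper name fingerprintSarifFile and the sarifPath argument are assumptions.

const fs = require("fs");
const fingerprints = require("./fingerprints");
const { getRunnerLogger } = require("./logging");

// Parse once, pass the object through, stringify once at the end.
async function fingerprintSarifFile(sarifPath, sourceRoot) {
  const sarif = JSON.parse(fs.readFileSync(sarifPath).toString());
  const withFingerprints = await fingerprints.addFingerprints(
    sarif,
    sourceRoot,
    getRunnerLogger(true)
  );
  return JSON.stringify(withFingerprints);
}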


@@ -169,30 +169,24 @@ function testResolveUriToFile(uri, index, artifactsURIs) {
});
(0, ava_1.default)("addFingerprints", async (t) => {
// Run an end-to-end test on a test file
let input = fs
const input = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString();
let expected = fs
.toString());
const expected = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
.toString());
// The URIs in the SARIF files resolve to files in the testdata directory
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);
});
(0, ava_1.default)("missingRegions", async (t) => {
// Run an end-to-end test on a test file
let input = fs
const input = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString();
let expected = fs
.toString());
const expected = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
.toString());
// The URIs in the SARIF files resolve to files in the testdata directory
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);

File diff suppressed because one or more lines are too long

21 lib/init-action.js generated

@@ -23,6 +23,7 @@ const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util");
const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
@@ -78,6 +79,9 @@ async function run() {
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
void featureFlags.preloadFeatureFlags();
try {
const workflowErrors = await (0, actions_util_1.validateWorkflow)();
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
@@ -87,7 +91,11 @@ async function run() {
codeql = initCodeQLResult.codeql;
toolsVersion = initCodeQLResult.toolsVersion;
await (0, util_1.enrichEnvironment)(util_1.Mode.actions, codeql);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("debug") === "true", (0, actions_util_1.getOptionalInput)("debug-artifact-name") || util_1.DEFAULT_DEBUG_ARTIFACT_NAME, (0, actions_util_1.getOptionalInput)("debug-database-name") || util_1.DEFAULT_DEBUG_DATABASE_NAME, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, featureFlags, logger);
if (config.languages.some(languages_1.isTracedLanguage)) {
// We currently do not support tracing on Windows 11 and Windows Server 2022
(0, util_1.checkNotWindows11)();
}
if (config.languages.includes(languages_1.Language.python) &&
(0, actions_util_1.getRequiredInput)("setup-python-dependencies") === "true") {
try {
@@ -113,9 +121,14 @@ async function run() {
core.exportVariable("GOFLAGS", goFlags);
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
}
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env["CODEQL_RAM"] || "6500";
core.exportVariable("CODEQL_RAM", codeqlRam);
// Limit RAM and threads for extractors. When running extractors, the CodeQL CLI obeys the
// CODEQL_RAM and CODEQL_THREADS environment variables to decide how much RAM and how many
// threads it would ask extractors to use. See help text for the "--ram" and "--threads"
// options at https://codeql.github.com/docs/codeql-cli/manual/database-trace-command/
// for details.
core.exportVariable("CODEQL_RAM", process.env["CODEQL_RAM"] ||
(0, util_1.getMemoryFlagValue)((0, actions_util_1.getOptionalInput)("ram")).toString());
core.exportVariable("CODEQL_THREADS", (0, util_1.getThreadsFlagValue)((0, actions_util_1.getOptionalInput)("threads"), logger).toString());
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", undefined);
if (tracerConfig !== undefined) {

File diff suppressed because one or more lines are too long
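Restated for readability, the init-action change above stops hard-coding CODEQL_RAM to 6500 and instead derives CODEQL_RAM and CODEQL_THREADS from the action's ram and threads inputs, with an existing CODEQL_RAM environment value still taking precedence. A sketch of just that step; the logger construction is an assumption made for the sketch.

const core = require("@actions/core");
const { getOptionalInput } = require("./actions-util");
const { getMemoryFlagValue, getThreadsFlagValue } = require("./util");
const { getRunnerLogger } = require("./logging");

const logger = getRunnerLogger(true); // any Logger works here

// Extractors obey CODEQL_RAM and CODEQL_THREADS, so seed them during init.
core.exportVariable(
  "CODEQL_RAM",
  process.env["CODEQL_RAM"] || getMemoryFlagValue(getOptionalInput("ram")).toString()
);
core.exportVariable(
  "CODEQL_THREADS",
  getThreadsFlagValue(getOptionalInput("threads"), logger).toString()
);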

4 lib/init.js generated

@@ -38,9 +38,9 @@ async function initCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant,
return { codeql, toolsVersion };
}
exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;


@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAlCD,gCAkCC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,0BAA0B;QAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;KACH;SAAM;QACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;SACH;KACF;IAED,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA9BD,0BA8BC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CA
AC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,YAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,YAAY,EACZ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA1CD,gCA0CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,0BAA0B;QAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;KACH;SAAM;QACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;SACH;KACF;IAED,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA9BD,0BA8BC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OA
AO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}

51 lib/runner.js generated

@@ -18,15 +18,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const commander_1 = require("commander");
const del_1 = __importDefault(require("del"));
const analyze_1 = require("./analyze");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
@@ -54,11 +59,14 @@ function getToolsDir(userInput) {
return toolsDir;
}
const codeqlEnvJsonFilename = "codeql-env.json";
function loadTracerEnvironment(config) {
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
return JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
}
// Imports the environment from codeqlEnvJsonFilename if not already present
function importTracerEnvironment(config) {
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
const env = loadTracerEnvironment(config);
for (const key of Object.keys(env)) {
process.env[key] = env[key];
}
@@ -116,6 +124,12 @@ program
.option("--debug", "Print more verbose output", false)
.option("--trace-process-name <string>", "(Advanced, windows-only) Inject a windows tracer of this process into a process with the given process name.")
.option("--trace-process-level <number>", "(Advanced, windows-only) Inject a windows tracer of this process into a parent process <number> levels up.")
.option("--ram <number>", "The amount of memory in MB that can be used by CodeQL extractors. " +
"By default, CodeQL extractors will use most of the memory available in the system. " +
'This input also sets the amount of memory that can later be used by the "analyze" command.')
.option("--threads <number>", "The number of threads that can be used by CodeQL extractors. " +
"By default, CodeQL extractors will use all the hardware threads available in the system. " +
'This input also sets the number of threads that can later be used by the "analyze" command.')
.action(async (cmd) => {
const logger = (0, logging_1.getRunnerLogger)(cmd.debug);
try {
@@ -124,7 +138,7 @@ program
const checkoutPath = cmd.checkoutPath || process.cwd();
// Wipe the temp dir
logger.info(`Cleaning temp directory ${tempDir}`);
fs.rmSync(tempDir, { recursive: true, force: true });
await (0, del_1.default)(tempDir, { force: true });
fs.mkdirSync(tempDir, { recursive: true });
const auth = await (0, util_1.getGitHubAuth)(logger, cmd.githubAuth, cmd.githubAuthStdin);
const apiDetails = {
@@ -134,6 +148,13 @@ program
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.runner);
// Limit RAM and threads for extractors. When running extractors, the CodeQL CLI obeys the
// CODEQL_RAM and CODEQL_THREADS environment variables to decide how much RAM and how many
// threads it would ask extractors to use. See help text for the "--ram" and "--threads"
// options at https://codeql.github.com/docs/codeql-cli/manual/database-trace-command/
// for details.
process.env["CODEQL_RAM"] = (0, util_1.getMemoryFlagValue)(cmd.ram).toString();
process.env["CODEQL_THREADS"] = (0, util_1.getThreadsFlagValue)(cmd.threads, logger).toString();
let codeql;
if (cmd.codeqlPath !== undefined) {
codeql = await (0, codeql_1.getCodeQL)(cmd.codeqlPath);
@@ -143,7 +164,7 @@ program
}
await (0, util_1.enrichEnvironment)(util_1.Mode.runner, codeql);
const workspacePath = checkoutPath;
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, logger);
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, "", "", (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
const sourceRoot = checkoutPath;
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel());
if (tracerConfig === undefined) {
@@ -240,10 +261,15 @@ program
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--no-upload", "Do not upload results after analysis.")
.option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
.option("--ram <ram>", "Amount of memory to use when running queries. Default is to use all available memory.")
.option("--ram <ram>", "The amount of memory in MB that can be used by CodeQL for database finalization and query execution. " +
'By default, this command will use the same amount of memory as previously set in the "init" command. ' +
'If the "init" command also does not have an explicit "ram" flag, this command will use most of the ' +
"memory available in the system.")
.option("--no-add-snippets", "Specify whether to include code snippets in the sarif output.")
.option("--threads <threads>", "Number of threads to use when running queries. " +
"Default is to use all available cores.")
.option("--threads <threads>", "The number of threads that can be used by CodeQL for database finalization and query execution. " +
'By default, this command will use the same number of threads as previously set in the "init" command. ' +
'If the "init" command also does not have an explicit "threads" flag, this command will use all the ' +
"hardware threads available in the system.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--category <category>", "String used by Code Scanning for matching the analyses.")
.option("--debug", "Print more verbose output", false)
@@ -262,8 +288,15 @@ program
url: (0, util_1.parseGitHubUrl)(cmd.githubUrl),
};
const outputDir = cmd.outputDir || path.join(config.tempDir, "codeql-sarif");
const threads = (0, util_1.getThreadsFlag)(cmd.threads, logger);
const memory = (0, util_1.getMemoryFlag)(cmd.ram);
let initEnv = {};
try {
initEnv = loadTracerEnvironment(config);
}
catch (err) {
// The init command did not generate a tracer environment file
}
const threads = (0, util_1.getThreadsFlag)(cmd.threads || initEnv["CODEQL_THREADS"], logger);
const memory = (0, util_1.getMemoryFlag)(cmd.ram || initEnv["CODEQL_RAM"]);
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
await (0, analyze_1.runQueries)(outputDir, memory, (0, util_1.getAddSnippetsFlag)(cmd.addSnippets), threads, cmd.category, config, logger);
if (!cmd.upload) {

File diff suppressed because one or more lines are too long
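The runner changes above add --ram and --threads options to the init command and make the analyze command fall back to whatever init recorded when its own flags are omitted. A hedged sketch of that fallback; the function name resolveAnalyzeResources is hypothetical, and the file it reads is the same codeql-env.json that the runner's loadTracerEnvironment helper uses.

const fs = require("fs");
const path = require("path");
const { getMemoryFlag, getThreadsFlag } = require("./util");

// Explicit --ram/--threads win; otherwise reuse the values saved by "init".
function resolveAnalyzeResources(cmd, config, logger) {
  let initEnv = {};
  try {
    const jsonEnvFile = path.join(config.tempDir, "codeql-env.json");
    initEnv = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
  } catch (err) {
    // The init command did not generate a tracer environment file.
  }
  return {
    memory: getMemoryFlag(cmd.ram || initEnv["CODEQL_RAM"]),
    threads: getThreadsFlag(cmd.threads || initEnv["CODEQL_THREADS"], logger),
  };
}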

49 lib/testing-utils.js generated

@@ -19,9 +19,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.setupActionsVars = exports.setupTests = void 0;
exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
const github = __importStar(require("@actions/github"));
const sinon = __importStar(require("sinon"));
const apiClient = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql"));
const util_1 = require("./util");
function wrapOutput(context) {
// Function signature taken from Socket.write.
// Note there are two overloads:
@@ -89,4 +92,48 @@ function setupActionsVars(tempDir, toolsDir) {
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
}
exports.setupActionsVars = setupActionsVars;
function getRecordingLogger(messages) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
exports.getRecordingLogger = getRecordingLogger;
/** Mock the HTTP request to the feature flags enablement API endpoint. */
function mockFeatureFlagApiEndpoint(responseStatusCode, response) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request");
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql-action/features");
if (responseStatusCode < 300) {
optInSpy.resolves({
status: responseStatusCode,
data: response,
headers: {},
url: "GET /repos/:owner/:repo/code-scanning/codeql-action/features",
});
}
else {
optInSpy.throws(new util_1.HTTPError("some error message", responseStatusCode));
}
sinon.stub(apiClient, "getApiClient").value(() => client);
}
exports.mockFeatureFlagApiEndpoint = mockFeatureFlagApiEndpoint;
//# sourceMappingURL=testing-utils.js.map


@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,6CAA+B;AAE/B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC"}
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CA
AC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}

12
lib/toolcache.js generated

@@ -18,6 +18,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadTool = exports.findAllVersions = exports.find = exports.cacheDir = exports.extractTar = void 0;
const fs = __importStar(require("fs"));
@@ -27,6 +30,7 @@ const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const io = __importStar(require("@actions/io"));
const actionsToolcache = __importStar(require("@actions/tool-cache"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const del_1 = __importDefault(require("del"));
const semver = __importStar(require("semver"));
const uuid_1 = require("uuid");
const util_1 = require("./util");
@@ -123,7 +127,7 @@ async function cacheDir(sourceDir, tool, version, toolCacheDir, logger) {
throw new Error("sourceDir is not a directory");
}
// Create the tool dir
const destPath = createToolPath(tool, version, arch, toolCacheDir, logger);
const destPath = await createToolPath(tool, version, arch, toolCacheDir, logger);
// copy each child item. do not move. move can fail on Windows
// due to anti-virus software having an open handle on a file.
for (const itemName of fs.readdirSync(sourceDir)) {
@@ -232,12 +236,12 @@ function createExtractFolder(tempDir) {
}
return dest;
}
function createToolPath(tool, version, arch, toolCacheDir, logger) {
async function createToolPath(tool, version, arch, toolCacheDir, logger) {
const folderPath = path.join(toolCacheDir, tool, semver.clean(version) || version, arch || "");
logger.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
fs.rmSync(folderPath, { recursive: true, force: true });
fs.rmSync(markerPath, { recursive: true, force: true });
await (0, del_1.default)(folderPath, { force: true });
await (0, del_1.default)(markerPath, { force: true });
fs.mkdirSync(folderPath, { recursive: true });
return folderPath;
}
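The hunk above swaps the synchronous `fs.rmSync` calls for the promise-based `del` package when clearing a stale tool directory and its `.complete` marker, which is why `cacheDir` now has to `await createToolPath(...)`. A minimal sketch of the same pattern, assuming `del` v6's promise API; `recreateToolPath` is a hypothetical stand-in, not the action's exact helper:

```ts
import * as fs from "fs";
import * as path from "path";
import del from "del";
import * as semver from "semver";

// Recreate the destination folder for a tool, removing any stale copy and its
// ".complete" marker first, as createToolPath above does.
async function recreateToolPath(
  toolCacheDir: string,
  tool: string,
  version: string,
  arch = ""
): Promise<string> {
  const folderPath = path.join(toolCacheDir, tool, semver.clean(version) || version, arch);
  const markerPath = `${folderPath}.complete`;
  // "force" lets del remove paths outside the current working directory.
  await del(folderPath, { force: true });
  await del(markerPath, { force: true });
  fs.mkdirSync(folderPath, { recursive: true });
  return folderPath;
}
```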

File diff suppressed because one or more lines are too long


@@ -44,10 +44,6 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout) !== undefined) {
options.listeners.stdout(data);
}
else {
// if no stdout listener was originally defined then we match default behavior of Toolrunner
process.stdout.write(data);
}
},
stderr: (data) => {
var _a;
@@ -55,10 +51,6 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stderr) !== undefined) {
options.listeners.stderr(data);
}
else {
// if no stderr listener was originally defined then we match default behavior of Toolrunner
process.stderr.write(data);
}
},
};
// we capture the original return code or error so that if no match is found we can duplicate the behavior


@@ -1 +1 @@
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAzED,wDAyEC"}
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAnED,wDAmEC"}

18
lib/tracer-config.js generated

@@ -182,15 +182,15 @@ async function getCombinedTracerConfig(config, codeql) {
tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
}
mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
}
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
}
// On macOS it's necessary to prefix the build command with the runner executable
// in order to trace when System Integrity Protection is enabled.
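The hunk above moves the tracer environment setup without changing the variables themselves: `ODASA_TRACER_CONFIGURATION` points at the generated spec, and the dynamic loader is told to inject the tracer library on macOS and Linux. A hedged sketch of just the platform branch, with `tracerInjectionEnv` as a hypothetical helper rather than the action's exact code:

```ts
import * as path from "path";

// Choose the dynamic-loader injection variable for the tracer library,
// following the platform branches shown in getCombinedTracerConfig above.
function tracerInjectionEnv(codeQLDir: string): Record<string, string> {
  if (process.platform === "darwin") {
    return { DYLD_INSERT_LIBRARIES: path.join(codeQLDir, "tools", "osx64", "libtrace.dylib") };
  }
  if (process.platform !== "win32") {
    // "${LIB}" is expanded by the loader itself, so it stays a literal string here.
    return { LD_PRELOAD: path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so") };
  }
  // Windows does not use a preload variable in this code path.
  return {};
}
```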

File diff suppressed because one or more lines are too long


@@ -44,6 +44,9 @@ function getTestConfig(tmpDir) {
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
}
// A very minimal setup

File diff suppressed because one or more lines are too long

148
lib/upload-lib.js generated

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
@@ -54,25 +54,24 @@ function combineSarifFiles(sarifFiles) {
}
combinedSarif.runs.push(...sarifObject.runs);
}
return JSON.stringify(combinedSarif);
return combinedSarif;
}
exports.combineSarifFiles = combineSarifFiles;
// Populates the run.automationDetails.id field using the analysis_key and environment
// and returns the updated sarif contents.
function populateRunAutomationDetails(sarifContents, category, analysis_key, environment) {
if (analysis_key === undefined) {
return sarifContents;
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
const automationID = getAutomationID(category, analysis_key, environment);
const sarif = JSON.parse(sarifContents);
for (const run of sarif.runs || []) {
if (run.automationDetails === undefined) {
run.automationDetails = {
id: automationID,
};
if (automationID !== undefined) {
for (const run of sarif.runs || []) {
if (run.automationDetails === undefined) {
run.automationDetails = {
id: automationID,
};
}
}
return sarif;
}
return JSON.stringify(sarif);
return sarif;
}
exports.populateRunAutomationDetails = populateRunAutomationDetails;
function getAutomationID(category, analysis_key, environment) {
@@ -83,7 +82,11 @@ function getAutomationID(category, analysis_key, environment) {
}
return automationID;
}
return actionsUtil.computeAutomationID(analysis_key, environment);
// analysis_key is undefined for the runner.
if (analysis_key !== undefined) {
return actionsUtil.computeAutomationID(analysis_key, environment);
}
return undefined;
}
// Upload the given payload.
// If the request fails then this will retry a small number of times.
@@ -105,6 +108,7 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
});
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
return response.data.id;
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
@@ -243,24 +247,18 @@ exports.buildPayload = buildPayload;
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) {
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
}
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = await fingerprints.addFingerprints(sarifPayload, sourceRoot, logger);
sarifPayload = populateRunAutomationDetails(sarifPayload, category, analysisKey, environment);
let sarif = combineSarifFiles(sarifFiles);
sarif = await fingerprints.addFingerprints(sarif, sourceRoot, logger);
sarif = populateRunAutomationDetails(sarif, category, analysisKey, environment);
const toolNames = util.getToolNames(sarif);
validateUniqueCategory(sarif);
const sarifPayload = JSON.stringify(sarif);
const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = (0, file_url_1.default)(sourceRoot);
const toolNames = util.getToolNames(sarifPayload);
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion);
// Log some useful debug info about the upload
const rawUploadSizeBytes = sarifPayload.length;
@@ -270,12 +268,102 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
await uploadPayload(payload, repositoryNwo, apiDetails, logger);
const sarifID = await uploadPayload(payload, repositoryNwo, apiDetails, logger);
logger.endGroup();
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
statusReport: {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
},
sarifID,
};
}
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
async function waitForProcessing(repositoryNwo, sarifID, apiDetails, logger) {
logger.startGroup("Waiting for processing to finish");
const client = api.getApiClient(apiDetails);
const statusCheckingStarted = Date.now();
// eslint-disable-next-line no-constant-condition
while (true) {
if (Date.now() >
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
break;
}
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
sarif_id: sarifID,
});
const status = response.data.processing_status;
logger.info(`Analysis upload status is ${status}.`);
if (status === "complete") {
break;
}
else if (status === "failed") {
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
}
}
catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
case 404:
logger.debug("Analysis is not found yet...");
break; // Note this breaks from the case statement, not the outer loop.
default:
throw e;
}
}
else {
throw e;
}
}
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
}
logger.endGroup();
}
exports.waitForProcessing = waitForProcessing;
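The loop above is the heart of the wait-for-processing behaviour: poll the code-scanning API for the SARIF's `processing_status` every `STATUS_CHECK_FREQUENCY_MILLISECONDS` (5 s) until it reports `complete` or `failed`, and give up quietly once `STATUS_CHECK_TIMEOUT_MILLISECONDS` (2 min) has elapsed. A stripped-down sketch of the same pattern, with a hypothetical `getStatus` callback standing in for the Octokit request:

```ts
// Poll a status callback until it reports "complete" or "failed", or until
// timeoutMs elapses, mirroring waitForProcessing above.
async function pollUntilProcessed(
  getStatus: () => Promise<"pending" | "complete" | "failed">,
  intervalMs = 5_000,
  timeoutMs = 2 * 60 * 1000
): Promise<void> {
  const started = Date.now();
  while (Date.now() - started <= timeoutMs) {
    const status = await getStatus();
    if (status === "complete") {
      return;
    }
    if (status === "failed") {
      throw new Error("Code Scanning could not process the submitted SARIF file");
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  // As in the action, a timeout is logged rather than treated as a failure.
}
```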
function validateUniqueCategory(sarif) {
var _a, _b, _c;
// This check only works on actions as env vars don't persist between calls to the runner
if (util.isActions()) {
// duplicate categories are allowed in the same sarif file
// but not across multiple sarif files
const categories = {};
for (const run of sarif.runs) {
const id = (_a = run === null || run === void 0 ? void 0 : run.automationDetails) === null || _a === void 0 ? void 0 : _a.id;
const tool = (_c = (_b = run.tool) === null || _b === void 0 ? void 0 : _b.driver) === null || _c === void 0 ? void 0 : _c.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
categories[category] = { id, tool };
}
for (const [category, { id, tool }] of Object.entries(categories)) {
const sentinelEnvVar = `CODEQL_UPLOAD_SARIF_${category}`;
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. " +
"The easiest fix is to specify a unique value for the `category` input. " +
`Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})`);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
}
}
exports.validateUniqueCategory = validateUniqueCategory;
/**
* Sanitizes a string to be used as an environment variable name.
* This will replace all non-alphanumeric characters with underscores.
* There could still be some false category clashes if two uploads
* occur that differ only in their non-alphanumeric characters. This is
* unlikely.
*
* @param str the initial value to sanitize
*/
function sanitize(str) {
return (str !== null && str !== void 0 ? str : "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
//# sourceMappingURL=upload-lib.js.map
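The uniqueness check above works by exporting one sentinel environment variable per sanitized automation-details id and tool name; a second upload in the same job that maps to the same variable aborts. A small sketch of the sentinel naming, with `sentinelName` as a hypothetical helper that follows the same rules as `sanitize` above:

```ts
// Build the sentinel environment variable name for a SARIF run, following
// validateUniqueCategory/sanitize above. Every non-alphanumeric character
// collapses to "_", so distinct inputs can collide.
function sentinelName(automationId?: string, toolName?: string): string {
  const sanitize = (s?: string) =>
    (s ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
  return `CODEQL_UPLOAD_SARIF_${sanitize(automationId)}_${sanitize(toolName)}`;
}

// "abc@def" and "abc_def" clash, which is one of the false positives the
// tests below call out.
console.log(sentinelName("abc@def", "CodeQL")); // CODEQL_UPLOAD_SARIF_ABC_DEF_CODEQL
console.log(sentinelName("abc_def", "CodeQL")); // CODEQL_UPLOAD_SARIF_ABC_DEF_CODEQL
```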

File diff suppressed because one or more lines are too long

102
lib/upload-lib.test.js generated

@@ -98,9 +98,13 @@ ava_1.default.beforeEach(() => {
});
});
(0, ava_1.default)("populateRunAutomationDetails", (t) => {
let sarif = '{"runs": [{}]}';
let sarif = {
runs: [{}],
};
const analysisKey = ".github/workflows/codeql-analysis.yml:analyze";
let expectedSarif = '{"runs":[{"automationDetails":{"id":"language:javascript/os:linux/"}}]}';
let expectedSarif = {
runs: [{ automationDetails: { id: "language:javascript/os:linux/" } }],
};
// Category has priority over analysis_key/environment
let modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, "language:javascript/os:linux", analysisKey, '{"language": "other", "os": "other"}');
t.deepEqual(modifiedSarif, expectedSarif);
@@ -108,9 +112,99 @@ ava_1.default.beforeEach(() => {
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, "language:javascript/os:linux/", analysisKey, "");
t.deepEqual(modifiedSarif, expectedSarif);
// check that the automation details doesn't get overwritten
sarif = '{"runs":[{"automationDetails":{"id":"my_id"}}]}';
expectedSarif = '{"runs":[{"automationDetails":{"id":"my_id"}}]}';
sarif = { runs: [{ automationDetails: { id: "my_id" } }] };
expectedSarif = { runs: [{ automationDetails: { id: "my_id" } }] };
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
t.deepEqual(modifiedSarif, expectedSarif);
// check multiple runs
sarif = { runs: [{ automationDetails: { id: "my_id" } }, {}] };
expectedSarif = {
runs: [
{ automationDetails: { id: "my_id" } },
{
automationDetails: {
id: ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/",
},
},
],
};
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
t.deepEqual(modifiedSarif, expectedSarif);
});
(0, ava_1.default)("validateUniqueCategory when empty", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif()));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif()));
});
(0, ava_1.default)("validateUniqueCategory for automation details id", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("AbC")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("def")));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc/def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc@def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc def")));
// this one is fine
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_ def")));
});
(0, ava_1.default)("validateUniqueCategory for tool name", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "AbC")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "def")));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc/def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc@def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc_def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc def")));
// this one is fine
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_ def")));
});
(0, ava_1.default)("validateUniqueCategory for automation details id and tool name", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "abc")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_", "def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_", "def")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("ghi", "_jkl")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("ghi", "_jkl")));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "_")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "def__")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_def")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("mno_", "pqr")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("mno", "_pqr")));
});
(0, ava_1.default)("validateUniqueCategory for multiple runs", (t) => {
const sarif1 = createMockSarif("abc", "def");
const sarif2 = createMockSarif("ghi", "jkl");
// duplicate categories are allowed within the same sarif file
const multiSarif = { runs: [sarif1.runs[0], sarif1.runs[0], sarif2.runs[0]] };
t.notThrows(() => uploadLib.validateUniqueCategory(multiSarif));
// should throw if there are duplicate categories in separate validations
t.throws(() => uploadLib.validateUniqueCategory(sarif1));
t.throws(() => uploadLib.validateUniqueCategory(sarif2));
});
function createMockSarif(id, tool) {
return {
runs: [
{
automationDetails: {
id,
},
tool: {
driver: {
name: tool,
},
},
},
],
};
}
//# sourceMappingURL=upload-lib.test.js.map

File diff suppressed because one or more lines are too long


@@ -22,6 +22,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
@@ -46,8 +47,11 @@ async function run() {
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
await sendSuccessStatusReport(startedAt, uploadStats);
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
}
catch (error) {
const message = error instanceof Error ? error.message : String(error);


@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

99
lib/util.js generated

@@ -18,20 +18,37 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.GITHUB_DOTCOM_URL = void 0;
exports.checkNotWindows11 = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const del_1 = __importDefault(require("del"));
const semver = __importStar(require("semver"));
const api_client_1 = require("./api-client");
const apiCompatibility = __importStar(require("./api-compatibility.json"));
const codeql_1 = require("./codeql");
/**
* Specifies bundle versions that are known to be broken
* and will not be used if found in the toolcache.
*/
const BROKEN_VERSIONS = ["0.0.0-20211207"];
/**
* The URL for github.com.
*/
exports.GITHUB_DOTCOM_URL = "https://github.com";
/**
* Default name of the debugging artifact.
*/
exports.DEFAULT_DEBUG_ARTIFACT_NAME = "debug-artifacts";
/**
* Default name of the database in the debugging artifact.
*/
exports.DEFAULT_DEBUG_DATABASE_NAME = "db";
/**
* Get the extra options for the codeql commands.
*/
@@ -55,8 +72,7 @@ exports.getExtraOptionsEnvParam = getExtraOptionsEnvParam;
*
* Returns an array of unique string tool names.
*/
function getToolNames(sarifContents) {
const sarif = JSON.parse(sarifContents);
function getToolNames(sarif) {
const toolNames = {};
for (const run of sarif.runs || []) {
const tool = run.tool || {};
@@ -77,7 +93,7 @@ async function withTmpDir(body) {
const symlinkSubdir = path.join(tmpDir, "symlink");
fs.symlinkSync(realSubdir, symlinkSubdir, "dir");
const result = await body(symlinkSubdir);
fs.rmSync(tmpDir, { recursive: true, force: true });
await (0, del_1.default)(tmpDir, { force: true });
return result;
}
exports.withTmpDir = withTmpDir;
@@ -93,13 +109,13 @@ function getSystemReservedMemoryMegaBytes() {
return 1024 * (process.platform === "win32" ? 1.5 : 1);
}
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus a threshold
* reserved for the OS.
* Get the value of the codeql `--ram` flag as configured by the `ram` input.
* If no value was specified, the total available memory will be used minus a
* threshold reserved for the OS.
*
* @returns string
* @returns {number} the amount of RAM to use, in megabytes
*/
function getMemoryFlag(userInput) {
function getMemoryFlagValue(userInput) {
let memoryToUseMegaBytes;
if (userInput) {
memoryToUseMegaBytes = Number(userInput);
@@ -113,7 +129,18 @@ function getMemoryFlag(userInput) {
const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes();
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
}
return `--ram=${Math.floor(memoryToUseMegaBytes)}`;
return Math.floor(memoryToUseMegaBytes);
}
exports.getMemoryFlagValue = getMemoryFlagValue;
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus a threshold
* reserved for the OS.
*
* @returns string
*/
function getMemoryFlag(userInput) {
return `--ram=${getMemoryFlagValue(userInput)}`;
}
exports.getMemoryFlag = getMemoryFlag;
/**
@@ -130,14 +157,14 @@ function getAddSnippetsFlag(userInput) {
}
exports.getAddSnippetsFlag = getAddSnippetsFlag;
/**
* Get the codeql `--threads` value specified for the `threads` input.
* If no value was specified, all available threads will be used.
* Get the value of the codeql `--threads` flag specified for the `threads`
* input. If no value was specified, all available threads will be used.
*
* The value will be capped to the number of available CPUs.
*
* @returns string
* @returns {number}
*/
function getThreadsFlag(userInput, logger) {
function getThreadsFlagValue(userInput, logger) {
let numThreads;
const maxThreads = os.cpus().length;
if (userInput) {
@@ -159,7 +186,19 @@ function getThreadsFlag(userInput, logger) {
// Default to using all threads
numThreads = maxThreads;
}
return `--threads=${numThreads}`;
return numThreads;
}
exports.getThreadsFlagValue = getThreadsFlagValue;
/**
* Get the codeql `--threads` flag specified for the `threads` input.
* If no value was specified, all available threads will be used.
*
* The value will be capped to the number of available CPUs.
*
* @returns string
*/
function getThreadsFlag(userInput, logger) {
return `--threads=${getThreadsFlagValue(userInput, logger)}`;
}
exports.getThreadsFlag = getThreadsFlag;
/**
@@ -455,4 +494,34 @@ async function codeQlVersionAbove(codeql, requiredVersion) {
return semver.gte(await codeql.getVersion(), requiredVersion);
}
exports.codeQlVersionAbove = codeQlVersionAbove;
// Create a bundle for the given DB, if it doesn't already exist
async function bundleDb(config, language, codeql, dbName) {
const databasePath = getCodeQLDatabasePath(config, language);
const databaseBundlePath = path.resolve(config.dbLocation, `${dbName}.zip`);
// For a tiny bit of added safety, delete the file if it exists.
// The file is probably from an earlier call to this function, either
// as part of this action step or a previous one, but it could also be
// from somewhere else or someone trying to make the action upload a
// non-database file.
if (fs.existsSync(databaseBundlePath)) {
await (0, del_1.default)(databaseBundlePath, { force: true });
}
await codeql.databaseBundle(databasePath, databaseBundlePath, dbName);
return databaseBundlePath;
}
exports.bundleDb = bundleDb;
async function delay(milliseconds) {
return new Promise((resolve) => setTimeout(resolve, milliseconds));
}
exports.delay = delay;
function isGoodVersion(versionSpec) {
return !BROKEN_VERSIONS.includes(versionSpec);
}
exports.isGoodVersion = isGoodVersion;
function checkNotWindows11() {
if (os.platform() === "win32" && semver.gte(os.release(), "10.0.20348")) {
throw new Error("Tracing builds with CodeQL is currently not supported on Windows 11 and Windows Server 2022. Please modify your Actions workflow to use an earlier version of Windows for this job, for example by setting `runs-on: windows-2019`.");
}
}
exports.checkNotWindows11 = checkNotWindows11;
//# sourceMappingURL=util.js.map
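The refactor above splits each flag helper into a value function plus a thin wrapper that renders the CLI flag, so callers can reuse the numeric value directly. A hedged sketch of the default `--ram` computation when no `ram` input is given, using Node's `os.totalmem()` for the total and the same reservation rule as `getSystemReservedMemoryMegaBytes` above; `defaultRamMegaBytes` is a hypothetical name:

```ts
import * as os from "os";

// Default RAM for CodeQL: total memory minus an OS reservation
// (1.5 GB on Windows, 1 GB elsewhere), as in getMemoryFlagValue above.
function defaultRamMegaBytes(): number {
  const totalMemoryMegaBytes = os.totalmem() / (1024 * 1024);
  const reservedMemoryMegaBytes = 1024 * (process.platform === "win32" ? 1.5 : 1);
  return Math.floor(totalMemoryMegaBytes - reservedMemoryMegaBytes);
}

// The wrapper only renders the flag string:
const ramFlag = `--ram=${defaultRamMegaBytes()}`;
console.log(ramFlag);
```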

File diff suppressed because one or more lines are too long

2
lib/util.test.js generated

@@ -35,7 +35,7 @@ const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("getToolNames", (t) => {
const input = fs.readFileSync(`${__dirname}/../src/testdata/tool-names.sarif`, "utf8");
const toolNames = util.getToolNames(input);
const toolNames = util.getToolNames(JSON.parse(input));
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
(0, ava_1.default)("getMemoryFlag() should return the correct --ram flag", (t) => {

File diff suppressed because one or more lines are too long

2010
node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large


@@ -1,125 +0,0 @@
# is-plain-object [![NPM version](https://img.shields.io/npm/v/is-plain-object.svg?style=flat)](https://www.npmjs.com/package/is-plain-object) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![NPM total downloads](https://img.shields.io/npm/dt/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-plain-object.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-plain-object)
> Returns true if an object was created by the `Object` constructor, or Object.create(null).
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
## Install
Install with [npm](https://www.npmjs.com/):
```sh
$ npm install --save is-plain-object
```
Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null.
## Usage
with es modules
```js
import { isPlainObject } from 'is-plain-object';
```
or with commonjs
```js
const { isPlainObject } = require('is-plain-object');
```
**true** when created by the `Object` constructor, or Object.create(null).
```js
isPlainObject(Object.create({}));
//=> true
isPlainObject(Object.create(Object.prototype));
//=> true
isPlainObject({foo: 'bar'});
//=> true
isPlainObject({});
//=> true
isPlainObject(null);
//=> true
```
**false** when not created by the `Object` constructor.
```js
isPlainObject(1);
//=> false
isPlainObject(['foo', 'bar']);
//=> false
isPlainObject([]);
//=> false
isPlainObject(new Foo);
//=> false
isPlainObject(Object.create(null));
//=> false
```
## About
<details>
<summary><strong>Contributing</strong></summary>
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
</details>
<details>
<summary><strong>Running Tests</strong></summary>
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
```sh
$ npm install && npm test
```
</details>
<details>
<summary><strong>Building docs</strong></summary>
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
To generate the readme, run the following command:
```sh
$ npm install -g verbose/verb#dev verb-generate-readme && verb
```
</details>
### Related projects
You might also be interested in these projects:
* [is-number](https://www.npmjs.com/package/is-number): Returns true if a number or string value is a finite number. Useful for regex… [more](https://github.com/jonschlinkert/is-number) | [homepage](https://github.com/jonschlinkert/is-number "Returns true if a number or string value is a finite number. Useful for regex matches, parsing, user input, etc.")
* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.")
* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.")
### Contributors
| **Commits** | **Contributor** |
| --- | --- |
| 19 | [jonschlinkert](https://github.com/jonschlinkert) |
| 6 | [TrySound](https://github.com/TrySound) |
| 6 | [stevenvachon](https://github.com/stevenvachon) |
| 3 | [onokumus](https://github.com/onokumus) |
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
### Author
**Jon Schlinkert**
* [GitHub Profile](https://github.com/jonschlinkert)
* [Twitter Profile](https://twitter.com/jonschlinkert)
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
### License
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
Released under the [MIT License](LICENSE).
***
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 28, 2019._


@@ -1,85 +0,0 @@
{
"name": "is-plain-object",
"description": "Returns true if an object was created by the `Object` constructor, or Object.create(null).",
"version": "5.0.0",
"homepage": "https://github.com/jonschlinkert/is-plain-object",
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
"contributors": [
"Jon Schlinkert (http://twitter.com/jonschlinkert)",
"Osman Nuri Okumuş (http://onokumus.com)",
"Steven Vachon (https://svachon.com)",
"(https://github.com/wtgtybhertgeghgtwtg)",
"Bogdan Chadkin (https://github.com/TrySound)"
],
"repository": "jonschlinkert/is-plain-object",
"bugs": {
"url": "https://github.com/jonschlinkert/is-plain-object/issues"
},
"license": "MIT",
"main": "dist/is-plain-object.js",
"module": "dist/is-plain-object.mjs",
"types": "is-plain-object.d.ts",
"files": [
"is-plain-object.d.ts",
"dist"
],
"exports": {
".": {
"import": "./dist/is-plain-object.mjs",
"require": "./dist/is-plain-object.js"
},
"./package.json": "./package.json"
},
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"build": "rollup -c",
"test_browser": "mocha-headless-chrome --args=disable-web-security -f test/browser.html",
"test_node": "mocha -r esm",
"test": "npm run test_node && npm run build && npm run test_browser",
"prepare": "rollup -c"
},
"devDependencies": {
"chai": "^4.2.0",
"esm": "^3.2.22",
"gulp-format-md": "^1.0.0",
"mocha": "^6.1.4",
"mocha-headless-chrome": "^3.1.0",
"rollup": "^2.22.1"
},
"keywords": [
"check",
"is",
"is-object",
"isobject",
"javascript",
"kind",
"kind-of",
"object",
"plain",
"type",
"typeof",
"value"
],
"verb": {
"toc": false,
"layout": "default",
"tasks": [
"readme"
],
"plugins": [
"gulp-format-md"
],
"related": {
"list": [
"is-number",
"isobject",
"kind-of"
]
},
"lint": {
"reflinks": true
}
}
}


@@ -1,125 +0,0 @@
# is-plain-object [![NPM version](https://img.shields.io/npm/v/is-plain-object.svg?style=flat)](https://www.npmjs.com/package/is-plain-object) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![NPM total downloads](https://img.shields.io/npm/dt/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-plain-object.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-plain-object)
> Returns true if an object was created by the `Object` constructor, or Object.create(null).
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
## Install
Install with [npm](https://www.npmjs.com/):
```sh
$ npm install --save is-plain-object
```
Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null.
## Usage
with es modules
```js
import { isPlainObject } from 'is-plain-object';
```
or with commonjs
```js
const { isPlainObject } = require('is-plain-object');
```
**true** when created by the `Object` constructor, or Object.create(null).
```js
isPlainObject(Object.create({}));
//=> true
isPlainObject(Object.create(Object.prototype));
//=> true
isPlainObject({foo: 'bar'});
//=> true
isPlainObject({});
//=> true
isPlainObject(null);
//=> true
```
**false** when not created by the `Object` constructor.
```js
isPlainObject(1);
//=> false
isPlainObject(['foo', 'bar']);
//=> false
isPlainObject([]);
//=> false
isPlainObject(new Foo);
//=> false
isPlainObject(Object.create(null));
//=> false
```
## About
<details>
<summary><strong>Contributing</strong></summary>
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
</details>
<details>
<summary><strong>Running Tests</strong></summary>
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
```sh
$ npm install && npm test
```
</details>
<details>
<summary><strong>Building docs</strong></summary>
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
To generate the readme, run the following command:
```sh
$ npm install -g verbose/verb#dev verb-generate-readme && verb
```
</details>
### Related projects
You might also be interested in these projects:
* [is-number](https://www.npmjs.com/package/is-number): Returns true if a number or string value is a finite number. Useful for regex… [more](https://github.com/jonschlinkert/is-number) | [homepage](https://github.com/jonschlinkert/is-number "Returns true if a number or string value is a finite number. Useful for regex matches, parsing, user input, etc.")
* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.")
* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.")
### Contributors
| **Commits** | **Contributor** |
| --- | --- |
| 19 | [jonschlinkert](https://github.com/jonschlinkert) |
| 6 | [TrySound](https://github.com/TrySound) |
| 6 | [stevenvachon](https://github.com/stevenvachon) |
| 3 | [onokumus](https://github.com/onokumus) |
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
### Author
**Jon Schlinkert**
* [GitHub Profile](https://github.com/jonschlinkert)
* [Twitter Profile](https://twitter.com/jonschlinkert)
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
### License
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
Released under the [MIT License](LICENSE).
***
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 28, 2019._


@@ -1,38 +0,0 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
/*!
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
*
* Copyright (c) 2014-2017, Jon Schlinkert.
* Released under the MIT License.
*/
function isObject(o) {
return Object.prototype.toString.call(o) === '[object Object]';
}
function isPlainObject(o) {
var ctor,prot;
if (isObject(o) === false) return false;
// If has modified constructor
ctor = o.constructor;
if (ctor === undefined) return true;
// If has modified prototype
prot = ctor.prototype;
if (isObject(prot) === false) return false;
// If constructor does not have an Object-specific method
if (prot.hasOwnProperty('isPrototypeOf') === false) {
return false;
}
// Most likely a plain Object
return true;
}
exports.isPlainObject = isPlainObject;


@@ -1,34 +0,0 @@
/*!
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
*
* Copyright (c) 2014-2017, Jon Schlinkert.
* Released under the MIT License.
*/
function isObject(o) {
return Object.prototype.toString.call(o) === '[object Object]';
}
function isPlainObject(o) {
var ctor,prot;
if (isObject(o) === false) return false;
// If has modified constructor
ctor = o.constructor;
if (ctor === undefined) return true;
// If has modified prototype
prot = ctor.prototype;
if (isObject(prot) === false) return false;
// If constructor does not have an Object-specific method
if (prot.hasOwnProperty('isPrototypeOf') === false) {
return false;
}
// Most likely a plain Object
return true;
}
export { isPlainObject };


@@ -1 +0,0 @@
export function isPlainObject(o: any): boolean;


@@ -1,85 +0,0 @@
{
"name": "is-plain-object",
"description": "Returns true if an object was created by the `Object` constructor, or Object.create(null).",
"version": "5.0.0",
"homepage": "https://github.com/jonschlinkert/is-plain-object",
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
"contributors": [
"Jon Schlinkert (http://twitter.com/jonschlinkert)",
"Osman Nuri Okumuş (http://onokumus.com)",
"Steven Vachon (https://svachon.com)",
"(https://github.com/wtgtybhertgeghgtwtg)",
"Bogdan Chadkin (https://github.com/TrySound)"
],
"repository": "jonschlinkert/is-plain-object",
"bugs": {
"url": "https://github.com/jonschlinkert/is-plain-object/issues"
},
"license": "MIT",
"main": "dist/is-plain-object.js",
"module": "dist/is-plain-object.mjs",
"types": "is-plain-object.d.ts",
"files": [
"is-plain-object.d.ts",
"dist"
],
"exports": {
".": {
"import": "./dist/is-plain-object.mjs",
"require": "./dist/is-plain-object.js"
},
"./package.json": "./package.json"
},
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"build": "rollup -c",
"test_browser": "mocha-headless-chrome --args=disable-web-security -f test/browser.html",
"test_node": "mocha -r esm",
"test": "npm run test_node && npm run build && npm run test_browser",
"prepare": "rollup -c"
},
"devDependencies": {
"chai": "^4.2.0",
"esm": "^3.2.22",
"gulp-format-md": "^1.0.0",
"mocha": "^6.1.4",
"mocha-headless-chrome": "^3.1.0",
"rollup": "^2.22.1"
},
"keywords": [
"check",
"is",
"is-object",
"isobject",
"javascript",
"kind",
"kind-of",
"object",
"plain",
"type",
"typeof",
"value"
],
"verb": {
"toc": false,
"layout": "default",
"tasks": [
"readme"
],
"plugins": [
"gulp-format-md"
],
"related": {
"list": [
"is-number",
"isobject",
"kind-of"
]
},
"lint": {
"reflinks": true
}
}
}

3
node_modules/@types/del/README.md generated vendored Normal file

@@ -0,0 +1,3 @@
This is a stub types definition for del (https://github.com/sindresorhus/del).
del provides its own type definitions, so you don't need @types/del installed!

14
node_modules/@types/del/package.json generated vendored Normal file

@@ -0,0 +1,14 @@
{
"name": "@types/del",
"version": "4.0.0",
"typings": null,
"description": "Stub TypeScript definitions entry for del, which provides its own types definitions",
"main": "",
"scripts": {},
"author": "",
"repository": "https://github.com/sindresorhus/del",
"license": "MIT",
"dependencies": {
"del": "*"
}
}

16
node_modules/@types/events/README.md generated vendored

@@ -1,16 +0,0 @@
# Installation
> `npm install --save @types/events`
# Summary
This package contains type definitions for events (https://github.com/Gozala/events).
# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/events
Additional Details
* Last updated: Thu, 24 Jan 2019 03:19:08 GMT
* Dependencies: none
* Global values: none
# Credits
These definitions were written by Yasunori Ohoka <https://github.com/yasupeke>, Shenwei Wang <https://github.com/weareoutman>.


@@ -1,28 +0,0 @@
// Type definitions for events 3.0
// Project: https://github.com/Gozala/events
// Definitions by: Yasunori Ohoka <https://github.com/yasupeke>
// Shenwei Wang <https://github.com/weareoutman>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
export type Listener = (...args: any[]) => void;
export class EventEmitter {
static listenerCount(emitter: EventEmitter, type: string | number): number;
static defaultMaxListeners: number;
eventNames(): Array<string | number>;
setMaxListeners(n: number): this;
getMaxListeners(): number;
emit(type: string | number, ...args: any[]): boolean;
addListener(type: string | number, listener: Listener): this;
on(type: string | number, listener: Listener): this;
once(type: string | number, listener: Listener): this;
prependListener(type: string | number, listener: Listener): this;
prependOnceListener(type: string | number, listener: Listener): this;
removeListener(type: string | number, listener: Listener): this;
off(type: string | number, listener: Listener): this;
removeAllListeners(type?: string | number): this;
listeners(type: string | number): Listener[];
listenerCount(type: string | number): number;
rawListeners(type: string | number): Listener[];
}


@@ -1,28 +0,0 @@
{
"name": "@types/events",
"version": "3.0.0",
"description": "TypeScript definitions for events",
"license": "MIT",
"contributors": [
{
"name": "Yasunori Ohoka",
"url": "https://github.com/yasupeke",
"githubUsername": "yasupeke"
},
{
"name": "Shenwei Wang",
"url": "https://github.com/weareoutman",
"githubUsername": "weareoutman"
}
],
"main": "",
"types": "index",
"repository": {
"type": "git",
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git"
},
"scripts": {},
"dependencies": {},
"typesPublisherContentHash": "ae078136220837864b64cc7c1c5267ca1ceb809166fb74569e637bc7de9f2e12",
"typeScriptVersion": "2.0"
}

21
node_modules/@types/glob/LICENSE generated vendored

@@ -1,21 +0,0 @@
MIT License
Copyright (c) Microsoft Corporation. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE

16
node_modules/@types/glob/README.md generated vendored

@@ -1,16 +0,0 @@
# Installation
> `npm install --save @types/glob`
# Summary
This package contains type definitions for Glob (https://github.com/isaacs/node-glob).
# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/glob
Additional Details
* Last updated: Thu, 27 Sep 2018 12:34:19 GMT
* Dependencies: events, minimatch, node
* Global values: none
# Credits
These definitions were written by vvakame <https://github.com/vvakame>, voy <https://github.com/voy>, Klaus Meinhardt <https://github.com/ajafff>.

87
node_modules/@types/glob/index.d.ts generated vendored

@@ -1,87 +0,0 @@
// Type definitions for Glob 7.1
// Project: https://github.com/isaacs/node-glob
// Definitions by: vvakame <https://github.com/vvakame>
// voy <https://github.com/voy>
// Klaus Meinhardt <https://github.com/ajafff>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/// <reference types="node" />
import events = require("events");
import minimatch = require("minimatch");
declare function G(pattern: string, cb: (err: Error | null, matches: string[]) => void): void;
declare function G(pattern: string, options: G.IOptions, cb: (err: Error | null, matches: string[]) => void): void;
declare namespace G {
function __promisify__(pattern: string, options?: IOptions): Promise<string[]>;
function sync(pattern: string, options?: IOptions): string[];
function hasMagic(pattern: string, options?: IOptions): boolean;
let Glob: IGlobStatic;
let GlobSync: IGlobSyncStatic;
interface IOptions extends minimatch.IOptions {
cwd?: string;
root?: string;
dot?: boolean;
nomount?: boolean;
mark?: boolean;
nosort?: boolean;
stat?: boolean;
silent?: boolean;
strict?: boolean;
cache?: { [path: string]: boolean | 'DIR' | 'FILE' | ReadonlyArray<string> };
statCache?: { [path: string]: false | { isDirectory(): boolean} | undefined };
symlinks?: { [path: string]: boolean | undefined };
realpathCache?: { [path: string]: string };
sync?: boolean;
nounique?: boolean;
nonull?: boolean;
debug?: boolean;
nobrace?: boolean;
noglobstar?: boolean;
noext?: boolean;
nocase?: boolean;
matchBase?: any;
nodir?: boolean;
ignore?: string | ReadonlyArray<string>;
follow?: boolean;
realpath?: boolean;
nonegate?: boolean;
nocomment?: boolean;
absolute?: boolean;
}
interface IGlobStatic extends events.EventEmitter {
new (pattern: string, cb?: (err: Error | null, matches: string[]) => void): IGlob;
new (pattern: string, options: IOptions, cb?: (err: Error | null, matches: string[]) => void): IGlob;
prototype: IGlob;
}
interface IGlobSyncStatic {
new (pattern: string, options?: IOptions): IGlobBase;
prototype: IGlobBase;
}
interface IGlobBase {
minimatch: minimatch.IMinimatch;
options: IOptions;
aborted: boolean;
cache: { [path: string]: boolean | 'DIR' | 'FILE' | ReadonlyArray<string> };
statCache: { [path: string]: false | { isDirectory(): boolean; } | undefined };
symlinks: { [path: string]: boolean | undefined };
realpathCache: { [path: string]: string };
found: string[];
}
interface IGlob extends IGlobBase, events.EventEmitter {
pause(): void;
resume(): void;
abort(): void;
}
}
export = G;
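
For context, here is a hedged sketch of how the glob typings above are typically consumed; the patterns and option values are illustrative only:

import glob = require("glob"); // matches the `export = G` style of the declaration above

// Callback form: (err, matches), as declared for the G() overloads
glob("src/**/*.ts", { nodir: true, ignore: "**/*.test.ts" }, (err, matches) => {
  if (err) throw err;
  console.log(`${matches.length} files matched`);
});

// Synchronous form returning string[]
const readmes: string[] = glob.sync("*.md", { dot: false });
console.log(readmes);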


@@ -1,36 +0,0 @@
{
"name": "@types/glob",
"version": "7.1.1",
"description": "TypeScript definitions for Glob",
"license": "MIT",
"contributors": [
{
"name": "vvakame",
"url": "https://github.com/vvakame",
"githubUsername": "vvakame"
},
{
"name": "voy",
"url": "https://github.com/voy",
"githubUsername": "voy"
},
{
"name": "Klaus Meinhardt",
"url": "https://github.com/ajafff",
"githubUsername": "ajafff"
}
],
"main": "",
"repository": {
"type": "git",
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git"
},
"scripts": {},
"dependencies": {
"@types/events": "*",
"@types/minimatch": "*",
"@types/node": "*"
},
"typesPublisherContentHash": "43019f2af91c7a4ca3453c4b806a01c521ca3008ffe1bfefd37c5f9d6135660e",
"typeScriptVersion": "2.0"
}


@@ -1,21 +0,0 @@
MIT License
Copyright (c) Microsoft Corporation. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE


@@ -1,16 +0,0 @@
# Installation
> `npm install --save @types/minimatch`
# Summary
This package contains type definitions for Minimatch (https://github.com/isaacs/minimatch).
# Details
Files were exported from https://www.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/minimatch
Additional Details
* Last updated: Thu, 04 Jan 2018 23:26:01 GMT
* Dependencies: none
* Global values: none
# Credits
These definitions were written by vvakame <https://github.com/vvakame>, Shant Marouti <https://github.com/shantmarouti>.


@@ -1,214 +0,0 @@
// Type definitions for Minimatch 3.0
// Project: https://github.com/isaacs/minimatch
// Definitions by: vvakame <https://github.com/vvakame>
// Shant Marouti <https://github.com/shantmarouti>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/**
* Tests a path against the pattern using the options.
*/
declare function M(target: string, pattern: string, options?: M.IOptions): boolean;
declare namespace M {
/**
* Match against the list of files, in the style of fnmatch or glob.
* If nothing is matched, and options.nonull is set,
* then return a list containing the pattern itself.
*/
function match(list: ReadonlyArray<string>, pattern: string, options?: IOptions): string[];
/**
* Returns a function that tests its supplied argument, suitable for use with Array.filter
*/
function filter(pattern: string, options?: IOptions): (element: string, indexed: number, array: ReadonlyArray<string>) => boolean;
/**
* Make a regular expression object from the pattern.
*/
function makeRe(pattern: string, options?: IOptions): RegExp;
let Minimatch: IMinimatchStatic;
interface IOptions {
/**
* Dump a ton of stuff to stderr.
*
* @default false
*/
debug?: boolean;
/**
* Do not expand {a,b} and {1..3} brace sets.
*
* @default false
*/
nobrace?: boolean;
/**
* Disable ** matching against multiple folder names.
*
* @default false
*/
noglobstar?: boolean;
/**
* Allow patterns to match filenames starting with a period,
* even if the pattern does not explicitly have a period in that spot.
*
* @default false
*/
dot?: boolean;
/**
* Disable "extglob" style patterns like +(a|b).
*
* @default false
*/
noext?: boolean;
/**
* Perform a case-insensitive match.
*
* @default false
*/
nocase?: boolean;
/**
* When a match is not found by minimatch.match,
* return a list containing the pattern itself if this option is set.
* Otherwise, an empty list is returned if there are no matches.
*
* @default false
*/
nonull?: boolean;
/**
* If set, then patterns without slashes will be matched against
* the basename of the path if it contains slashes.
*
* @default false
*/
matchBase?: boolean;
/**
* Suppress the behavior of treating #
* at the start of a pattern as a comment.
*
* @default false
*/
nocomment?: boolean;
/**
* Suppress the behavior of treating a leading ! character as negation.
*
* @default false
*/
nonegate?: boolean;
/**
* Returns from negate expressions the same as if they were not negated.
* (Ie, true on a hit, false on a miss.)
*
* @default false
*/
flipNegate?: boolean;
}
interface IMinimatchStatic {
new(pattern: string, options?: IOptions): IMinimatch;
prototype: IMinimatch;
}
interface IMinimatch {
/**
* The original pattern the minimatch object represents.
*/
pattern: string;
/**
* The options supplied to the constructor.
*/
options: IOptions;
/**
* A 2-dimensional array of regexp or string expressions.
*/
set: any[][]; // (RegExp | string)[][]
/**
* A single regular expression expressing the entire pattern.
* Created by the makeRe method.
*/
regexp: RegExp;
/**
* True if the pattern is negated.
*/
negate: boolean;
/**
* True if the pattern is a comment.
*/
comment: boolean;
/**
* True if the pattern is ""
*/
empty: boolean;
/**
* Generate the regexp member if necessary, and return it.
* Will return false if the pattern is invalid.
*/
makeRe(): RegExp; // regexp or boolean
/**
* Return true if the filename matches the pattern, or false otherwise.
*/
match(fname: string): boolean;
/**
* Take a /-split filename, and match it against a single row in the regExpSet.
* This method is mainly for internal use, but is exposed so that it can be used
* by a glob-walker that needs to avoid excessive filesystem calls.
*/
matchOne(files: string[], pattern: string[], partial: boolean): boolean;
/**
* Deprecated. For internal use.
*
* @private
*/
debug(): void;
/**
* Deprecated. For internal use.
*
* @private
*/
make(): void;
/**
* Deprecated. For internal use.
*
* @private
*/
parseNegate(): void;
/**
* Deprecated. For internal use.
*
* @private
*/
braceExpand(pattern: string, options: IOptions): void;
/**
* Deprecated. For internal use.
*
* @private
*/
parse(pattern: string, isSub?: boolean): void;
}
}
export = M;
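
Likewise, a small illustrative sketch against the minimatch typings above (patterns and inputs are made up for the example):

import minimatch = require("minimatch"); // `export = M`, as declared above

// Top-level test of one path against one pattern
const hit: boolean = minimatch("src/index.ts", "src/**/*.ts");

// filter() returns a predicate suitable for Array.prototype.filter
const tsFiles = ["a.ts", "b.md", ".env.ts"].filter(minimatch.filter("*.ts", { dot: true }));

// Reusable matcher object (IMinimatch)
const md = new minimatch.Minimatch("*.md", { nocase: true });
console.log(hit, tsFiles, md.match("README.MD"), md.makeRe());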


@@ -1,27 +0,0 @@
{
"name": "@types/minimatch",
"version": "3.0.3",
"description": "TypeScript definitions for Minimatch",
"license": "MIT",
"contributors": [
{
"name": "vvakame",
"url": "https://github.com/vvakame",
"githubUsername": "vvakame"
},
{
"name": "Shant Marouti",
"url": "https://github.com/shantmarouti",
"githubUsername": "shantmarouti"
}
],
"main": "",
"repository": {
"type": "git",
"url": "https://www.github.com/DefinitelyTyped/DefinitelyTyped.git"
},
"scripts": {},
"dependencies": {},
"typesPublisherContentHash": "e768e36348874adcc93ac67e9c3c7b5fcbd39079c0610ec16e410b8f851308d1",
"typeScriptVersion": "2.0"
}

0
node_modules/@types/node/LICENSE generated vendored Executable file → Normal file

8
node_modules/@types/node/README.md generated vendored Executable file → Normal file

@@ -5,12 +5,12 @@
This package contains type definitions for Node.js (http://nodejs.org/).
# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node.
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node/v12.
### Additional Details
* Last updated: Wed, 28 Jul 2021 19:31:18 GMT
* Last updated: Wed, 21 Oct 2020 17:47:46 GMT
* Dependencies: none
* Global values: `AbortController`, `AbortSignal`, `__dirname`, `__filename`, `console`, `exports`, `gc`, `global`, `module`, `process`, `require`
* Global values: `Buffer`, `NodeJS`, `__dirname`, `__filename`, `clearImmediate`, `clearInterval`, `clearTimeout`, `console`, `exports`, `global`, `module`, `process`, `queueMicrotask`, `require`, `setImmediate`, `setInterval`, `setTimeout`
# Credits
These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [DefinitelyTyped](https://github.com/DefinitelyTyped), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Hoàng Văn Khải](https://github.com/KSXGitHub), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nicolas Even](https://github.com/n-e), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Simon Schick](https://github.com/SimonSchick), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Minh Son Nguyen](https://github.com/nguymin4), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Surasak Chaisurin](https://github.com/Ryan-Willpower), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Jason Kwok](https://github.com/JasonHK), [Victor Perin](https://github.com/victorperin), [Yongsheng Zhang](https://github.com/ZYSzys), and [NodeJS Contributors](https://github.com/NodeJS).
These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [DefinitelyTyped](https://github.com/DefinitelyTyped), [Alberto Schiabel](https://github.com/jkomyno), [Alexander T.](https://github.com/a-tarasyuk), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Bruno Scheufler](https://github.com/brunoscheufler), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Flarna](https://github.com/Flarna), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Hoàng Văn Khải](https://github.com/KSXGitHub), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nicolas Even](https://github.com/n-e), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Simon Schick](https://github.com/SimonSchick), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Zane Hannan AU](https://github.com/ZaneHannanAU), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Jordi Oliveras Rovira](https://github.com/j-oliveras), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Minh Son Nguyen](https://github.com/nguymin4), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), and [Jason Kwok](https://github.com/JasonHK).

1492
node_modules/@types/node/assert.d.ts generated vendored Executable file → Normal file

File diff suppressed because it is too large.


@@ -1,8 +0,0 @@
declare module 'assert/strict' {
import { strict } from 'node:assert';
export = strict;
}
declare module 'node:assert/strict' {
import { strict } from 'node:assert';
export = strict;
}
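
These two declarations simply alias the strict flavour of assert. A minimal sketch, assuming a Node runtime new enough to expose the assert/strict entry point:

import assert = require("assert/strict");

// In strict mode, deepEqual behaves like deepStrictEqual
assert.deepEqual({ a: 1 }, { a: 1 });
assert.strictEqual(1 + 1, 2);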

498
node_modules/@types/node/async_hooks.d.ts generated vendored Executable file → Normal file

@@ -1,47 +1,16 @@
/**
* The `async_hooks` module provides an API to track asynchronous resources. It
* can be accessed using:
*
* ```js
* const async_hooks = require('async_hooks');
* ```
* @experimental
* @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/async_hooks.js)
* Async Hooks module: https://nodejs.org/api/async_hooks.html
*/
declare module 'async_hooks' {
declare module "async_hooks" {
/**
* ```js
* const async_hooks = require('async_hooks');
*
* console.log(async_hooks.executionAsyncId()); // 1 - bootstrap
* fs.open(path, 'r', (err, fd) => {
* console.log(async_hooks.executionAsyncId()); // 6 - open()
* });
* ```
*
* The ID returned from `executionAsyncId()` is related to execution timing, not
* causality (which is covered by `triggerAsyncId()`):
*
* ```js
* const server = net.createServer((conn) => {
* // Returns the ID of the server, not of the new connection, because the
* // callback runs in the execution scope of the server's MakeCallback().
* async_hooks.executionAsyncId();
*
* }).listen(port, () => {
* // Returns the ID of a TickObject (process.nextTick()) because all
* // callbacks passed to .listen() are wrapped in a nextTick().
* async_hooks.executionAsyncId();
* });
* ```
*
* Promise contexts may not get precise `executionAsyncIds` by default.
* See the section on `promise execution tracking`.
* @since v8.1.0
* @return The `asyncId` of the current execution context. Useful to track when something calls.
* Returns the asyncId of the current execution context.
*/
function executionAsyncId(): number;
/**
* The resource representing the current execution.
* Useful to store data within the resource.
*
* Resource objects returned by `executionAsyncResource()` are most often internal
* Node.js handle objects with undocumented APIs. Using any functions or properties
* on the object is likely to crash your application and should be avoided.
@@ -49,70 +18,14 @@ declare module 'async_hooks' {
* Using `executionAsyncResource()` in the top-level execution context will
* return an empty object as there is no handle or request object to use,
* but having an object representing the top-level can be helpful.
*
* ```js
* const { open } = require('fs');
* const { executionAsyncId, executionAsyncResource } = require('async_hooks');
*
* console.log(executionAsyncId(), executionAsyncResource()); // 1 {}
* open(__filename, 'r', (err, fd) => {
* console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap
* });
* ```
*
* This can be used to implement continuation local storage without the
* use of a tracking `Map` to store the metadata:
*
* ```js
* const { createServer } = require('http');
* const {
* executionAsyncId,
* executionAsyncResource,
* createHook
* } = require('async_hooks');
* const sym = Symbol('state'); // Private symbol to avoid pollution
*
* createHook({
* init(asyncId, type, triggerAsyncId, resource) {
* const cr = executionAsyncResource();
* if (cr) {
* resource[sym] = cr[sym];
* }
* }
* }).enable();
*
* const server = createServer((req, res) => {
* executionAsyncResource()[sym] = { state: req.url };
* setTimeout(function() {
* res.end(JSON.stringify(executionAsyncResource()[sym]));
* }, 100);
* }).listen(3000);
* ```
* @since v13.9.0, v12.17.0
* @return The resource representing the current execution. Useful to store data within the resource.
*/
function executionAsyncResource(): object;
/**
* ```js
* const server = net.createServer((conn) => {
* // The resource that caused (or triggered) this callback to be called
* // was that of the new connection. Thus the return value of triggerAsyncId()
* // is the asyncId of "conn".
* async_hooks.triggerAsyncId();
*
* }).listen(port, () => {
* // Even though all callbacks passed to .listen() are wrapped in a nextTick()
* // the callback itself exists because the call to the server's .listen()
* // was made. So the return value would be the ID of the server.
* async_hooks.triggerAsyncId();
* });
* ```
*
* Promise contexts may not get valid `triggerAsyncId`s by default. See
* the section on `promise execution tracking`.
* @return The ID of the resource responsible for calling the callback that is currently being executed.
* Returns the ID of the resource responsible for calling the callback that is currently being executed.
*/
function triggerAsyncId(): number;
interface HookCallbacks {
/**
* Called when a class is constructed that has the possibility to emit an asynchronous event.
@@ -122,133 +35,73 @@ declare module 'async_hooks' {
* @param resource reference to the resource representing the async operation, needs to be released during destroy
*/
init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void;
/**
* When an asynchronous operation is initiated or completes a callback is called to notify the user.
* The before callback is called just before said callback is executed.
* @param asyncId the unique identifier assigned to the resource about to execute the callback.
*/
before?(asyncId: number): void;
/**
* Called immediately after the callback specified in before is completed.
* @param asyncId the unique identifier assigned to the resource which has executed the callback.
*/
after?(asyncId: number): void;
/**
* Called when a promise has resolve() called. This may not be in the same execution id
* as the promise itself.
* @param asyncId the unique id for the promise that was resolve()d.
*/
promiseResolve?(asyncId: number): void;
/**
* Called after the resource corresponding to asyncId is destroyed
* @param asyncId a unique ID for the async resource
*/
destroy?(asyncId: number): void;
}
interface AsyncHook {
/**
* Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop.
*/
enable(): this;
/**
* Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled.
*/
disable(): this;
}
/**
* Registers functions to be called for different lifetime events of each async
* operation.
*
* The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the
* respective asynchronous event during a resource's lifetime.
*
* All callbacks are optional. For example, if only resource cleanup needs to
* be tracked, then only the `destroy` callback needs to be passed. The
* specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section.
*
* ```js
* const async_hooks = require('async_hooks');
*
* const asyncHook = async_hooks.createHook({
* init(asyncId, type, triggerAsyncId, resource) { },
* destroy(asyncId) { }
* });
* ```
*
* The callbacks will be inherited via the prototype chain:
*
* ```js
* class MyAsyncCallbacks {
* init(asyncId, type, triggerAsyncId, resource) { }
* destroy(asyncId) {}
* }
*
* class MyAddedCallbacks extends MyAsyncCallbacks {
* before(asyncId) { }
* after(asyncId) { }
* }
*
* const asyncHook = async_hooks.createHook(new MyAddedCallbacks());
* ```
*
* Because promises are asynchronous resources whose lifecycle is tracked
* via the async hooks mechanism, the `init()`, `before()`, `after()`, and`destroy()` callbacks _must not_ be async functions that return promises.
* @since v8.1.0
* @param callbacks The `Hook Callbacks` to register
* @return Instance used for disabling and enabling hooks
* Registers functions to be called for different lifetime events of each async operation.
* @param options the callbacks to register
* @return an AsyncHooks instance used for disabling and enabling hooks
*/
function createHook(callbacks: HookCallbacks): AsyncHook;
function createHook(options: HookCallbacks): AsyncHook;
interface AsyncResourceOptions {
/**
* The ID of the execution context that created this async event.
* @default executionAsyncId()
*/
triggerAsyncId?: number | undefined;
/**
* Disables automatic `emitDestroy` when the object is garbage collected.
* This usually does not need to be set (even if `emitDestroy` is called
* manually), unless the resource's `asyncId` is retrieved and the
* sensitive API's `emitDestroy` is called with it.
* @default false
*/
requireManualDestroy?: boolean | undefined;
/**
* The ID of the execution context that created this async event.
* Default: `executionAsyncId()`
*/
triggerAsyncId?: number;
/**
* Disables automatic `emitDestroy` when the object is garbage collected.
* This usually does not need to be set (even if `emitDestroy` is called
* manually), unless the resource's `asyncId` is retrieved and the
* sensitive API's `emitDestroy` is called with it.
* Default: `false`
*/
requireManualDestroy?: boolean;
}
/**
* The class `AsyncResource` is designed to be extended by the embedder's async
* resources. Using this, users can easily trigger the lifetime events of their
* own resources.
*
* The `init` hook will trigger when an `AsyncResource` is instantiated.
*
* The following is an overview of the `AsyncResource` API.
*
* ```js
* const { AsyncResource, executionAsyncId } = require('async_hooks');
*
* // AsyncResource() is meant to be extended. Instantiating a
* // new AsyncResource() also triggers init. If triggerAsyncId is omitted then
* // async_hook.executionAsyncId() is used.
* const asyncResource = new AsyncResource(
* type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false }
* );
*
* // Run a function in the execution context of the resource. This will
* // * establish the context of the resource
* // * trigger the AsyncHooks before callbacks
* // * call the provided function `fn` with the supplied arguments
* // * trigger the AsyncHooks after callbacks
* // * restore the original execution context
* asyncResource.runInAsyncScope(fn, thisArg, ...args);
*
* // Call AsyncHooks destroy callbacks.
* asyncResource.emitDestroy();
*
* // Return the unique ID assigned to the AsyncResource instance.
* asyncResource.asyncId();
*
* // Return the trigger ID for the AsyncResource instance.
* asyncResource.triggerAsyncId();
* ```
* The class AsyncResource was designed to be extended by the embedder's async resources.
* Using this users can easily trigger the lifetime events of their own resources.
*/
class AsyncResource {
/**
@@ -260,238 +113,135 @@ declare module 'async_hooks' {
* this async event (default: `executionAsyncId()`), or an
* AsyncResourceOptions object (since 9.3)
*/
constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions);
constructor(type: string, triggerAsyncId?: number|AsyncResourceOptions);
/**
* Binds the given function to the current execution context.
*
* The returned function will have an `asyncResource` property referencing
* the `AsyncResource` to which the function is bound.
* @since v14.8.0, v12.19.0
* @param fn The function to bind to the current execution context.
* @param type An optional name to associate with the underlying `AsyncResource`.
*/
static bind<Func extends (this: ThisArg, ...args: any[]) => any, ThisArg>(
fn: Func,
type?: string,
thisArg?: ThisArg
): Func & {
asyncResource: AsyncResource;
};
/**
* Binds the given function to execute to this `AsyncResource`'s scope.
*
* The returned function will have an `asyncResource` property referencing
* the `AsyncResource` to which the function is bound.
* @since v14.8.0, v12.19.0
* @param fn The function to bind to the current `AsyncResource`.
*/
bind<Func extends (...args: any[]) => any>(
fn: Func
): Func & {
asyncResource: AsyncResource;
};
/**
* Call the provided function with the provided arguments in the execution context
* of the async resource. This will establish the context, trigger the AsyncHooks
* before callbacks, call the function, trigger the AsyncHooks after callbacks, and
* then restore the original execution context.
* @since v9.6.0
* @param fn The function to call in the execution context of this async resource.
* Call the provided function with the provided arguments in the
* execution context of the async resource. This will establish the
* context, trigger the AsyncHooks before callbacks, call the function,
* trigger the AsyncHooks after callbacks, and then restore the original
* execution context.
* @param fn The function to call in the execution context of this
* async resource.
* @param thisArg The receiver to be used for the function call.
* @param args Optional arguments to pass to the function.
*/
runInAsyncScope<This, Result>(fn: (this: This, ...args: any[]) => Result, thisArg?: This, ...args: any[]): Result;
/**
* Call all `destroy` hooks. This should only ever be called once. An error will
* be thrown if it is called more than once. This **must** be manually called. If
* the resource is left to be collected by the GC then the `destroy` hooks will
* never be called.
* @return A reference to `asyncResource`.
* Call AsyncHooks destroy callbacks.
*/
emitDestroy(): this;
emitDestroy(): void;
/**
* @return The unique `asyncId` assigned to the resource.
* @return the unique ID assigned to this AsyncResource instance.
*/
asyncId(): number;
/**
*
* @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor.
* @return the trigger ID for this AsyncResource instance.
*/
triggerAsyncId(): number;
}
/**
* This class creates stores that stay coherent through asynchronous operations.
*
* While you can create your own implementation on top of the `async_hooks` module,`AsyncLocalStorage` should be preferred as it is a performant and memory safe
* implementation that involves significant optimizations that are non-obvious to
* implement.
*
* The following example uses `AsyncLocalStorage` to build a simple logger
* that assigns IDs to incoming HTTP requests and includes them in messages
* logged within each request.
*
* ```js
* const http = require('http');
* const { AsyncLocalStorage } = require('async_hooks');
*
* const asyncLocalStorage = new AsyncLocalStorage();
*
* function logWithId(msg) {
* const id = asyncLocalStorage.getStore();
* console.log(`${id !== undefined ? id : '-'}:`, msg);
* }
*
* let idSeq = 0;
* http.createServer((req, res) => {
* asyncLocalStorage.run(idSeq++, () => {
* logWithId('start');
* // Imagine any chain of async operations here
* setImmediate(() => {
* logWithId('finish');
* res.end();
* });
* });
* }).listen(8080);
*
* http.get('http://localhost:8080');
* http.get('http://localhost:8080');
* // Prints:
* // 0: start
* // 1: start
* // 0: finish
* // 1: finish
* ```
*
* Each instance of `AsyncLocalStorage` maintains an independent storage context.
* Multiple instances can safely exist simultaneously without risk of interfering
* with each other data.
* @since v13.10.0, v12.17.0
* When having multiple instances of `AsyncLocalStorage`, they are independent
* from each other. It is safe to instantiate this class multiple times.
*/
class AsyncLocalStorage<T> {
/**
* Disables the instance of `AsyncLocalStorage`. All subsequent calls
* to `asyncLocalStorage.getStore()` will return `undefined` until`asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again.
* This method disables the instance of `AsyncLocalStorage`. All subsequent calls
* to `asyncLocalStorage.getStore()` will return `undefined` until
* `asyncLocalStorage.run()` or `asyncLocalStorage.runSyncAndReturn()`
* is called again.
*
* When calling `asyncLocalStorage.disable()`, all current contexts linked to the
* instance will be exited.
*
* Calling `asyncLocalStorage.disable()` is required before the`asyncLocalStorage` can be garbage collected. This does not apply to stores
* Calling `asyncLocalStorage.disable()` is required before the
* `asyncLocalStorage` can be garbage collected. This does not apply to stores
* provided by the `asyncLocalStorage`, as those objects are garbage collected
* along with the corresponding async resources.
*
* Use this method when the `asyncLocalStorage` is not in use anymore
* This method is to be used when the `asyncLocalStorage` is not in use anymore
* in the current process.
* @since v13.10.0, v12.17.0
* @experimental
*/
disable(): void;
/**
* Returns the current store.
* If called outside of an asynchronous context initialized by
* calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it
* returns `undefined`.
* @since v13.10.0, v12.17.0
* This method returns the current store.
* If this method is called outside of an asynchronous context initialized by
* calling `asyncLocalStorage.run` or `asyncLocalStorage.runAndReturn`, it will
* return `undefined`.
*/
getStore(): T | undefined;
/**
* Runs a function synchronously within a context and returns its
* Calling `asyncLocalStorage.run(callback)` will create a new asynchronous
* context.
* Within the callback function and the asynchronous operations from the callback,
* `asyncLocalStorage.getStore()` will return an instance of `Map` known as
* "the store". This store will be persistent through the following
* asynchronous calls.
*
* The callback will be ran asynchronously. Optionally, arguments can be passed
* to the function. They will be passed to the callback function.
*
* If an error is thrown by the callback function, it will not be caught by
* a `try/catch` block as the callback is ran in a new asynchronous resource.
* Also, the stacktrace will be impacted by the asynchronous call.
*/
// TODO: Apply generic vararg once available
run(store: T, callback: (...args: any[]) => void, ...args: any[]): void;
/**
* Calling `asyncLocalStorage.exit(callback)` will create a new asynchronous
* context.
* Within the callback function and the asynchronous operations from the callback,
* `asyncLocalStorage.getStore()` will return `undefined`.
*
* The callback will be ran asynchronously. Optionally, arguments can be passed
* to the function. They will be passed to the callback function.
*
* If an error is thrown by the callback function, it will not be caught by
* a `try/catch` block as the callback is ran in a new asynchronous resource.
* Also, the stacktrace will be impacted by the asynchronous call.
*/
exit(callback: (...args: any[]) => void, ...args: any[]): void;
/**
* This methods runs a function synchronously within a context and return its
* return value. The store is not accessible outside of the callback function or
* the asynchronous operations created within the callback.
*
* The optional `args` are passed to the callback function.
* Optionally, arguments can be passed to the function. They will be passed to
* the callback function.
*
* If the callback function throws an error, the error is thrown by `run()` too.
* The stacktrace is not impacted by this call and the context is exited.
*
* Example:
*
* ```js
* const store = { id: 2 };
* try {
* asyncLocalStorage.run(store, () => {
* asyncLocalStorage.getStore(); // Returns the store object
* throw new Error();
* });
* } catch (e) {
* asyncLocalStorage.getStore(); // Returns undefined
* // The error will be caught here
* }
* ```
* @since v13.10.0, v12.17.0
* If the callback function throws an error, it will be thrown by
* `runSyncAndReturn` too. The stacktrace will not be impacted by this call and
* the context will be exited.
*/
run<R, TArgs extends any[]>(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R;
runSyncAndReturn<R>(store: T, callback: (...args: any[]) => R, ...args: any[]): R;
/**
* Runs a function synchronously outside of a context and returns its
* This methods runs a function synchronously outside of a context and return its
* return value. The store is not accessible within the callback function or
* the asynchronous operations created within the callback. Any `getStore()`call done within the callback function will always return `undefined`.
* the asynchronous operations created within the callback.
*
* The optional `args` are passed to the callback function.
* Optionally, arguments can be passed to the function. They will be passed to
* the callback function.
*
* If the callback function throws an error, the error is thrown by `exit()` too.
* The stacktrace is not impacted by this call and the context is re-entered.
*
* Example:
*
* ```js
* // Within a call to run
* try {
* asyncLocalStorage.getStore(); // Returns the store object or value
* asyncLocalStorage.exit(() => {
* asyncLocalStorage.getStore(); // Returns undefined
* throw new Error();
* });
* } catch (e) {
* asyncLocalStorage.getStore(); // Returns the same object or value
* // The error will be caught here
* }
* ```
* @since v13.10.0, v12.17.0
* @experimental
* If the callback function throws an error, it will be thrown by
* `exitSyncAndReturn` too. The stacktrace will not be impacted by this call and
* the context will be re-entered.
*/
exit<R, TArgs extends any[]>(callback: (...args: TArgs) => R, ...args: TArgs): R;
exitSyncAndReturn<R>(callback: (...args: any[]) => R, ...args: any[]): R;
/**
* Transitions into the context for the remainder of the current
* synchronous execution and then persists the store through any following
* asynchronous calls.
*
* Example:
*
* ```js
* const store = { id: 1 };
* // Replaces previous store with the given store object
* asyncLocalStorage.enterWith(store);
* asyncLocalStorage.getStore(); // Returns the store object
* someAsyncOperation(() => {
* asyncLocalStorage.getStore(); // Returns the same object
* });
* ```
*
* This transition will continue for the _entire_ synchronous execution.
* This means that if, for example, the context is entered within an event
* handler subsequent event handlers will also run within that context unless
* specifically bound to another context with an `AsyncResource`. That is why`run()` should be preferred over `enterWith()` unless there are strong reasons
* to use the latter method.
*
* ```js
* const store = { id: 1 };
*
* emitter.on('my-event', () => {
* asyncLocalStorage.enterWith(store);
* });
* emitter.on('my-event', () => {
* asyncLocalStorage.getStore(); // Returns the same object
* });
*
* asyncLocalStorage.getStore(); // Returns undefined
* emitter.emit('my-event');
* asyncLocalStorage.getStore(); // Returns the same object
* ```
* @since v13.11.0, v12.17.0
* @experimental
* Calling `asyncLocalStorage.enterWith(store)` will transition into the context
* for the remainder of the current synchronous execution and will persist
* through any following asynchronous calls.
*/
enterWith(store: T): void;
}
}
declare module 'node:async_hooks' {
export * from 'async_hooks';
}
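
The async_hooks hunk above trades the newer AsyncLocalStorage documentation and signatures for the older ones. As an illustrative sketch that type-checks against either side of this diff (the store shape and timeout are invented):

import { AsyncLocalStorage } from "async_hooks";

const als = new AsyncLocalStorage<{ requestId: number }>();

function handle(requestId: number): void {
  // run() establishes the store for the callback and any async work it starts
  als.run({ requestId }, () => {
    setTimeout(() => {
      // getStore() returns the store of the enclosing run(), or undefined outside one
      console.log(als.getStore()?.requestId);
    }, 10);
  });
}

handle(1);
handle(2);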

Some files were not shown because too many files have changed in this diff.