Compare commits

...

229 Commits

Author SHA1 Message Date
Chuan-kai Lin
2b46439dd5 Merge pull request #913 from github/update-v1.0.32-4eb03fb6
Merge main into v1
2022-02-07 10:21:10 -08:00
github-actions[bot]
6d8390b7cb 1.0.32 2022-02-07 17:21:16 +00:00
Henry Mercer
4eb03fb6f3 Merge pull request #907 from github/henrymercer/report-ml-powered-query-enablement
Report ML-powered query enablement in the `init` status report
2022-02-07 17:16:25 +00:00
Henry Mercer
03c64ef07d Add more documentation for ML-powered JS queries status report
Also be more explicit about which version strings are reportable in
the code.
2022-02-07 16:46:53 +00:00
Henry Mercer
cc622a02a9 Merge branch 'main' into henrymercer/report-ml-powered-query-enablement 2022-02-07 14:39:20 +00:00
Henry Mercer
c95a3d854c Limit cardinality of ML-powered JS queries status report
Some platforms that ingest this status report charge based on the
cardinality of the fields, so here we restrict the version strings we
support to a fixed set.
2022-02-07 14:36:40 +00:00
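The restriction described in this commit amounts to collapsing arbitrary pack version strings into a small, fixed set of reportable values. A minimal TypeScript sketch of that idea, using hypothetical names rather than the action's actual helpers:

// Hypothetical allow-list of version strings that may be reported verbatim.
const SUPPORTED_ML_PACK_VERSIONS = ["0.0.1", "0.0.2"] as const;

type MlPoweredJsQueriesStatus =
  | (typeof SUPPORTED_ML_PACK_VERSIONS)[number]
  | "other" // an unrecognized version string
  | "multiple" // more than one ML-powered pack in scope (unsupported)
  | "false"; // no ML-powered pack in scope

function toReportableStatus(packVersions: string[]): MlPoweredJsQueriesStatus {
  if (packVersions.length === 0) {
    return "false";
  }
  if (packVersions.length > 1) {
    return "multiple";
  }
  // Only version strings from the fixed allow-list are reported verbatim;
  // everything else collapses to "other" so the field's cardinality stays bounded.
  const version = packVersions[0];
  return (SUPPORTED_ML_PACK_VERSIONS as readonly string[]).includes(version)
    ? (version as MlPoweredJsQueriesStatus)
    : "other";
}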
Henry Mercer
f888be73ce Nit: Simplify code with optional chaining 2022-02-07 14:24:40 +00:00
Chuan-kai Lin
16d4068732 Merge pull request #911 from github/cklin/codeql-cli-2.8.0
Update default CodeQL version to 2.8.0
2022-02-04 13:03:16 -08:00
Chuan-kai Lin
aab545260e Update default CodeQL version to 2.8.0 2022-02-04 11:24:40 -08:00
Henry Mercer
501fe7ff12 Update getMlPoweredJsQueriesStatus doc 2022-02-04 17:16:25 +00:00
Henry Mercer
ad40e4a8f8 Merge branch 'main' into henrymercer/report-ml-powered-query-enablement 2022-02-04 16:38:18 +00:00
Henry Mercer
537b2f873a Add "multiple" report for ML-powered JS query enablement
When multiple ML-powered JS packs are in scope (an unsupported
scenario), the status report is "multiple".
2022-02-04 16:37:26 +00:00
Henry Mercer
9f32fc9b9d Only add ML-powered queries pack if the user didn't manually request it 2022-02-04 16:34:17 +00:00
Thomas Horstmeyer
904d0acf90 Merge pull request #858 from github/use-better-base-sha
Declare the merge base as base for code scanning comparisons
2022-02-04 12:37:55 +00:00
Thomas Horstmeyer
9b14aa7c84 Merge branch 'main' into use-better-base-sha 2022-02-04 12:04:41 +00:00
Henry Mercer
1cddec9558 Add ML-powered queries enablement to init status report
We report this information in the `init` status report rather than the
`analyze` status report so we can gather data about timeouts.
2022-02-03 16:29:28 +00:00
Henry Mercer
a005206838 Convert status report comments to documentation 2022-02-03 11:52:49 +00:00
Henry Mercer
ff33f031e8 Merge pull request #822 from github/dependabot/npm_and_yarn/ava/typescript-3.0.1
Upgrade AVA to v4
2022-02-02 18:01:27 +00:00
Henry Mercer
6a6a3203dd Merge branch 'main' into dependabot/npm_and_yarn/ava/typescript-3.0.1 2022-02-02 17:24:20 +00:00
Henry Mercer
942b34d547 Merge pull request #906 from github/aibaars/fix-typo
Fix typo in error message
2022-02-02 16:25:19 +00:00
Thomas Horstmeyer
3469c69bba Merge branch 'main' into use-better-base-sha 2022-02-02 13:52:13 +00:00
Arthur Baars
d57c2761c9 Fix typo in error message 2022-02-02 13:51:48 +01:00
Henry Mercer
6081b90eae Merge pull request #905 from github/henrymercer/fix-changelog
Fix changelog
2022-02-02 12:02:44 +00:00
Henry Mercer
78eb2c9c00 Fix changelog
Move a couple of entries for #889 that should have been in the
unreleased section but were inadvertently moved into the 1.0.31
release.
2022-02-02 11:13:24 +00:00
Andrew Eisenberg
b2af0740e4 Remove security-events: write from tests
This is not necessary.
2022-02-01 19:17:15 -08:00
Andrew Eisenberg
77194581b7 Merge pull request #904 from cw-acroteau/add-ref-input
Add ref input
2022-02-01 19:15:33 -08:00
cw-acroteau
1a5b604256 Merge branch 'main' into add-ref-input 2022-02-01 20:28:42 -05:00
cw-acroteau
9bfa05fb03 Merge pull request #3 from aeisenberg/aeisenberg/add-ref-input
Update the description of new inputs
2022-02-01 17:19:33 -05:00
Henry Mercer
e9aa623c5d Merge branch 'main' into dependabot/npm_and_yarn/ava/typescript-3.0.1 2022-02-01 18:57:02 +00:00
Henry Mercer
ce89f1b611 Upgrade Ava to v4 2022-02-01 18:56:42 +00:00
Andrew Eisenberg
57f34a1569 Merge pull request #902 from github/aeisenberg/permissions
Add a permissions block for generated workflows
2022-02-01 10:54:05 -08:00
Andrew Eisenberg
941e382c65 Merge branch 'main' into aeisenberg/permissions 2022-02-01 10:37:13 -08:00
Andrew Eisenberg
6c6b8c33c4 Update the description of new inputs
These new inputs will not work on pull requests from forks.
2022-02-01 10:34:45 -08:00
Andrew Eisenberg
13f97c81fe Merge branch 'aeisenberg/permissions' into add-ref-input 2022-02-01 10:31:14 -08:00
Andrew Eisenberg
36419a79c1 Avoid sending status reports in test mode 2022-02-01 10:12:35 -08:00
github-actions[bot]
9a40cc5274 Update checked-in dependencies 2022-02-01 17:46:08 +00:00
Thomas Horstmeyer
ec0b3ae7ff remove some debug info 2022-02-01 15:39:11 +00:00
Thomas Horstmeyer
e836f97769 Detect merge base as base_sha for upload 2022-02-01 15:38:43 +00:00
dependabot[bot]
11639426e6 Bump @ava/typescript from 2.0.0 to 3.0.1
Bumps [@ava/typescript](https://github.com/avajs/typescript) from 2.0.0 to 3.0.1.
- [Release notes](https://github.com/avajs/typescript/releases)
- [Commits](https://github.com/avajs/typescript/compare/v2.0.0...v3.0.1)

---
updated-dependencies:
- dependency-name: "@ava/typescript"
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-02-01 13:33:29 +00:00
Henry Mercer
a9da9fc959 Merge pull request #901 from github/dependabot/npm_and_yarn/long-5.2.0
Bump long from 4.0.0 to 5.2.0
2022-02-01 13:31:41 +00:00
Henry Mercer
ae8741006b Merge branch 'main' into dependabot/npm_and_yarn/long-5.2.0 2022-02-01 11:11:25 +00:00
Alex Croteau
72f9a88bc5 Regenerates test workflows 2022-01-31 20:07:55 -05:00
Alex Croteau
9f36b75178 Splits integration tests 2022-01-31 20:06:18 -05:00
Alex Croteau
dfe2bc4e49 Changes to commit hash in main branch 2022-01-31 20:06:18 -05:00
Alex Croteau
63d0c7856c Fixes integration tests referred repo 2022-01-31 20:06:18 -05:00
Alex Croteau
3cc87990f0 Updates javascript files 2022-01-31 20:06:18 -05:00
Alex Croteau
260b4d5dc4 Fixes integration test 2022-01-31 20:06:18 -05:00
Alex Croteau
1bfa9ace60 Adds integration test and fixes linting 2022-01-31 20:06:18 -05:00
Alex Croteau
1eaaf07b91 Adds check on inputs and compiled files 2022-01-31 20:06:17 -05:00
Alex Croteau
5916f9896d Applies recommendation in upload-sarif/action.yml 2022-01-31 20:06:17 -05:00
cw-acroteau
0dd4dbf9d9 Apply documentation suggestions from code review
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2022-01-31 20:06:17 -05:00
Alex Croteau
980fd4ed38 Adds ref and SHA as inputs, and sarif-id as output 2022-01-31 20:06:17 -05:00
Andrew Eisenberg
e9aa2c6f62 Add a permissions block for generated workflows
Ensure that all workflows are able to write security events.
2022-01-31 16:11:00 -08:00
Andrew Eisenberg
e9d52340a3 Merge pull request #900 from github/mergeback/v1.0.31-to-main-1a927e93
Mergeback v1.0.31 refs/heads/v1 into main
2022-01-31 13:36:54 -08:00
github-actions[bot]
392931027a Update checked-in dependencies 2022-01-31 18:36:27 +00:00
dependabot[bot]
ba2b46d53e Bump long from 4.0.0 to 5.2.0
Bumps [long](https://github.com/dcodeIO/long.js) from 4.0.0 to 5.2.0.
- [Release notes](https://github.com/dcodeIO/long.js/releases)
- [Commits](https://github.com/dcodeIO/long.js/compare/4.0.0...v5.2.0)

---
updated-dependencies:
- dependency-name: long
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-01-31 18:32:15 +00:00
github-actions[bot]
a602dbedb7 Update checked-in dependencies 2022-01-31 17:42:59 +00:00
Andrew Eisenberg
66777347f6 Merge branch 'main' into mergeback/v1.0.31-to-main-1a927e93 2022-01-31 08:42:20 -08:00
github-actions[bot]
ce6e94b1b8 1.0.32 2022-01-31 16:41:57 +00:00
github-actions[bot]
30790fe430 Update changelog and version after v1.0.31 2022-01-31 16:41:55 +00:00
Andrew Eisenberg
1a927e9307 Merge pull request #899 from github/update-v1.0.31-3a741b6c
Merge main into v1
2022-01-31 08:40:43 -08:00
Henry Mercer
ea2ef8554d Merge pull request #893 from github/dependabot/npm_and_yarn/trim-off-newlines-1.0.3
Bump trim-off-newlines from 1.0.2 to 1.0.3
2022-01-31 16:26:49 +00:00
github-actions[bot]
bbb9c53c65 1.0.31 2022-01-31 16:08:02 +00:00
Henry Mercer
9664bae55e Merge branch 'main' into dependabot/npm_and_yarn/trim-off-newlines-1.0.3 2022-01-31 15:59:28 +00:00
Henry Mercer
3a741b6cf5 Merge pull request #892 from github/dependabot/npm_and_yarn/node-fetch-2.6.7
Bump node-fetch from 2.6.1 to 2.6.7
2022-01-31 15:48:18 +00:00
Henry Mercer
1498bce660 Merge branch 'main' into dependabot/npm_and_yarn/node-fetch-2.6.7 2022-01-31 14:51:48 +00:00
Henry Mercer
c260382f36 Merge pull request #894 from github/dependabot/npm_and_yarn/sinon-13.0.0
Bump sinon from 11.1.2 to 13.0.0
2022-01-31 14:51:33 +00:00
github-actions[bot]
6410c0691e Update checked-in dependencies 2022-01-29 01:49:46 +00:00
github-actions[bot]
8e07ec6ce2 Update checked-in dependencies 2022-01-29 01:33:45 +00:00
github-actions[bot]
c337e5f2f3 Update checked-in dependencies 2022-01-29 01:21:58 +00:00
dependabot[bot]
9673b562d9 Bump sinon from 11.1.2 to 13.0.0
Bumps [sinon](https://github.com/sinonjs/sinon) from 11.1.2 to 13.0.0.
- [Release notes](https://github.com/sinonjs/sinon/releases)
- [Changelog](https://github.com/sinonjs/sinon/blob/master/docs/changelog.md)
- [Commits](https://github.com/sinonjs/sinon/compare/v11.1.2...v13.0.0)

---
updated-dependencies:
- dependency-name: sinon
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-01-29 01:16:14 +00:00
dependabot[bot]
253bc84963 Bump trim-off-newlines from 1.0.2 to 1.0.3
Bumps [trim-off-newlines](https://github.com/stevemao/trim-off-newlines) from 1.0.2 to 1.0.3.
- [Release notes](https://github.com/stevemao/trim-off-newlines/releases)
- [Commits](https://github.com/stevemao/trim-off-newlines/compare/v1.0.2...v1.0.3)

---
updated-dependencies:
- dependency-name: trim-off-newlines
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-01-29 01:15:48 +00:00
dependabot[bot]
bc4366e948 Bump node-fetch from 2.6.1 to 2.6.7
Bumps [node-fetch](https://github.com/node-fetch/node-fetch) from 2.6.1 to 2.6.7.
- [Release notes](https://github.com/node-fetch/node-fetch/releases)
- [Commits](https://github.com/node-fetch/node-fetch/compare/v2.6.1...v2.6.7)

---
updated-dependencies:
- dependency-name: node-fetch
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-01-29 01:15:47 +00:00
Andrew Eisenberg
3b831aafd9 Merge pull request #885 from github/dependabot/npm_and_yarn/runner/node-fetch-2.6.7
Bump node-fetch from 2.6.1 to 2.6.7 in /runner
2022-01-28 17:15:15 -08:00
Andrew Eisenberg
14b1fecf33 Merge branch 'main' into dependabot/npm_and_yarn/runner/node-fetch-2.6.7 2022-01-28 15:42:46 -08:00
Robin Neatherway
5e23536180 Merge pull request #887 from github/rneatherway/content-type
Set contentType for database uploads
2022-01-25 18:40:57 +00:00
Robin Neatherway
5f30e2466f Merge branch 'rneatherway/content-type' of github.com:github/codeql-action into rneatherway/content-type 2022-01-25 17:17:21 +00:00
Andrew Eisenberg
05981c5829 Merge branch 'main' into dependabot/npm_and_yarn/runner/node-fetch-2.6.7 2022-01-25 08:57:02 -08:00
Robin Neatherway
486633d442 Try string literal key 2022-01-25 16:33:09 +00:00
Robin Neatherway
d6360c9075 Merge branch 'main' into rneatherway/content-type 2022-01-25 15:21:13 +00:00
Robin Neatherway
d3a0787934 Merge pull request #886 from github/rneatherway/remove-old-upload-path
Remove old upload path
2022-01-25 14:08:16 +00:00
Robin Neatherway
e13c8bbfb7 Merge branch 'main' into rneatherway/remove-old-upload-path 2022-01-25 12:36:23 +00:00
Andrew Eisenberg
1c9a1f5d01 Merge pull request #888 from github/aeisenberg/remove-experiemental-message
Remove `experimental` warning message for custom packs
2022-01-24 17:03:09 -08:00
Andrew Eisenberg
1f7dab4ba2 Merge branch 'main' into aeisenberg/remove-experiemental-message 2022-01-24 13:30:45 -08:00
Andrew Eisenberg
8a36468d11 Merge pull request #882 from github/aeisenberg/better-error
Ensure loadApiError is caught
2022-01-24 12:05:09 -08:00
Andrew Eisenberg
f8c38c1af3 Update changelog 2022-01-24 09:54:17 -08:00
Robin Neatherway
10249d1591 Update tests to remove feature flag 2022-01-24 17:53:09 +00:00
Andrew Eisenberg
e6bcd71529 Remove experimental warning message for custom packs 2022-01-24 09:40:46 -08:00
Andrew Eisenberg
806fc12eb2 Reword changelog entry and add back test 2022-01-24 09:25:52 -08:00
Andrew Eisenberg
ba352d365b Merge branch 'main' into aeisenberg/better-error 2022-01-24 08:56:14 -08:00
Robin Neatherway
751af2a9e3 Set contentType for database uploads 2022-01-24 15:54:46 +00:00
Robin Neatherway
1a686e7d76 Remove old upload path
The `useUploadDomain` approach is now fully enabled
2022-01-24 15:47:08 +00:00
dependabot[bot]
0479586f61 Bump node-fetch from 2.6.1 to 2.6.7 in /runner
Bumps [node-fetch](https://github.com/node-fetch/node-fetch) from 2.6.1 to 2.6.7.
- [Release notes](https://github.com/node-fetch/node-fetch/releases)
- [Changelog](https://github.com/node-fetch/node-fetch/blob/main/docs/CHANGELOG.md)
- [Commits](https://github.com/node-fetch/node-fetch/compare/v2.6.1...v2.6.7)

---
updated-dependencies:
- dependency-name: node-fetch
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-01-24 13:34:40 +00:00
Edoardo Pirovano
708446c6e4 Merge pull request #884 from github/mergeback/v1.0.30-to-main-8b37404d
Mergeback v1.0.30 refs/heads/v1 into main
2022-01-24 13:33:55 +00:00
github-actions[bot]
392316b555 Update checked-in dependencies 2022-01-24 13:16:43 +00:00
github-actions[bot]
5604c0a3ad 1.0.31 2022-01-24 13:01:42 +00:00
github-actions[bot]
7d8ca8fa93 Update changelog and version after v1.0.30 2022-01-24 13:01:40 +00:00
Edoardo Pirovano
8b37404d56 Merge pull request #883 from github/update-v1.0.30-a7adbce2
Merge main into v1
2022-01-24 13:00:42 +00:00
github-actions[bot]
c180f23bb1 1.0.30 2022-01-24 11:30:41 +00:00
Edoardo Pirovano
a7adbce22c Merge pull request #877 from github/codeql-cli-2.7.6
Update default CodeQL version to 2.7.6
2022-01-24 11:18:52 +00:00
Edoardo Pirovano
3b4e4d44dc Update default CodeQL version to 2.7.6 2022-01-24 09:45:48 +00:00
Andrew Eisenberg
f18151cc59 Update error message and remove feature flag preloading
Discussion here https://github.com/github/codeql-action/pull/882#discussion_r789924177
shows that properly handling preloading feature flag errors is complex
and the benefit we get from it does not offset the complexity.
2022-01-21 11:20:48 -08:00
Andrew Eisenberg
e175dea369 Update changelog 2022-01-21 10:05:52 -08:00
Andrew Eisenberg
752ae5743f Ensure loadApiError is caught
And add a better error message.

By using `void` instead of `await`, any error thrown is not caught
by surrounding try-catch blocks.

I could continue to use `void` and explicitly handle any thrown errors
by using `.catch`, but most likely the time savings is minimal and
this makes the code more complex.
2022-01-21 10:04:08 -08:00
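The reasoning here hinges on how `void` and `await` interact with try/catch: a promise started with `void` delivers its failure as an unhandled promise rejection, so the surrounding catch never sees it, while `await` re-throws the rejection inside the block. A small TypeScript sketch with a hypothetical failing call:

async function loadApiError(): Promise<string> {
  // Hypothetical stand-in for the real API call.
  throw new Error("API call failed");
}

async function withVoid(): Promise<void> {
  try {
    // Fire-and-forget: the failure surfaces as an unhandled promise
    // rejection rather than a thrown exception, so the catch below is
    // never reached.
    void loadApiError();
  } catch (e) {
    console.log("never reached");
  }
}

async function withAwait(): Promise<void> {
  try {
    // Awaiting re-throws the rejection inside the try block, so it is caught.
    await loadApiError();
  } catch (e) {
    console.log(`caught: ${(e as Error).message}`);
  }
}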
Andrew Eisenberg
0dabead789 Merge pull request #876 from github/aeisenberg/multi-init
Include better error message
2022-01-21 08:42:06 -08:00
Andrew Eisenberg
5e69ce82f8 Merge branch 'main' into aeisenberg/multi-init 2022-01-21 08:04:13 -08:00
Edoardo Pirovano
fdb92bbffe Merge pull request #881 from github/edoardo/respect-env
Respect extra options in a few `codeql` calls
2022-01-21 14:02:21 +00:00
Edoardo Pirovano
14b4839253 Respect extra options in a few codeql calls 2022-01-21 13:44:52 +00:00
Edoardo Pirovano
d76304cd8e Merge pull request #880 from github/mergeback/v1.0.29-to-main-384cfc42
Mergeback v1.0.29 refs/heads/v1 into main
2022-01-21 11:53:08 +00:00
github-actions[bot]
57c4e974c2 Update checked-in dependencies 2022-01-21 11:23:59 +00:00
github-actions[bot]
c2fb041dc1 1.0.30 2022-01-21 11:12:16 +00:00
github-actions[bot]
ac9d34fbc6 Update changelog and version after v1.0.29 2022-01-21 11:12:14 +00:00
Edoardo Pirovano
384cfc42b2 Merge pull request #879 from github/update-v1.0.29-67c0353a
Merge main into v1
2022-01-21 11:11:07 +00:00
github-actions[bot]
5a1e31dc6a 1.0.29 2022-01-21 10:47:33 +00:00
Edoardo Pirovano
67c0353a8c Merge pull request #878 from github/revert-wait-for-processing
Revert "Start waiting for processing by default."
2022-01-21 10:45:30 +00:00
Chris Gavin
7ec25e02e3 Add a changelog note. 2022-01-21 10:26:18 +00:00
Chris Gavin
713eacdf6c Revert "Start waiting for processing by default."
This reverts commit b661ef1697.
2022-01-21 10:25:57 +00:00
Andrew Eisenberg
c8290d07f0 Update changelog
Add a line item for using a better error message.

Also, add a link for an entry that didn't have one before.
2022-01-20 10:31:43 -08:00
Andrew Eisenberg
51126e5cd1 Include better error message
When users call init multiple times.
2022-01-20 10:28:11 -08:00
Edoardo Pirovano
c0b507e521 Merge pull request #875 from github/mergeback/v1.0.28-to-main-8a4b243f
Mergeback v1.0.28 refs/heads/v1 into main
2022-01-18 22:21:27 +00:00
github-actions[bot]
d563b098d7 Update checked-in dependencies 2022-01-18 21:54:31 +00:00
github-actions[bot]
fca047627b 1.0.29 2022-01-18 21:35:32 +00:00
github-actions[bot]
28fe8e7028 Update changelog and version after v1.0.28 2022-01-18 21:35:30 +00:00
Andrew Eisenberg
8a4b243fbf Merge pull request #874 from github/update-v1.0.28-b31df3ff
Merge main into v1
2022-01-18 13:33:47 -08:00
Andrew Eisenberg
19970ae6b5 Update changelog
Add an entry and move an entry.
2022-01-18 12:44:43 -08:00
github-actions[bot]
ec1b16574e 1.0.28 2022-01-18 19:13:38 +00:00
Henning Makholm
b31df3ff95 Merge pull request #866 from github/hmakholm/pr/2.7.5
Bump CodeQL version to 2.7.5
2022-01-17 19:45:58 +01:00
Henning Makholm
776db51d2e Merge remote-tracking branch 'origin/main' into hmakholm/pr/2.7.5 2022-01-17 18:27:39 +01:00
Andrew Eisenberg
b886234637 Merge pull request #872 from github/aeisenberg/category-with-tool
Change category uniqueness test
2022-01-17 09:19:39 -08:00
Henning Makholm
9913c9bfa5 Merge remote-tracking branch 'origin/main' into hmakholm/pr/2.7.5 2022-01-17 18:06:10 +01:00
Andrew Eisenberg
8de62beb50 Merge branch 'main' into aeisenberg/category-with-tool 2022-01-17 09:00:14 -08:00
Andrew Eisenberg
b6fbccaba1 Merge pull request #873 from github/nickrolfe/ruby
Update warning about interpreted languages to mention Ruby
2022-01-14 09:05:50 -08:00
Nick Rolfe
df0c306daf Update warning about interpreted languages to mention Ruby 2022-01-14 11:57:29 +00:00
Andrew Eisenberg
ab1f709732 Allow duplicate categories in the same validation step
A single SARIF file should be allowed to have duplicated
categories.
2022-01-13 10:35:03 -08:00
Andrew Eisenberg
8454e21c9c Change category uniqueness test
Turboscan only allows a single combination of tool name and automation
details id for testing category uniqueness.

Previously, the check in the action was not entirely correct since it
only looked at the _category_ and not the combination of the category
and the tool name.

It's even more precise now since it is looking at the actual, computed
value of the automation details id, rather than an inputted value of
the category.

This change also includes a refactoring where the action is now avoiding
multiple parsing/stringifying of the sarif files. Instead, sarif is
parsed once at the start of the process and stringified once, after
sarif processing is completely finished.
2022-01-12 15:26:34 -08:00
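A rough sketch of the uniqueness rule this commit describes, using hypothetical type and helper names: duplicates are tolerated within a single SARIF file, but the same combination of tool name and computed automation details ID may not be uploaded twice in one job:

interface SarifRun {
  tool: { driver: { name: string } };
  automationDetails?: { id?: string };
}

// Combinations seen across uploads in the same job.
const seenCombinations = new Set<string>();

function checkNoDuplicateUpload(sarif: { runs: SarifRun[] }): void {
  // Deduplicate within the file first, so repeated categories inside one
  // SARIF file are allowed.
  const keysInThisFile = new Set(
    sarif.runs.map(
      (run) => `${run.tool.driver.name}/${run.automationDetails?.id ?? ""}`
    )
  );
  for (const key of keysInThisFile) {
    if (seenCombinations.has(key)) {
      throw new Error(`Duplicate tool name and automation details ID: ${key}`);
    }
    seenCombinations.add(key);
  }
}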
Henning Makholm
d85c3e58ec Bump CodeQL version to 2.7.5 2022-01-12 19:36:20 +01:00
Edoardo Pirovano
cbabe47a0b Merge pull request #871 from github/mergeback/v1.0.27-to-main-cd783c8a
Mergeback v1.0.27 refs/heads/v1 into main
2022-01-11 22:22:02 +00:00
github-actions[bot]
f8a48f464d Update checked-in dependencies 2022-01-11 21:57:25 +00:00
github-actions[bot]
f6f23f8671 1.0.28 2022-01-11 21:43:35 +00:00
github-actions[bot]
c2a7379048 Update changelog and version after v1.0.27 2022-01-11 21:43:33 +00:00
Edoardo Pirovano
cd783c8a29 Merge pull request #870 from github/update-v1.0.27-faa9ba73
Merge main into v1
2022-01-11 21:42:44 +00:00
github-actions[bot]
300c8b6dcb 1.0.27 2022-01-11 20:35:30 +00:00
Edoardo Pirovano
faa9ba7363 Merge pull request #869 from github/edoardo/windows-11-error
Refuse to run on Windows 11
2022-01-11 20:34:11 +00:00
Edoardo Pirovano
d2a0fc83dc Refuse to run on Windows 11 2022-01-11 18:34:33 +00:00
Edoardo Pirovano
71112ab35d Merge pull request #868 from edoardopirovano/debug-artifact-name
Make name of debugging artifact and DB within it configurable
2022-01-07 17:54:41 +00:00
Edoardo Pirovano
e677af3fd0 Make name of debugging artifact and DB within it configurable 2022-01-07 15:10:26 +00:00
Henry Mercer
848e5140d4 Merge pull request #857 from github/henrymercer/ml-powered-queries
Add support for running ML-powered queries for JS `security-extended` behind `ml_powered_queries` feature flag
2022-01-06 17:55:06 +00:00
Henry Mercer
e7fe6da378 Allow patch version of ML-powered queries pack to be bumped 2022-01-06 11:58:03 +00:00
Henry Mercer
2159631658 Only run ML-powered queries with v2.7.5 or newer of the CLI 2022-01-06 11:58:03 +00:00
Henry Mercer
9de1702400 Document use of redundant feature flag API call 2022-01-06 11:58:02 +00:00
Henry Mercer
efded22908 Bump the version of the ATM query pack to 0.0.2 2022-01-06 11:57:33 +00:00
Henry Mercer
5602bd50bf Test loading of ML-powered queries 2022-01-06 11:57:33 +00:00
Henry Mercer
2f4be8e34b Run ML-powered queries for JS security-extended behind feature flag 2022-01-06 11:57:33 +00:00
Edoardo Pirovano
9763bdd6ec Merge pull request #860 from edoardopirovano/always-upload-db
Always upload DB when in debug mode
2022-01-04 18:25:33 +00:00
Edoardo Pirovano
00d4d60204 Always upload DB when in debug mode 2022-01-04 16:49:31 +00:00
Edoardo Pirovano
e5d84de18b Merge pull request #861 from github/remove-debug-output
Remove debugging output
2022-01-02 10:05:46 +00:00
Edoardo Pirovano
ea1acc573a Merge branch 'main' into remove-debug-output 2022-01-02 09:41:49 +00:00
Edoardo Pirovano
79ea6d6a7c Merge pull request #862 from github/aeisenberg/fix-python-tests
Force virtualenv version
2022-01-02 09:41:38 +00:00
Andrew Eisenberg
3e50d096f8 Force virtualenv version
Force the virtualenv version to be 20.11 or less.
The 20.12 version is failing for python 2 right now.
2022-01-01 19:13:10 -08:00
Edoardo Pirovano
cca1cfdacf Remove debugging output 2021-12-31 16:32:08 +00:00
Edoardo Pirovano
cdea582765 Merge pull request #859 from github/update-supported-enterprise-server-versions
Update supported GitHub Enterprise Server versions.
2021-12-29 09:39:28 +00:00
GitHub
3e59dee9e2 Update supported GitHub Enterprise Server versions. 2021-12-29 00:07:19 +00:00
Henry Mercer
249c7ffce1 Merge pull request #856 from github/henrymercer/feature-flagging
Feature flagging via the GitHub API
2021-12-16 16:18:46 +00:00
Henry Mercer
254816c2d2 Stub feature flag API endpoint in tests 2021-12-16 13:39:18 +00:00
Henry Mercer
6d62c245ec Represent feature flags using an enum
Replaces the previous string literal type
2021-12-16 13:38:34 +00:00
Henry Mercer
5e87034b3b Explicitly pass repository to feature flags constructor
As suggested in review: The `GITHUB_REPOSITORY` environment variable is
only available on Actions. Passing it in explicitly avoids potentially
crashing if this code is called from the runner.
2021-12-15 17:03:43 +00:00
Henry Mercer
621e0794ac Throw an error if the feature flag API request errors 2021-12-15 16:34:26 +00:00
Henry Mercer
d6499fad61 Use new feature flag architecture when uploading databases 2021-12-15 13:17:05 +00:00
Henry Mercer
04671efa1d Add support for feature flagging via the GitHub API 2021-12-15 13:16:33 +00:00
Andrew Eisenberg
e1f05902cd Merge pull request #855 from github/correct-changelog
Move changelog entry into correct place
2021-12-14 11:06:40 -08:00
Thomas Horstmeyer
f9e96fa857 Move changelog entry into correct place 2021-12-14 13:49:52 +00:00
Andrew Eisenberg
14a5537e13 Merge pull request #853 from github/aeisenberg/remove-scheduled
Remove scheduled releases
2021-12-13 19:14:37 -08:00
Andrew Eisenberg
d3eb4974a3 Merge branch 'main' into aeisenberg/remove-scheduled 2021-12-13 18:37:38 -08:00
Edoardo Pirovano
39216d10d3 Merge pull request #854 from edoardopirovano/remove-dotnet-restore
Remove `dotnet restore` calls from CI checks
2021-12-13 23:29:58 +00:00
Edoardo Pirovano
265a7db16a Remove dotnet restore calls from CI checks 2021-12-13 18:04:46 +00:00
Andrew Eisenberg
f623d4cec3 Remove scheduled releases
The action will be released roughly every two weeks along
with the CodeQL CLI release process.
2021-12-13 09:34:53 -08:00
Chris Gavin
eacec3646a Merge pull request #843 from github/wait-for-processing-by-default
Start waiting for processing by default.
2021-12-13 12:19:11 +00:00
Chris Gavin
e0e2abc1a5 Merge branch 'main' into wait-for-processing-by-default 2021-12-13 12:04:22 +00:00
Chris Gavin
716b5980cd Merge pull request #844 from github/duplicated-output
Stop printing all output twice.
2021-12-13 11:57:11 +00:00
Chris Gavin
1d83f2a0bc Merge branch 'main' into duplicated-output 2021-12-13 11:44:22 +00:00
Andrew Eisenberg
ce77f88627 Merge pull request #849 from github/mergeback/v1.0.26-to-main-5f532563
Mergeback v1.0.26 refs/heads/v1 into main
2021-12-10 13:41:53 -08:00
github-actions[bot]
a777b51ef7 Update checked-in dependencies 2021-12-10 19:09:21 +00:00
github-actions[bot]
88fbabe21d 1.0.27 2021-12-10 18:40:13 +00:00
github-actions[bot]
eeb215b041 Update changelog and version after v1.0.26 2021-12-10 18:40:11 +00:00
Andrew Eisenberg
5f53256358 Merge pull request #848 from github/update-v1.0.26-07825549
Merge main into v1
2021-12-10 10:39:01 -08:00
github-actions[bot]
25a5103778 1.0.26 2021-12-10 18:20:01 +00:00
Edoardo Pirovano
0782554948 Merge pull request #845 from github/refuse-broken-versions
Refuse to use broken versions in the toolcache
2021-12-09 16:53:57 +00:00
Edoardo Pirovano
705f634a1d Refuse to use broken versions in the toolcache 2021-12-09 13:43:57 +00:00
Chris Gavin
b7b7607959 Stop printing all output twice. 2021-12-09 13:21:32 +00:00
Chris Gavin
7bcc6564d4 Add more context to a change note.
Co-authored-by: Thomas Horstmeyer <73262256+cannist@users.noreply.github.com>
2021-12-09 12:53:20 +00:00
Chris Gavin
b661ef1697 Start waiting for processing by default. 2021-12-09 10:21:34 +00:00
Andrew Eisenberg
6ad00fd084 Merge pull request #842 from github/aeisenberg/cli-2.7.3
Prepare for the CodeQL 2.7.3 release
2021-12-08 17:46:32 -08:00
Andrew Eisenberg
fccdee04ba Prepare for the CodeQL 2.7.3 release 2021-12-08 17:18:05 -08:00
Andrew Eisenberg
e694ca6192 Merge pull request #841 from github/aeisenberg/fix-del
Always use `force: true` for del
2021-12-08 16:01:45 -08:00
Andrew Eisenberg
67d11b5928 Always use force: true for del 2021-12-08 15:37:43 -08:00
Aditya Sharad
924a64d2e0 Merge pull request #840 from github/aeisenberg/fix-rm
Remove rmDir references
2021-12-08 15:08:58 -08:00
Andrew Eisenberg
45dc27d3c1 Remove rmDir references
`rmDir` is not available on the node version used by the actions runner.

Instead, use the `del` package. It is safe, well-tested, and
cross-platform.
2021-12-08 12:11:31 -08:00
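For reference, a call in the style this commit describes might look like the following sketch, assuming the `del` package's v6-style default export; `force: true` allows deleting paths outside the working directory, matching the follow-up commit a few entries above:

import del from "del";

async function removeDirectory(dir: string): Promise<void> {
  // force: true permits deletion outside the current working directory.
  await del(dir, { force: true });
}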
Andrew Eisenberg
cbed0358c6 Merge pull request #839 from github/revert-837-aeisenberg/cli-2.7.3
Revert "Bump default CodeQL version to 2.7.3"
2021-12-08 10:58:53 -08:00
Andrew Eisenberg
a8cf6f42c2 Revert "Bump default CodeQL version to 2.7.3" 2021-12-08 10:07:10 -08:00
Andrew Eisenberg
eebe7c46f1 Merge pull request #837 from github/aeisenberg/cli-2.7.3
Bump default CodeQL version to 2.7.3
2021-12-08 09:02:37 -08:00
Edoardo Pirovano
dc32d5448f Add a workflow step to do dotnet restore 2021-12-08 14:47:42 +00:00
Andrew Eisenberg
fac22de4f9 Autobuild: Prefix invocations with CODEQL_RUNNER
Co-authored-by: Aditya Sharad <6874315+adityasharad@users.noreply.github.com>
2021-12-07 20:50:17 -08:00
Andrew Eisenberg
0a1efd7f45 Update changelog 2021-12-07 10:50:12 -08:00
Andrew Eisenberg
043e3deaeb Bump default CodeQL version to 2.7.3 2021-12-07 10:45:17 -08:00
Edoardo Pirovano
0dbcb55617 Merge pull request #835 from github/mergeback/v1.0.25-to-main-546b30f3
Mergeback v1.0.25 refs/heads/v1 into main
2021-12-06 16:23:57 +00:00
github-actions[bot]
00c59b98ce Update checked-in dependencies 2021-12-06 15:25:38 +00:00
github-actions[bot]
7069ada3ed 1.0.26 2021-12-06 15:14:23 +00:00
github-actions[bot]
dd1f9a96d8 Update changelog and version after v1.0.25 2021-12-06 15:14:20 +00:00
Robert
546b30f35a Merge pull request #834 from github/update-v1.0.25-f44219c9
Merge main into v1
2021-12-06 15:13:18 +00:00
github-actions[bot]
d1dde03d7a 1.0.25 2021-12-06 14:42:24 +00:00
Robert
f44219c94b Merge pull request #832 from github/robertbrignull/upload_domain
Upload using uploads.github.com if enabled for that repository
2021-12-06 10:24:27 +00:00
Robert
bdaac951f7 Merge branch 'main' into robertbrignull/upload_domain 2021-12-06 09:59:11 +00:00
Robert
a82f53a364 Merge pull request #833 from github/robertbrignull/fix-dotnet
Deal with new dotnet version
2021-12-06 09:58:58 +00:00
Robert
f721f011bf Add call to dotnet restore 2021-12-06 09:43:07 +00:00
Robert
c82e09aa41 Delete bundled db before recreating 2021-12-01 12:25:57 +00:00
Robert
460d053698 Upload using uploads.github.com if enabled for that repository 2021-12-01 12:13:46 +00:00
Edoardo Pirovano
3bf14e85d8 Merge pull request #829 from github/mergeback/v1.0.24-to-main-e095058b
Mergeback v1.0.24 refs/heads/v1 into main
2021-11-23 11:32:20 +00:00
github-actions[bot]
13a9d6c442 Update checked-in dependencies 2021-11-23 11:12:31 +00:00
github-actions[bot]
dd65833ab6 1.0.25 2021-11-23 10:59:43 +00:00
github-actions[bot]
c2d9e4b48f Update changelog and version after v1.0.24 2021-11-23 10:59:41 +00:00
Edoardo Pirovano
e095058bfa Merge pull request #828 from github/update-v1.0.24-0b242db7
Merge main into v1
2021-11-23 10:58:34 +00:00
Edoardo Pirovano
2c99f99c4a Merge branch 'v1' into update-v1.0.24-0b242db7 2021-11-23 09:54:57 +00:00
github-actions[bot]
bcd7e6896f 1.0.24 2021-11-23 09:52:25 +00:00
Edoardo Pirovano
0b242db78f Merge pull request #827 from github/2.7.2-release
Bump default CodeQL version to 2.7.2
2021-11-22 15:17:04 +00:00
Edoardo Pirovano
c897659213 Add CHANGELOG note for new bundle version 2021-11-22 13:11:20 +00:00
Edoardo Pirovano
8b902e1723 Bump default CodeQL version to 2.7.2 2021-11-22 13:09:42 +00:00
Henning Makholm
a627e9fa50 Merge pull request #818 from github/update-v1.0.23-2ecc17d7
Merge main into v1
2021-11-16 20:56:34 +01:00
Henning Makholm
160021fe53 fix changelog 2021-11-16 20:39:37 +01:00
github-actions[bot]
3f2269bf58 1.0.23 2021-11-16 18:48:44 +00:00
1382 changed files with 53422 additions and 130030 deletions


@@ -10,7 +10,8 @@
"plugin:@typescript-eslint/recommended", "plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-requiring-type-checking", "plugin:@typescript-eslint/recommended-requiring-type-checking",
"plugin:github/recommended", "plugin:github/recommended",
"plugin:github/typescript" "plugin:github/typescript",
"plugin:import/typescript"
], ],
"rules": { "rules": {
"filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"], "filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],


@@ -28,7 +28,6 @@ runs:
echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz" echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == *"stable"* ]]; then elif [[ ${{ inputs.version }} == *"stable"* ]]; then
export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'` export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
echo "Hello $VERSION"
echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz" echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == "latest" ]]; then elif [[ ${{ inputs.version }} == "latest" ]]; then
echo "::set-output name=tools-url::latest" echo "::set-output name=tools-url::latest"

.github/workflows/__analyze-ref-input.yml (generated, vendored, Normal file, 63 changed lines)

@@ -0,0 +1,63 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: "PR Check - Analyze: 'ref' and 'sha' from inputs"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
analyze-ref-input:
strategy:
matrix:
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os:
- ubuntu-latest
- macos-latest
- windows-latest
name: "Analyze: 'ref' and 'sha' from inputs"
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
env:
TEST_MODE: true
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -45,6 +45,8 @@ jobs:
 with:
 tools: ${{ steps.prepare-test.outputs.tools-url }}
 debug: true
+debug-artifact-name: my-debug-artifacts
+debug-database-name: my-db
 - name: Build code
 shell: bash
 run: ./build.sh
@@ -52,7 +54,7 @@ jobs:
 id: analysis
 - uses: actions/download-artifact@v2
 with:
-name: debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
+name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
 - shell: bash
 run: |
 LANGUAGES="cpp csharp go java javascript python"
@@ -62,7 +64,7 @@ jobs:
 echo "Missing a SARIF file for $language"
 exit 1
 fi
-if [[ ! -f "$language.zip" ]] ; then
+if [[ ! -f "my-db-$language.zip" ]] ; then
 echo "Missing a database bundle for $language"
 exit 1
 fi


@@ -48,7 +48,7 @@ jobs:
 with:
 tools: ${{ steps.prepare-test.outputs.tools-url }}
 languages: cpp,csharp,java,javascript,python
-config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
+config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
 github.sha }}
 - name: Build code
 shell: bash

.github/workflows/__upload-ref-sha-input.yml (generated, vendored, Normal file, 70 changed lines)

@@ -0,0 +1,70 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: "PR Check - Upload-sarif: 'ref' and 'sha' from inputs"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
upload-ref-sha-input:
strategy:
matrix:
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os:
- ubuntu-latest
- macos-latest
- windows-latest
name: "Upload-sarif: 'ref' and 'sha' from inputs"
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
upload: false
env:
TEST_MODE: true
- uses: ./../action/upload-sarif
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
env:
TEST_MODE: true
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -1,7 +1,5 @@
 name: Update release branch
 on:
-schedule:
-- cron: 0 9 * * 1
 repository_dispatch:
 # Example of how to trigger this:
 # curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'

.gitignore (vendored, 2 changed lines)

@@ -1,2 +1,4 @@
 /runner/dist/
 /runner/node_modules/
+# Ignore for example failing-tests.json from AVA
+node_modules/.cache


@@ -1,9 +1,46 @@
 # CodeQL Action and CodeQL Runner Changelog
-## [UNRELEASED]
+## 1.0.32 - 07 Feb 2022
+- Add `sarif-id` as an output for the `upload-sarif` and `analyze` actions. [#889](https://github.com/github/codeql-action/pull/889)
+- Add `ref` and `sha` inputs to the `analyze` action, which override the defaults provided by the GitHub Action context. [#889](https://github.com/github/codeql-action/pull/889)
+- Update default CodeQL bundle version to 2.8.0. [#911](https://github.com/github/codeql-action/pull/911)
+## 1.0.31 - 31 Jan 2022
+- Remove `experimental` message when using custom CodeQL packages. [#888](https://github.com/github/codeql-action/pull/888)
+- Add a better warning message stating that experimental features will be disabled if the workflow has been triggered by a pull request from a fork or the `security-events: write` permission is not present. [#882](https://github.com/github/codeql-action/pull/882)
+## 1.0.30 - 24 Jan 2022
+- Display a better error message when encountering a workflow that runs the `codeql-action/init` action multiple times. [#876](https://github.com/github/codeql-action/pull/876)
+- Update default CodeQL bundle version to 2.7.6. [#877](https://github.com/github/codeql-action/pull/877)
+## 1.0.29 - 21 Jan 2022
+- The feature to wait for SARIF processing to complete after upload has been disabled by default due to a bug in its interaction with pull requests from forks.
+## 1.0.28 - 18 Jan 2022
+- Update default CodeQL bundle version to 2.7.5. [#866](https://github.com/github/codeql-action/pull/866)
+- Fix a bug where SARIF files were failing upload due to an invalid test for unique categories. [#872](https://github.com/github/codeql-action/pull/872)
+## 1.0.27 - 11 Jan 2022
+- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#855](https://github.com/github/codeql-action/pull/855)
+## 1.0.26 - 10 Dec 2021
+- Update default CodeQL bundle version to 2.7.3. [#842](https://github.com/github/codeql-action/pull/842)
+## 1.0.25 - 06 Dec 2021
 No user facing changes.
+## 1.0.24 - 23 Nov 2021
+- Update default CodeQL bundle version to 2.7.2. [#827](https://github.com/github/codeql-action/pull/827)
 ## 1.0.23 - 16 Nov 2021
 - The `upload-sarif` action now allows multiple uploads in a single job, as long as they have different categories. [#801](https://github.com/github/codeql-action/pull/801)


@@ -45,6 +45,12 @@ inputs:
 description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
 required: false
 default: ${{ github.workspace }}
+ref:
+description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."
+required: false
+sha:
+description: "The sha of the HEAD of the ref where results will be uploaded. If not provided, the Action will use the GITHUB_SHA environment variable. If provided, the ref input must be provided as well. This input is not available in pull requests from forks."
+required: false
 category:
 description: String used by Code Scanning for matching the analyses
 required: false
@@ -63,6 +69,8 @@ inputs:
 outputs:
 db-locations:
 description: A map from language to absolute path for each database created by CodeQL.
+sarif-id:
+description: The ID of the uploaded SARIF file.
 runs:
 using: "node12"
 main: "../lib/analyze-action.js"


@@ -59,6 +59,16 @@ inputs:
 description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
 required: false
 default: 'false'
+debug-artifact-name:
+description: >-
+The name of the artifact to store debugging information in.
+This is only used when debug mode is enabled.
+required: false
+debug-database-name:
+description: >-
+The name of the database uploaded to the debugging artifact.
+This is only used when debug mode is enabled.
+required: false
 outputs:
 codeql-path:
 description: The path of the CodeQL binary used for analysis

lib/actions-util.js (generated, 86 changed lines)

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
+exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const core = __importStar(require("@actions/core"));
@@ -51,10 +51,10 @@ exports.getRequiredInput = getRequiredInput;
 * This allows us to get stronger type checking of required/optional inputs
 * and make behaviour more consistent between actions and the runner.
 */
-function getOptionalInput(name) {
+const getOptionalInput = function (name) {
 const value = core.getInput(name);
 return value.length > 0 ? value : undefined;
-}
+};
 exports.getOptionalInput = getOptionalInput;
 function getTemporaryDirectory() {
 const value = process.env["CODEQL_ACTION_TEMP"];
@@ -97,12 +97,61 @@ const getCommitOid = async function (ref = "HEAD") {
 return commitOid.trim();
 }
 catch (e) {
-core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
+core.info(`Failed to call git to get current commit. Continuing with data from environment or input: ${e}`);
 core.info(e.stack || "NO STACK");
-return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
+return (0, exports.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
 }
 };
 exports.getCommitOid = getCommitOid;
+/**
+* If the action was triggered by a pull request, determine the commit sha of the merge base.
+* Returns undefined if run by other triggers or the merge base cannot be determined.
+*/
+const determineMergeBaseCommitOid = async function () {
+if (process.env.GITHUB_EVENT_NAME !== "pull_request") {
+return undefined;
+}
+const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
+try {
+let commitOid = "";
+let baseOid = "";
+let headOid = "";
+await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["show", "-s", "--format=raw", mergeSha], {
+silent: true,
+listeners: {
+stdline: (data) => {
+if (data.startsWith("commit ") && commitOid === "") {
+commitOid = data.substring(7);
+}
+else if (data.startsWith("parent ")) {
+if (baseOid === "") {
+baseOid = data.substring(7);
+}
+else if (headOid === "") {
+headOid = data.substring(7);
+}
+}
+},
+stderr: (data) => {
+process.stderr.write(data);
+},
+},
+}).exec();
+// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
+if (commitOid === mergeSha &&
+headOid.length === 40 &&
+baseOid.length === 40) {
+return baseOid;
+}
+return undefined;
+}
+catch (e) {
+core.info(`Failed to call git to determine merge base. Continuing with data from environment: ${e}`);
+core.info(e.stack || "NO STACK");
+return undefined;
+}
+};
+exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
 function isObject(o) {
 return o !== null && typeof o === "object";
 }
@@ -373,8 +422,21 @@ exports.computeAutomationID = computeAutomationID;
 async function getRef() {
 // Will be in the form "refs/heads/master" on a push event
 // or in the form "refs/pull/N/merge" on a pull_request event
-const ref = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
-const sha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
+const refInput = (0, exports.getOptionalInput)("ref");
+const shaInput = (0, exports.getOptionalInput)("sha");
+const hasRefInput = !!refInput;
+const hasShaInput = !!shaInput;
+// If one of 'ref' or 'sha' are provided, both are required
+if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
+throw new Error("Both 'ref' and 'sha' are required if one of them is provided.");
+}
+const ref = refInput || (0, util_1.getRequiredEnvParam)("GITHUB_REF");
+const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
+// If the ref is a user-provided input, we have to skip logic
+// and assume that it is really where they want to upload the results.
+if (refInput) {
+return refInput;
+}
 // For pull request refs we want to detect whether the workflow
 // has run `git checkout HEAD^2` to analyze the 'head' ref rather
 // than the 'merge' ref. If so, we want to convert the ref that
@@ -412,7 +474,7 @@ exports.getRef = getRef;
 * @param exception Exception (only supply if status is 'failure')
 */
 async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
-const commitOid = process.env["GITHUB_SHA"] || "";
+const commitOid = (0, exports.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
 const ref = await getRef();
 const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
 let workflowRunID = -1;
@@ -464,7 +526,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
 }
 exports.createStatusReportBase = createStatusReportBase;
 const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
-const GENERIC_404_MSG = "Not authorized to used the CodeQL code scanning feature on this repo.";
+const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
 const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
 const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
 /**
@@ -479,6 +541,12 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
 async function sendStatusReport(statusReport) {
 const statusReportJSON = JSON.stringify(statusReport);
 core.debug(`Sending status report: ${statusReportJSON}`);
+// If in test mode we don't want to upload the results
+const testMode = process.env["TEST_MODE"] === "true" || false;
+if (testMode) {
+core.debug("In test mode. Status reports are not uploaded.");
+return true;
+}
 const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
 const [owner, repo] = nwo.split("/");
 const client = api.getActionsApiClient();

File diff suppressed because one or more lines are too long


@@ -71,6 +71,43 @@ function errorCodes(actual, expected) {
 t.deepEqual(actualRef, "refs/pull/1/head");
 callback.restore();
 });
+(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
+const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
+getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
+// These values are be ignored
+process.env["GITHUB_REF"] = "refs/pull/1/merge";
+process.env["GITHUB_SHA"] = "a".repeat(40);
+const callback = sinon.stub(actionsutil, "getCommitOid");
+callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
+callback.withArgs("HEAD").resolves("b".repeat(40));
+const actualRef = await actionsutil.getRef();
+t.deepEqual(actualRef, "refs/pull/2/merge");
+callback.restore();
+getAdditionalInputStub.restore();
+});
+(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
+const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
+await t.throwsAsync(async () => {
+await actionsutil.getRef();
+}, {
+instanceOf: Error,
+message: "Both 'ref' and 'sha' are required if one of them is provided.",
+});
+getAdditionalInputStub.restore();
+});
+(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
+const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
+await t.throwsAsync(async () => {
+await actionsutil.getRef();
+}, {
+instanceOf: Error,
+message: "Both 'ref' and 'sha' are required if one of them is provided.",
+});
+getAdditionalInputStub.restore();
+});
 (0, ava_1.default)("computeAutomationID()", async (t) => {
 let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
 t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");

File diff suppressed because one or more lines are too long

lib/analysis-paths.js (generated, 4 changed lines)

@@ -37,11 +37,11 @@ function buildIncludeExcludeEnvVar(paths) {
 return paths.join("\n");
 }
 function printPathFiltersWarning(config, logger) {
-// Index include/exclude/filters only work in javascript and python.
+// Index include/exclude/filters only work in javascript/python/ruby.
 // If any other languages are detected/configured then show a warning.
 if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
 !config.languages.every(isInterpretedLanguage)) {
-logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python');
+logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript, Python, and Ruby');
 }
 }
 exports.printPathFiltersWarning = printPathFiltersWarning;


@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"} 
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}

View File

@@ -43,6 +43,8 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
+ debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
+ debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -65,6 +67,8 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
+ debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
+ debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -88,6 +92,8 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
+ debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
+ debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);

View File

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;SACjB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;SACjB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;SACjB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"} 
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

View File

@@ -45,6 +45,7 @@ const util = __importStar(require("./util"));
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(configUtils, "getConfig").resolves({
+ gitHubVersion: { type: util.GitHubVariant.DOTCOM },
languages: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
@@ -53,6 +54,7 @@ const util = __importStar(require("./util"));
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
+ (0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
// When there are no action inputs for RAM and threads, the action uses
// environment variables (passed down from the init action) to set RAM and
// threads usage.
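Both analyze-action test hunks add gitHubVersion to the stubbed config and mock the feature-flag API, because the analyze action now builds a feature-flag client from the stored config. A minimal hedged sketch of the stub shape, using sinon as the tests do and assuming the action's own config-utils and util modules are importable; the partial-config cast is a simplification.

// Sketch only: the stubbed config must now include gitHubVersion so the
// analyze action can construct its feature-flag client.
import * as sinon from "sinon";

import * as configUtils from "./config-utils";
import { GitHubVariant } from "./util";

export function stubAnalyzeConfig(): sinon.SinonStub {
  return sinon.stub(configUtils, "getConfig").resolves({
    gitHubVersion: { type: GitHubVariant.DOTCOM },
    languages: [],
  } as unknown as configUtils.Config);
}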

View File

@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAA+D;AAC/D,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"} 
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

View File

@@ -45,6 +45,7 @@ const util = __importStar(require("./util"));
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(configUtils, "getConfig").resolves({
+ gitHubVersion: { type: util.GitHubVariant.DOTCOM },
languages: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
@@ -53,6 +54,7 @@ const util = __importStar(require("./util"));
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
+ (0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";
process.env["CODEQL_RAM"] = "4992";
// Action inputs have precedence over environment variables.

View File

@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAA+D;AAC/D,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"} 
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

38
lib/analyze-action.js generated
View File

@@ -29,6 +29,7 @@ const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const database_upload_1 = require("./database-upload");
+ const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
@@ -71,12 +72,14 @@ async function run() {
const outputDir = actionsUtil.getRequiredInput("output");
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
+ const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
+ const featureFlags = new feature_flags_1.GitHubFeatureFlags(config.gitHubVersion, apiDetails, repositoryNwo, logger);
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
if (config.debugMode) {
// Upload the SARIF files as an Actions artifact for debugging
- await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir);
+ await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir, config.debugArtifactName);
}
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
@@ -90,10 +93,10 @@ async function run() {
// Multilanguage tracing: there are additional logs in the root of the cluster
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
}
- await uploadDebugArtifacts(toUpload, config.dbLocation);
+ await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
- await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir);
+ await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir, config.debugArtifactName);
}
}
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
@@ -106,23 +109,17 @@ async function run() {
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
+ core.setOutput("sarif-id", uploadResult.sarifID);
}
else {
logger.info("Not uploading results");
}
- const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
- await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
+ // Possibly upload the database bundles for remote queries
+ await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, featureFlags, apiDetails, logger);
if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
- if (config.debugMode) {
- // Upload the database bundles as an Actions artifact for debugging
- const toUpload = [];
- for (const language of config.languages)
- toUpload.push(await (0, util_1.bundleDb)(config, language, codeql));
- await uploadDebugArtifacts(toUpload, config.dbLocation);
- }
}
catch (origError) {
const error = origError instanceof Error ? origError : new Error(String(origError));
@@ -138,6 +135,19 @@ async function run() {
return;
}
finally {
+ if (config !== undefined && config.debugMode) {
+ try {
+ // Upload the database bundles as an Actions artifact for debugging
+ const toUpload = [];
+ for (const language of config.languages) {
+ toUpload.push(await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`));
+ }
+ await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
+ }
+ catch (error) {
+ console.log(`Failed to upload database debug bundles: ${error}`);
+ }
+ }
if (core.isDebug() && config !== undefined) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
@@ -173,14 +183,14 @@ async function run() {
await sendStatusReport(startedAt, undefined);
}
}
- async function uploadDebugArtifacts(toUpload, rootDir) {
+ async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
let suffix = "";
const matrix = actionsUtil.getRequiredInput("matrix");
if (matrix !== undefined && matrix !== "null") {
for (const entry of Object.entries(JSON.parse(matrix)).sort())
suffix += `-${entry[1]}`;
}
- await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${util_1.DEBUG_ARTIFACT_NAME}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
+ await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
}
function listFolder(dir) {
const entries = fs.readdirSync(dir, { withFileTypes: true });
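The uploadDebugArtifacts change above takes the base name from config.debugArtifactName and appends a suffix derived from the job's matrix values. A hedged standalone TypeScript sketch of that naming scheme follows; the function name and the sanitisation rule are illustrative, not the action's exact code.

// Illustrative sketch: build a debug artifact name from a base name plus a
// suffix derived from the job's matrix values, as uploadDebugArtifacts does.
function buildDebugArtifactName(baseName: string, matrixJson?: string): string {
  let suffix = "";
  if (matrixJson && matrixJson !== "null") {
    const matrix = JSON.parse(matrixJson) as Record<string, unknown>;
    // Sort entries so the suffix is stable regardless of matrix key order.
    for (const [, value] of Object.entries(matrix).sort()) {
      suffix += `-${value}`;
    }
  }
  // Keep only characters that are safe in an artifact name (assumed rule).
  return `${baseName}${suffix}`.replace(/[^A-Za-z0-9_.-]/g, "");
}

// Example: a matrix of { language: "javascript", os: "ubuntu-latest" }
// yields "my-debug-artifacts-javascript-ubuntu-latest".
console.log(
  buildDebugArtifactName(
    "my-debug-artifacts",
    JSON.stringify({ language: "javascript", os: "ubuntu-latest" })
  )
);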

File diff suppressed because one or more lines are too long

5
lib/analyze.js generated
View File

@@ -132,10 +132,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
}
try {
if (hasPackWithCustomQueries) {
- logger.info("*************");
- logger.info("Performing analysis with custom QL Packs. QL Packs are an experimental feature.");
- logger.info("And should not be used in production yet.");
- logger.info("*************");
+ logger.info("Performing analysis with custom CodeQL Packs.");
logger.startGroup(`Downloading custom packs for ${language}`);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const results = await codeql.packDownload(packsWithVersion);

File diff suppressed because one or more lines are too long

2
lib/analyze.test.js generated
View File

@@ -126,6 +126,8 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs,
debugMode: false,
+ debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
+ debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,

File diff suppressed because one or more lines are too long

View File

@@ -1 +1 @@
{ "maximumVersion": "3.3", "minimumVersion": "3.0" } { "maximumVersion": "3.4", "minimumVersion": "3.0" }

18
lib/codeql.js generated
View File

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
- exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
+ exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -37,6 +37,7 @@ const languages_1 = require("./languages");
const toolcache = __importStar(require("./toolcache"));
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
const util = __importStar(require("./util"));
+ const util_1 = require("./util");
class CommandInvocationError extends Error {
constructor(cmd, args, exitCode, error) {
super(`Failure invoking ${cmd} with arguments ${args}.\n
@@ -73,6 +74,7 @@ const CODEQL_VERSION_GROUP_RULES = "2.5.5";
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
+ exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
/**
* This variable controls using the new style of tracing from the CodeQL
* CLI. In particular, with versions above this we will use both indirect
@@ -215,7 +217,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
// specified explicitly (in which case we always honor it).
if (!codeqlFolder && !codeqlURL && !forceLatest) {
const codeqlVersions = toolcache.findAllVersions("CodeQL", toolCacheDir, logger);
- if (codeqlVersions.length === 1) {
+ if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], toolCacheDir, logger);
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
@@ -510,7 +512,12 @@ async function getCodeQLForCmd(cmd, checkVersion) {
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, args, error_matcher_1.errorMatchers);
},
async resolveLanguages() {
- const codeqlArgs = ["resolve", "languages", "--format=json"];
+ const codeqlArgs = [
+ "resolve",
+ "languages",
+ "--format=json",
+ ...getExtraOptionsFromEnv(["resolve", "languages"]),
+ ];
const output = await runTool(cmd, codeqlArgs);
try {
return JSON.parse(output);
@@ -633,15 +640,18 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"cleanup",
databasePath,
`--mode=${cleanupLevel}`,
+ ...getExtraOptionsFromEnv(["database", "cleanup"]),
];
await runTool(cmd, codeqlArgs);
},
- async databaseBundle(databasePath, outputFilePath) {
+ async databaseBundle(databasePath, outputFilePath, databaseName) {
const args = [
"database",
"bundle",
databasePath,
`--output=${outputFilePath}`,
+ `--name=${databaseName}`,
+ ...getExtraOptionsFromEnv(["database", "bundle"]),
];
await new toolrunner.ToolRunner(cmd, args).exec();
},
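The codeql.js hunks above append ...getExtraOptionsFromEnv([...]) to more CLI invocations (resolve languages, database cleanup, database bundle). The real helper's configuration format is not shown in this diff, so the sketch below is a hedged approximation: the environment variable name and the nested-JSON layout are assumptions.

// Hypothetical sketch of an "extra options" passthrough like the spreads above.
// Assumes an env variable carrying JSON such as
// { "database": { "cleanup": ["--flag"] } }.
function getExtraOptionsFromEnv(commandPath: string[]): string[] {
  const raw = process.env["CODEQL_ACTION_EXTRA_OPTIONS"]; // name is an assumption
  if (!raw) {
    return [];
  }
  // Walk the parsed object along the command path, e.g. ["database", "bundle"].
  let node: unknown = JSON.parse(raw);
  for (const segment of commandPath) {
    if (typeof node !== "object" || node === null) {
      return [];
    }
    node = (node as Record<string, unknown>)[segment];
  }
  return Array.isArray(node) ? node.map(String) : [];
}

// Example: pass an extra flag to `codeql database cleanup`.
process.env["CODEQL_ACTION_EXTRA_OPTIONS"] = JSON.stringify({
  database: { cleanup: ["--ram=2048"] },
});
console.log(getExtraOptionsFromEnv(["database", "cleanup"])); // ["--ram=2048"]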

File diff suppressed because one or more lines are too long

53
lib/config-utils.js generated
View File

@@ -25,8 +25,11 @@ const path = __importStar(require("path"));
const yaml = __importStar(require("js-yaml"));
const semver = __importStar(require("semver"));
const api = __importStar(require("./api-client"));
+ const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
+ const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
+ const util_1 = require("./util");
// Property names from the user-supplied config file.
const NAME_PROPERTY = "name";
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
@@ -116,11 +119,25 @@ const builtinSuites = ["security-extended", "security-and-quality"];
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite.
*/
- async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
+ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
+ var _a;
const found = builtinSuites.find((suite) => suite === suiteName);
if (!found) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
+ // If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
+ // opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
+ // pack, then add the ML-powered query pack so that we run ML-powered queries.
+ if (languages.includes("javascript") &&
+ (found === "security-extended" || found === "security-and-quality") &&
+ !((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some((pack) => pack.packName === util_1.ML_POWERED_JS_QUERIES_PACK.packName)) &&
+ (await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
+ (await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
+ if (!packs.javascript) {
+ packs.javascript = [];
+ }
+ packs.javascript.push(util_1.ML_POWERED_JS_QUERIES_PACK);
+ }
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
}
@@ -180,7 +197,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*/
- async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, workspacePath, apiDetails, logger, configFile) {
+ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
@@ -192,7 +209,7 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
}
// Check for one of the builtin suites
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
- await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
+ await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
return;
}
// Otherwise, must be a reference to another repo
@@ -404,12 +421,12 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
}
return parsedLanguages;
}
- async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, workspacePath, apiDetails, logger) {
+ async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
- await parseQueryUses(languages, codeQL, resultMap, query, tempDir, workspacePath, apiDetails, logger);
+ await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
}
// Returns true if either no queries were provided in the workflow.
@@ -425,7 +442,7 @@ function shouldAddConfigFileQueries(queriesInput) {
/**
* Get the default config for when the user has not supplied one.
*/
- async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
+ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a;
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
const queries = {};
@@ -436,10 +453,10 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
};
}
await addDefaultQueries(codeQL, languages, queries);
- if (queriesInput) {
- await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
- }
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
+ if (queriesInput) {
+ await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
+ }
return {
languages,
queries,
@@ -453,13 +470,15 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
+ debugArtifactName,
+ debugDatabaseName,
};
}
exports.getDefaultConfig = getDefaultConfig;
/**
* Load the config from the given file.
*/
- async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
+ async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a;
let parsedYAML;
if (isLocal(configFile)) {
@@ -500,12 +519,13 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
if (!disableDefaultQueries) {
await addDefaultQueries(codeQL, languages, queries);
}
+ const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
if (queriesInput) {
- await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
+ await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
@@ -518,7 +538,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
- await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, logger, configFile);
+ await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureFlags, logger, configFile);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
@@ -543,7 +563,6 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
}
}
- const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
return {
languages,
queries,
@@ -557,6 +576,8 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
+ debugArtifactName,
+ debugDatabaseName,
};
}
/**
@@ -682,16 +703,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
- async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
+ async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a, _b, _c;
let config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug("No configuration file was provided");
- config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
+ config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
}
else {
- config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
+ config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
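The addBuiltinSuiteQueries change above only adds the ML-powered JavaScript query pack when several conditions all hold. A simplified TypeScript sketch of that decision follows; the types, the flag's string value, and the pack name are illustrative stand-ins for values that live in the action's util and feature-flags modules.

// Simplified sketch of the condition added above: add the ML-powered JS pack
// only for JavaScript analyses running security-extended or
// security-and-quality, only when the feature flag is on, only when the CLI
// is recent enough, and only if the user has not already requested the pack.
interface PackWithVersion {
  packName: string;
  version?: string;
}

enum FeatureFlag {
  MlPoweredQueriesEnabled = "ml_powered_queries_enabled", // string value assumed
}

interface FeatureFlags {
  getValue(flag: FeatureFlag): Promise<boolean>;
}

const ML_POWERED_JS_QUERIES_PACK: PackWithVersion = {
  packName: "codeql/javascript-experimental-atm-queries", // illustrative
};

async function maybeAddMlPoweredJsPack(
  languages: string[],
  suiteName: string,
  packs: { javascript?: PackWithVersion[] },
  featureFlags: FeatureFlags,
  cliSupportsMlQueries: boolean
): Promise<void> {
  const alreadyAdded = packs.javascript?.some(
    (pack) => pack.packName === ML_POWERED_JS_QUERIES_PACK.packName
  );
  if (
    languages.includes("javascript") &&
    (suiteName === "security-extended" || suiteName === "security-and-quality") &&
    !alreadyAdded &&
    (await featureFlags.getValue(FeatureFlag.MlPoweredQueriesEnabled)) &&
    cliSupportsMlQueries
  ) {
    if (!packs.javascript) {
      packs.javascript = [];
    }
    packs.javascript.push(ML_POWERED_JS_QUERIES_PACK);
  }
}

// Example: a repo opted into the beta, running security-extended on JavaScript.
const packs: { javascript?: PackWithVersion[] } = {};
maybeAddMlPoweredJsPack(
  ["javascript"],
  "security-extended",
  packs,
  { getValue: async () => true },
  true
).then(() => console.log(packs.javascript));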

File diff suppressed because one or more lines are too long

128
lib/config-utils.test.js generated
View File

@@ -31,6 +31,7 @@ const sinon = __importStar(require("sinon"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
+ const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
@@ -88,8 +89,8 @@ function mockListLanguages(languages) {
};
},
});
- const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
+ const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
- t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
+ t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
});
});
(0, ava_1.default)("loading config saves config", async (t) => {
@@ -111,18 +112,21 @@ function mockListLanguages(languages) {
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig returns undefined before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
- const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
+ const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig(tmpDir, logger);
- t.deepEqual(config1, config2);
+ t.not(config2, undefined);
+ if (config2 !== undefined) {
+ t.deepEqual(config1, config2);
+ }
});
});
(0, ava_1.default)("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
try {
- await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
+ await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -135,7 +139,7 @@ function mockListLanguages(languages) {
// no filename given, just a repo
const configFile = "octo-org/codeql-config@main";
try {
- await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
+ await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -149,7 +153,7 @@ function mockListLanguages(languages) {
const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile)));
try {
- await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
+ await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -215,10 +219,12 @@ function mockListLanguages(languages) {
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
+ debugArtifactName: "my-artifact",
+ debugDatabaseName: "my-db",
};
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
- const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
+ const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
@@ -254,7 +260,7 @@ function mockListLanguages(languages) {
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
- await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
+ await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, [
@@ -297,7 +303,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
- const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
+ const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
@@ -330,7 +336,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
- const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
+ const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly // Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`, // It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file. // but won't be called for './foo' from the config file.
@@ -362,7 +368,7 @@ function queriesToResolvedQueryForm(queries) {
}, },
}); });
const languages = "javascript"; const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly // Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`, // It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled // but won't be called for the default one since that was disabled
@@ -388,7 +394,7 @@ function queriesToResolvedQueryForm(queries) {
}, },
}); });
const languages = "javascript"; const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly: // Check resolveQueries was called correctly:
// It'll be called once for the default queries, // It'll be called once for the default queries,
// and then once for each of the two queries from the workflow // and then once for each of the two queries from the workflow
@@ -427,7 +433,7 @@ function queriesToResolvedQueryForm(queries) {
}, },
}); });
const languages = "javascript"; const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly // Check resolveQueries was called correctly
// It'll be called once for the default queries, // It'll be called once for the default queries,
// once for each of additional1 and additional2, // once for each of additional1 and additional2,
@@ -466,7 +472,7 @@ function queriesToResolvedQueryForm(queries) {
}, },
}); });
try { try {
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.fail("initConfig did not throw error"); t.fail("initConfig did not throw error");
} }
catch (err) { catch (err) {
@@ -509,7 +515,7 @@ function queriesToResolvedQueryForm(queries) {
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true }); fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main"; const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript"; const languages = "javascript";
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.assert(spyGetContents.called); t.assert(spyGetContents.called);
}); });
}); });
@@ -519,7 +525,7 @@ function queriesToResolvedQueryForm(queries) {
mockGetContents(dummyResponse); mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main"; const repoReference = "octo-org/codeql-config/config.yaml@main";
try { try {
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error"); throw new Error("initConfig did not throw error");
} }
catch (err) { catch (err) {
@@ -535,7 +541,7 @@ function queriesToResolvedQueryForm(queries) {
mockGetContents(dummyResponse); mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main"; const repoReference = "octo-org/codeql-config/config.yaml@main";
try { try {
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error"); throw new Error("initConfig did not throw error");
} }
catch (err) { catch (err) {
@@ -552,7 +558,7 @@ function queriesToResolvedQueryForm(queries) {
}, },
}); });
try { try {
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error"); throw new Error("initConfig did not throw error");
} }
catch (err) { catch (err) {
@@ -564,7 +570,7 @@ function queriesToResolvedQueryForm(queries) {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
const languages = "rubbish,english"; const languages = "rubbish,english";
try { try {
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error"); throw new Error("initConfig did not throw error");
} }
catch (err) { catch (err) {
@@ -592,7 +598,7 @@ function queriesToResolvedQueryForm(queries) {
const configFile = path.join(tmpDir, "codeql-config.yaml"); const configFile = path.join(tmpDir, "codeql-config.yaml");
fs.writeFileSync(configFile, inputFileContents); fs.writeFileSync(configFile, inputFileContents);
const languages = "javascript"; const languages = "javascript";
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, { t.deepEqual(packs, {
[languages_1.Language.javascript]: [ [languages_1.Language.javascript]: [
{ {
@@ -631,7 +637,7 @@ function queriesToResolvedQueryForm(queries) {
fs.writeFileSync(configFile, inputFileContents); fs.writeFileSync(configFile, inputFileContents);
fs.mkdirSync(path.join(tmpDir, "foo")); fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript,python,cpp"; const languages = "javascript,python,cpp";
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, { t.deepEqual(packs, {
[languages_1.Language.javascript]: [ [languages_1.Language.javascript]: [
{ {
@@ -684,7 +690,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
const inputFile = path.join(tmpDir, configFile); const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, "utf8"); fs.writeFileSync(inputFile, inputFileContents, "utf8");
try { try {
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error"); throw new Error("initConfig did not throw error");
} }
catch (err) { catch (err) {
@@ -757,28 +763,26 @@ const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
/** /**
* Test macro for ensuring the packs block is valid * Test macro for ensuring the packs block is valid
*/ */
function parsePacksMacro(t, packsByLanguage, languages, expected) { const parsePacksMacro = ava_1.default.macro({
t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected); exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected),
} title: (providedTitle = "") => `Parse Packs: ${providedTitle}`,
parsePacksMacro.title = (providedTitle) => `Parse Packs: ${providedTitle}`; });
/** /**
* Test macro for testing when the packs block is invalid * Test macro for testing when the packs block is invalid
*/ */
function parsePacksErrorMacro(t, packsByLanguage, languages, expected) { const parsePacksErrorMacro = ava_1.default.macro({
t.throws(() => { exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), {
configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b");
}, {
message: expected, message: expected,
}); }),
} title: (providedTitle = "") => `Parse Packs Error: ${providedTitle}`,
parsePacksErrorMacro.title = (providedTitle) => `Parse Packs Error: ${providedTitle}`; });
/** /**
* Test macro for testing when the packs block is invalid * Test macro for testing when the packs block is invalid
*/ */
function invalidPackNameMacro(t, name) { const invalidPackNameMacro = ava_1.default.macro({
parsePacksErrorMacro(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`)); exec: (t, name) => parsePacksErrorMacro.exec(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`)),
} title: (_providedTitle, arg) => `Invalid pack string: ${arg}`,
invalidPackNameMacro.title = (_, arg) => `Invalid pack string: ${arg}`; });
(0, ava_1.default)("no packs", parsePacksMacro, {}, [], {}); (0, ava_1.default)("no packs", parsePacksMacro, {}, [], {});
(0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], { (0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [ [languages_1.Language.cpp]: [
@@ -867,6 +871,50 @@ parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/); (0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/); (0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/); (0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
// errors const mlPoweredQueriesMacro = ava_1.default.macro({
// input w invalid pack name exec: async (t, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async getVersion() {
return codeQLVersion;
},
async resolveQueries() {
return {
byLanguage: {
javascript: { "fake-query.ql": {} },
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
: []), (0, logging_1.getRunnerLogger)(true));
if (expectedVersionString !== undefined) {
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
packName: "codeql/javascript-experimental-atm-queries",
version: expectedVersionString,
},
],
});
}
else {
t.deepEqual(packs, {});
}
});
},
title: (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => `ML-powered queries ${expectedVersionString !== undefined
? `${expectedVersionString} are`
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`,
});
// macro, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, versionString
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", "~0.0.2");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", "~0.0.2");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
//# sourceMappingURL=config-utils.test.js.map //# sourceMappingURL=config-utils.test.js.map

File diff suppressed because one or more lines are too long

35 lib/database-upload.js generated

@@ -24,9 +24,10 @@ const fs = __importStar(require("fs"));
const actionsUtil = __importStar(require("./actions-util")); const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client"); const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql"); const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags");
const util = __importStar(require("./util")); const util = __importStar(require("./util"));
const util_1 = require("./util"); const util_1 = require("./util");
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) { async function uploadDatabases(repositoryNwo, config, featureFlags, apiDetails, logger) {
if (actionsUtil.getRequiredInput("upload-database") !== "true") { if (actionsUtil.getRequiredInput("upload-database") !== "true") {
logger.debug("Database upload disabled in workflow. Skipping upload."); logger.debug("Database upload disabled in workflow. Skipping upload.");
return; return;
@@ -41,33 +42,29 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
logger.debug("Not analyzing default branch. Skipping upload."); logger.debug("Not analyzing default branch. Skipping upload.");
return; return;
} }
const client = (0, api_client_1.getApiClient)(apiDetails); if (!(await featureFlags.getValue(feature_flags_1.FeatureFlag.DatabaseUploadsEnabled))) {
try { logger.debug("Repository is not opted in to database uploads. Skipping upload.");
await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
});
}
catch (e) {
if (util.isHTTPError(e) && e.status === 404) {
logger.debug("Repository is not opted in to database uploads. Skipping upload.");
}
else {
console.log(e);
logger.info(`Skipping database upload due to unknown error: ${e}`);
}
return; return;
} }
const client = (0, api_client_1.getApiClient)(apiDetails);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd); const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
for (const language of config.languages) { for (const language of config.languages) {
// Upload the database bundle // Upload the database bundle.
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql)); // Although we are uploading arbitrary file contents to the API, it's worth
// noting that it's the API's job to validate that the contents is acceptable.
// This API method is available to anyone with write access to the repo.
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql, language));
try { try {
await client.request(`PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`, { await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
owner: repositoryNwo.owner, owner: repositoryNwo.owner,
repo: repositoryNwo.repo, repo: repositoryNwo.repo,
language, language,
name: `${language}-database`,
data: payload, data: payload,
headers: {
authorization: `token ${apiDetails.auth}`,
"Content-Type": "application/zip",
},
}); });
logger.debug(`Successfully uploaded database for ${language}`); logger.debug(`Successfully uploaded database for ${language}`);
} }



@@ -30,6 +30,7 @@ const actionsUtil = __importStar(require("./actions-util"));
const apiClient = __importStar(require("./api-client")); const apiClient = __importStar(require("./api-client"));
const codeql_1 = require("./codeql"); const codeql_1 = require("./codeql");
const database_upload_1 = require("./database-upload"); const database_upload_1 = require("./database-upload");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages"); const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils"); const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util"); const util_1 = require("./util");
@@ -37,6 +38,9 @@ const util_1 = require("./util");
ava_1.default.beforeEach(() => { ava_1.default.beforeEach(() => {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3"); (0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
}); });
const uploadToUploadsDomainFlags = (0, feature_flags_1.createFeatureFlags)([
feature_flags_1.FeatureFlag.DatabaseUploadsEnabled,
]);
const testRepoName = { owner: "github", repo: "example" }; const testRepoName = { owner: "github", repo: "example" };
const testApiDetails = { const testApiDetails = {
auth: "1234", auth: "1234",
@@ -56,50 +60,21 @@ function getTestConfig(tmpDir) {
dbLocation: tmpDir, dbLocation: tmpDir,
packs: {}, packs: {},
debugMode: false, debugMode: false,
debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
}; };
} }
function getRecordingLogger(messages) { async function mockHttpRequests(databaseUploadStatusCode) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
// Passing an auth token is required, so we just use a dummy value // Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123"); const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request"); const requestSpy = sinon.stub(client, "request");
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql/databases"); const url = "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name";
if (optInStatusCode < 300) { const databaseUploadSpy = requestSpy.withArgs(url);
optInSpy.resolves(undefined); if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
} }
else { else {
optInSpy.throws(new util_1.HTTPError("some error message", optInStatusCode)); databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
}
if (databaseUploadStatusCode !== undefined) {
const databaseUploadSpy = requestSpy.withArgs("PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language");
if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
}
else {
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
}
} }
sinon.stub(apiClient, "getApiClient").value(() => client); sinon.stub(apiClient, "getApiClient").value(() => client);
} }
@@ -112,7 +87,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
.returns("false"); .returns("false");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true); sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = []; const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages)); await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" && t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined); v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined);
}); });
@@ -128,7 +103,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
const config = getTestConfig(tmpDir); const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" }; config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
const loggedMessages = []; const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages)); await (0, database_upload_1.uploadDatabases)(testRepoName, config, (0, feature_flags_1.createFeatureFlags)([]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" && t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined); v.message === "Not running against github.com. Skipping upload.") !== undefined);
}); });
@@ -144,7 +119,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
const config = getTestConfig(tmpDir); const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHAE }; config.gitHubVersion = { type: util_1.GitHubVariant.GHAE };
const loggedMessages = []; const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages)); await (0, database_upload_1.uploadDatabases)(testRepoName, config, (0, feature_flags_1.createFeatureFlags)([]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" && t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined); v.message === "Not running against github.com. Skipping upload.") !== undefined);
}); });
@@ -158,12 +133,12 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
.returns("true"); .returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false); sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = []; const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages)); await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" && t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.") !== undefined); v.message === "Not analyzing default branch. Skipping upload.") !== undefined);
}); });
}); });
(0, ava_1.default)("Abort database upload if opt-in request returns 404", async (t) => { (0, ava_1.default)("Abort database upload if feature flag is disabled", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => { await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir); (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon sinon
@@ -171,40 +146,18 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
.withArgs("upload-database") .withArgs("upload-database")
.returns("true"); .returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true); sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(404);
(0, codeql_1.setCodeQL)({ (0, codeql_1.setCodeQL)({
async databaseBundle() { async databaseBundle() {
return; return;
}, },
}); });
const loggedMessages = []; const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages)); await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), (0, feature_flags_1.createFeatureFlags)([]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" && t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === v.message ===
"Repository is not opted in to database uploads. Skipping upload.") !== undefined); "Repository is not opted in to database uploads. Skipping upload.") !== undefined);
}); });
}); });
(0, ava_1.default)("Abort database upload if opt-in request fails with something other than 404", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(500);
(0, codeql_1.setCodeQL)({
async databaseBundle() {
return;
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "info" &&
v.message ===
"Skipping database upload due to unknown error: Error: some error message") !== undefined);
});
});
(0, ava_1.default)("Don't crash if uploading a database fails", async (t) => { (0, ava_1.default)("Don't crash if uploading a database fails", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => { await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir); (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -213,20 +166,23 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
.withArgs("upload-database") .withArgs("upload-database")
.returns("true"); .returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true); sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(204, 500); const featureFlags = (0, feature_flags_1.createFeatureFlags)([
feature_flags_1.FeatureFlag.DatabaseUploadsEnabled,
]);
await mockHttpRequests(500);
(0, codeql_1.setCodeQL)({ (0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) { async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, ""); fs.writeFileSync(outputFilePath, "");
}, },
}); });
const loggedMessages = []; const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages)); await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), featureFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "warning" && t.assert(loggedMessages.find((v) => v.type === "warning" &&
v.message === v.message ===
"Failed to upload database for javascript: Error: some error message") !== undefined); "Failed to upload database for javascript: Error: some error message") !== undefined);
}); });
}); });
(0, ava_1.default)("Successfully uploading a database", async (t) => { (0, ava_1.default)("Successfully uploading a database to api.github.com", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => { await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir); (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon sinon
@@ -234,14 +190,34 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
.withArgs("upload-database") .withArgs("upload-database")
.returns("true"); .returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true); sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(204, 201); await mockHttpRequests(201);
(0, codeql_1.setCodeQL)({ (0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) { async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, ""); fs.writeFileSync(outputFilePath, "");
}, },
}); });
const loggedMessages = []; const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages)); await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined);
});
});
(0, ava_1.default)("Successfully uploading a database to uploads.github.com", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" && t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined); v.message === "Successfully uploaded database for javascript") !== undefined);
}); });

File diff suppressed because one or more lines are too long


@@ -1,3 +1,3 @@
{ {
"bundleVersion": "codeql-bundle-20211115" "bundleVersion": "codeql-bundle-20220128"
} }

95 lib/feature-flags.js generated Normal file

@@ -0,0 +1,95 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFeatureFlags = exports.GitHubFeatureFlags = exports.FeatureFlag = void 0;
const api_client_1 = require("./api-client");
const util = __importStar(require("./util"));
var FeatureFlag;
(function (FeatureFlag) {
FeatureFlag["DatabaseUploadsEnabled"] = "database_uploads_enabled";
FeatureFlag["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
})(FeatureFlag = exports.FeatureFlag || (exports.FeatureFlag = {}));
class GitHubFeatureFlags {
constructor(gitHubVersion, apiDetails, repositoryNwo, logger) {
this.gitHubVersion = gitHubVersion;
this.apiDetails = apiDetails;
this.repositoryNwo = repositoryNwo;
this.logger = logger;
}
async getValue(flag) {
const response = (await this.getApiResponse())[flag];
if (response === undefined) {
this.logger.debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
return false;
}
return response;
}
async getApiResponse() {
const loadApiResponse = async () => {
// Do nothing when not running against github.com
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
this.logger.debug("Not running against github.com. Disabling all feature flags.");
return {};
}
const client = (0, api_client_1.getApiClient)(this.apiDetails);
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql-action/features", {
owner: this.repositoryNwo.owner,
repo: this.repositoryNwo.repo,
});
return response.data;
}
catch (e) {
if (util.isHTTPError(e) && e.status === 403) {
this.logger.warning("This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. " +
"As a result, it will not be opted into any experimental features. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
}
else {
// Some feature flags, such as `ml_powered_queries_enabled` affect the produced alerts.
// Considering these feature flags disabled in the event of a transient error could
// therefore lead to alert churn. As a result, we crash if we cannot determine the value of
// the feature flags.
throw new Error(`Encountered an error while trying to load feature flags: ${e}`);
}
}
};
const apiResponse = this.cachedApiResponse || (await loadApiResponse());
this.cachedApiResponse = apiResponse;
return apiResponse;
}
}
exports.GitHubFeatureFlags = GitHubFeatureFlags;
/**
* Create a feature flags instance with the specified set of enabled flags.
*
* This should be only used within tests.
*/
function createFeatureFlags(enabledFlags) {
return {
getValue: async (flag) => {
return enabledFlags.includes(flag);
},
};
}
exports.createFeatureFlags = createFeatureFlags;
//# sourceMappingURL=feature-flags.js.map

1 lib/feature-flags.js.map Normal file


95 lib/feature-flags.test.js generated Normal file

@@ -0,0 +1,95 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
ava_1.default.beforeEach(() => {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
});
const testApiDetails = {
auth: "1234",
url: "https://github.com",
};
const testRepositoryNwo = (0, repository_1.parseRepositoryNwo)("github/example");
const ALL_FEATURE_FLAGS_DISABLED_VARIANTS = [
{
description: "GHES",
gitHubVersion: { type: util_1.GitHubVariant.GHES, version: "3.0.0" },
},
{ description: "GHAE", gitHubVersion: { type: util_1.GitHubVariant.GHAE } },
];
for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
(0, ava_1.default)(`All feature flags are disabled if running against ${variant.description}`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags(variant.gitHubVersion, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
}
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Not running against github.com. Disabling all feature flags.") !== undefined);
});
});
}
(0, ava_1.default)("Feature flags are disabled if they're not returned in API response", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
}
for (const featureFlag of [
"database_uploads_enabled",
"ml_powered_queries_enabled",
]) {
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
`Feature flag '${featureFlag}' undefined in API response, considering it disabled.`) !== undefined);
}
});
});
(0, ava_1.default)("Feature flags exception is propagated if the API request errors", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(500, {});
await t.throwsAsync(async () => featureFlags.getValue(feature_flags_1.FeatureFlag.DatabaseUploadsEnabled), {
message: "Encountered an error while trying to load feature flags: Error: some error message",
});
});
});
const FEATURE_FLAGS = [
"database_uploads_enabled",
"ml_powered_queries_enabled",
];
for (const featureFlag of FEATURE_FLAGS) {
(0, ava_1.default)(`Feature flag '${featureFlag}' is enabled if enabled in the API response`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
const expectedFeatureFlags = {};
for (const f of FEATURE_FLAGS) {
expectedFeatureFlags[f] = false;
}
expectedFeatureFlags[featureFlag] = true;
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureFlags);
const actualFeatureFlags = {
database_uploads_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.DatabaseUploadsEnabled),
ml_powered_queries_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled),
};
t.deepEqual(actualFeatureFlags, expectedFeatureFlags);
});
});
}
//# sourceMappingURL=feature-flags.test.js.map
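
The tests above stub the flags endpoint via a testing-utils helper that is not shown in this diff. A rough sketch of that stubbing pattern, modelled on mockHttpRequests earlier in this compare (the endpoint path matches feature-flags.js; the helper name and response shape are assumptions), could look like:

const github = require("@actions/github");
const sinon = require("sinon");
const apiClient = require("./api-client");
const { HTTPError } = require("./util");

// Sketch of a feature-flag endpoint stub: route the flags request to a fixed
// response (or an error), mirroring how mockHttpRequests stubs the upload URL.
function stubFeatureFlagApi(statusCode, responseBody) {
    const client = github.getOctokit("123"); // dummy token, as in the other tests
    const requestSpy = sinon.stub(client, "request");
    const flagsSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql-action/features");
    if (statusCode < 300) {
        flagsSpy.resolves({ status: statusCode, data: responseBody });
    }
    else {
        flagsSpy.throws(new HTTPError("some error message", statusCode));
    }
    sinon.stub(apiClient, "getApiClient").value(() => client);
}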


5 lib/fingerprints.js generated

@@ -226,9 +226,8 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
exports.resolveUriToFile = resolveUriToFile; exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file // Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents. // and return an updated sarif file contents.
async function addFingerprints(sarifContents, sourceRoot, logger) { async function addFingerprints(sarif, sourceRoot, logger) {
var _a, _b, _c; var _a, _b, _c;
const sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct // Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location // callbacks to accept hashes for that file and update the location
const callbacksByFile = {}; const callbacksByFile = {};
@@ -266,7 +265,7 @@ async function addFingerprints(sarifContents, sourceRoot, logger) {
}; };
await hash(teeCallback, filepath); await hash(teeCallback, filepath);
} }
return JSON.stringify(sarif); return sarif;
} }
exports.addFingerprints = addFingerprints; exports.addFingerprints = addFingerprints;
//# sourceMappingURL=fingerprints.js.map //# sourceMappingURL=fingerprints.js.map

File diff suppressed because one or more lines are too long
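Note: a minimal TypeScript sketch (not part of this diff) of the new contract shown above. addFingerprints now accepts and returns a parsed SARIF object instead of a JSON string, so callers parse once at the boundary and stringify only when a payload is actually needed. The helper name fingerprintFile is illustrative; addFingerprints and getRunnerLogger are the exports visible in these hunks.

import * as fs from "fs";
import { addFingerprints } from "./fingerprints";
import { getRunnerLogger } from "./logging";

async function fingerprintFile(sarifPath: string, sourceRoot: string): Promise<string> {
  // Parse once at the edge; addFingerprints works on the object and returns it, not a string.
  const sarif = JSON.parse(fs.readFileSync(sarifPath, "utf8"));
  const withFingerprints = await addFingerprints(sarif, sourceRoot, getRunnerLogger(true));
  // Serialize only when the upload payload is actually built.
  return JSON.stringify(withFingerprints);
}

The test hunks below drop their JSON.stringify/JSON.parse normalisation for the same reason: the comparison is now between objects, so output formatting no longer matters.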

View File

@@ -169,30 +169,24 @@ function testResolveUriToFile(uri, index, artifactsURIs) {
}); });
(0, ava_1.default)("addFingerprints", async (t) => { (0, ava_1.default)("addFingerprints", async (t) => {
// Run an end-to-end test on a test file // Run an end-to-end test on a test file
let input = fs const input = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`) .readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString(); .toString());
let expected = fs const expected = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`) .readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
.toString(); .toString());
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory // The URIs in the SARIF files resolve to files in the testdata directory
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`); const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected); t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);
}); });
(0, ava_1.default)("missingRegions", async (t) => { (0, ava_1.default)("missingRegions", async (t) => {
// Run an end-to-end test on a test file // Run an end-to-end test on a test file
let input = fs const input = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`) .readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString(); .toString());
let expected = fs const expected = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`) .readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
.toString(); .toString());
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory // The URIs in the SARIF files resolve to files in the testdata directory
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`); const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected); t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);

File diff suppressed because one or more lines are too long

lib/init-action.js (generated, 14 changes)

@@ -23,6 +23,7 @@ const path = __importStar(require("path"));
const core = __importStar(require("@actions/core")); const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util"); const actions_util_1 = require("./actions-util");
const codeql_1 = require("./codeql"); const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init"); const init_1 = require("./init");
const languages_1 = require("./languages"); const languages_1 = require("./languages");
const logging_1 = require("./logging"); const logging_1 = require("./logging");
@@ -53,14 +54,15 @@ async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
} }
const statusReport = { const statusReport = {
...statusReportBase, ...statusReportBase,
disable_default_queries: disableDefaultQueries,
languages, languages,
workflow_languages: workflowLanguages || "", ml_powered_js_queries: (0, util_1.getMlPoweredJsQueriesStatus)(config),
paths, paths,
paths_ignore: pathsIgnore, paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries.join(","), queries: queries.join(","),
tools_input: (0, actions_util_1.getOptionalInput)("tools") || "", tools_input: (0, actions_util_1.getOptionalInput)("tools") || "",
tools_resolved_version: toolsVersion, tools_resolved_version: toolsVersion,
workflow_languages: workflowLanguages || "",
}; };
await (0, actions_util_1.sendStatusReport)(statusReport); await (0, actions_util_1.sendStatusReport)(statusReport);
} }
@@ -78,6 +80,8 @@ async function run() {
}; };
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails); const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions); (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
try { try {
const workflowErrors = await (0, actions_util_1.validateWorkflow)(); const workflowErrors = await (0, actions_util_1.validateWorkflow)();
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) { if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
@@ -87,7 +91,11 @@ async function run() {
codeql = initCodeQLResult.codeql; codeql = initCodeQLResult.codeql;
toolsVersion = initCodeQLResult.toolsVersion; toolsVersion = initCodeQLResult.toolsVersion;
await (0, util_1.enrichEnvironment)(util_1.Mode.actions, codeql); await (0, util_1.enrichEnvironment)(util_1.Mode.actions, codeql);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("debug") === "true", (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger); config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("debug") === "true", (0, actions_util_1.getOptionalInput)("debug-artifact-name") || util_1.DEFAULT_DEBUG_ARTIFACT_NAME, (0, actions_util_1.getOptionalInput)("debug-database-name") || util_1.DEFAULT_DEBUG_DATABASE_NAME, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, featureFlags, logger);
if (config.languages.some(languages_1.isTracedLanguage)) {
// We currently do not support tracing on Windows 11 and Windows Server 2022
(0, util_1.checkNotWindows11)();
}
if (config.languages.includes(languages_1.Language.python) && if (config.languages.includes(languages_1.Language.python) &&
(0, actions_util_1.getRequiredInput)("setup-python-dependencies") === "true") { (0, actions_util_1.getRequiredInput)("setup-python-dependencies") === "true") {
try { try {

File diff suppressed because one or more lines are too long
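For reference, a rough sketch of the shape the init status report takes after the init-action.js hunks above, using only the fields visible in the diff; every value here is invented for illustration.

// Illustrative values only -- the real report is assembled from the action's inputs and config.
const exampleInitStatusReport = {
  // ...plus the shared fields from createStatusReportBase (action name, timings, workflow run id)
  disable_default_queries: "",
  languages: "javascript,python",
  ml_powered_js_queries: "false", // new field: ML-powered JS query enablement for this run
  paths: "",
  paths_ignore: "",
  queries: "+security-extended",
  tools_input: "",
  tools_resolved_version: "2.8.0",
  workflow_languages: "javascript,python",
};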

lib/init.js (generated, 33 changes)

@@ -38,24 +38,39 @@ async function initCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant,
return { codeql, toolsVersion }; return { codeql, toolsVersion };
} }
exports.initCodeQL = initCodeQL; exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) { async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
logger.startGroup("Load language configuration"); logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger); const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
analysisPaths.printPathFiltersWarning(config, logger); analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup(); logger.endGroup();
return config; return config;
} }
exports.initConfig = initConfig; exports.initConfig = initConfig;
async function runInit(codeql, config, sourceRoot, processName, processLevel) { async function runInit(codeql, config, sourceRoot, processName, processLevel) {
var _a;
fs.mkdirSync(config.dbLocation, { recursive: true }); fs.mkdirSync(config.dbLocation, { recursive: true });
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) { try {
// Init a database cluster if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
await codeql.databaseInitCluster(config.dbLocation, config.languages, sourceRoot, processName, processLevel); // Init a database cluster
await codeql.databaseInitCluster(config.dbLocation, config.languages, sourceRoot, processName, processLevel);
}
else {
for (const language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config, language), language, sourceRoot);
}
}
} }
else { catch (e) {
for (const language of config.languages) { // Handle the situation where init is called twice
// Init language database // for the same database in the same job.
await codeql.databaseInit(util.getCodeQLDatabasePath(config, language), language, sourceRoot); if (e instanceof Error &&
((_a = e.message) === null || _a === void 0 ? void 0 : _a.includes("Refusing to create databases")) &&
e.message.includes("exists and is not an empty directory.")) {
throw new Error(`Is the "init" action called twice in the same job? ${e.message}`);
}
else {
throw e;
} }
} }
return await (0, tracer_config_1.getCombinedTracerConfig)(config, codeql); return await (0, tracer_config_1.getCombinedTracerConfig)(config, codeql);
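
The runInit change above wraps database initialisation in a try/catch and rewraps one specific CLI failure as a hint about calling init twice. A standalone TypeScript sketch of just that rewrapping step; the helper name rewrapInitError is hypothetical, while the message fragments are the ones shown in the diff.

function rewrapInitError(e: unknown): never {
  if (
    e instanceof Error &&
    e.message?.includes("Refusing to create databases") &&
    e.message.includes("exists and is not an empty directory.")
  ) {
    // The database location already exists, which usually means "init" ran twice in one job.
    throw new Error(`Is the "init" action called twice in the same job? ${e.message}`);
  }
  throw e;
}

In runInit itself this logic sits directly in the catch block that surrounds both databaseInitCluster and the per-language databaseInit loop.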

View File

@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AApCD,gCAoCC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,0BAA0B;QAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;KACH;SAAM;QACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;SACH;KACF;IAED,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA9BD,0BA8BC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,
CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"} {"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,YAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,YAAY,EACZ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA1CD,gCA0CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,kDAAkD;QAClD,yCAAyC;QACzC,IACE,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC,CAAA;YACnD,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,uCAAuC,CAAC,EAC3D;YACA,MAAM,IAAI,KAAK,CACb,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;SACH;aAAM;YACL,MAAM,CAAC,CAAC;SACT;KACF;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA7CD,0BA6CC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,
CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}

lib/runner.js (generated, 9 changes)

@@ -18,15 +18,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod); __setModuleDefault(result, mod);
return result; return result;
}; };
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const os = __importStar(require("os")); const os = __importStar(require("os"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const commander_1 = require("commander"); const commander_1 = require("commander");
const del_1 = __importDefault(require("del"));
const analyze_1 = require("./analyze"); const analyze_1 = require("./analyze");
const autobuild_1 = require("./autobuild"); const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql"); const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils"); const config_utils_1 = require("./config-utils");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init"); const init_1 = require("./init");
const languages_1 = require("./languages"); const languages_1 = require("./languages");
const logging_1 = require("./logging"); const logging_1 = require("./logging");
@@ -133,7 +138,7 @@ program
const checkoutPath = cmd.checkoutPath || process.cwd(); const checkoutPath = cmd.checkoutPath || process.cwd();
// Wipe the temp dir // Wipe the temp dir
logger.info(`Cleaning temp directory ${tempDir}`); logger.info(`Cleaning temp directory ${tempDir}`);
fs.rmSync(tempDir, { recursive: true, force: true }); await (0, del_1.default)(tempDir, { force: true });
fs.mkdirSync(tempDir, { recursive: true }); fs.mkdirSync(tempDir, { recursive: true });
const auth = await (0, util_1.getGitHubAuth)(logger, cmd.githubAuth, cmd.githubAuthStdin); const auth = await (0, util_1.getGitHubAuth)(logger, cmd.githubAuth, cmd.githubAuthStdin);
const apiDetails = { const apiDetails = {
@@ -159,7 +164,7 @@ program
} }
await (0, util_1.enrichEnvironment)(util_1.Mode.runner, codeql); await (0, util_1.enrichEnvironment)(util_1.Mode.runner, codeql);
const workspacePath = checkoutPath; const workspacePath = checkoutPath;
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, logger); const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, "", "", (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
const sourceRoot = checkoutPath; const sourceRoot = checkoutPath;
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel()); const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel());
if (tracerConfig === undefined) { if (tracerConfig === undefined) {

File diff suppressed because one or more lines are too long
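The runner.js hunks above replace fs.rmSync with the del package. A small sketch of the resulting cleanup pattern; resetTempDir is an illustrative name, not a function in the repository.

import * as fs from "fs";
import del from "del";

async function resetTempDir(tempDir: string): Promise<void> {
  // force: true lets del remove paths outside the current working directory.
  await del(tempDir, { force: true });
  fs.mkdirSync(tempDir, { recursive: true });
}

Because del returns a promise, deletion failures now surface as rejections; the same substitution in toolcache.js below is why createToolPath becomes async and its caller now awaits it.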

lib/testing-utils.js (generated, 49 changes)

@@ -19,9 +19,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result; return result;
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.setupActionsVars = exports.setupTests = void 0; exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
const github = __importStar(require("@actions/github"));
const sinon = __importStar(require("sinon")); const sinon = __importStar(require("sinon"));
const apiClient = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql")); const CodeQL = __importStar(require("./codeql"));
const util_1 = require("./util");
function wrapOutput(context) { function wrapOutput(context) {
// Function signature taken from Socket.write. // Function signature taken from Socket.write.
// Note there are two overloads: // Note there are two overloads:
@@ -89,4 +92,48 @@ function setupActionsVars(tempDir, toolsDir) {
process.env["RUNNER_TOOL_CACHE"] = toolsDir; process.env["RUNNER_TOOL_CACHE"] = toolsDir;
} }
exports.setupActionsVars = setupActionsVars; exports.setupActionsVars = setupActionsVars;
function getRecordingLogger(messages) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
exports.getRecordingLogger = getRecordingLogger;
/** Mock the HTTP request to the feature flags enablement API endpoint. */
function mockFeatureFlagApiEndpoint(responseStatusCode, response) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request");
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql-action/features");
if (responseStatusCode < 300) {
optInSpy.resolves({
status: responseStatusCode,
data: response,
headers: {},
url: "GET /repos/:owner/:repo/code-scanning/codeql-action/features",
});
}
else {
optInSpy.throws(new util_1.HTTPError("some error message", responseStatusCode));
}
sinon.stub(apiClient, "getApiClient").value(() => client);
}
exports.mockFeatureFlagApiEndpoint = mockFeatureFlagApiEndpoint;
//# sourceMappingURL=testing-utils.js.map //# sourceMappingURL=testing-utils.js.map
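
A minimal ava test, as a sketch, showing how the new getRecordingLogger helper above can be used; the test name and messages are invented, and only the { type, message } shape pushed by the helper is relied on.

import test from "ava";
import { getRecordingLogger } from "./testing-utils";

test("recording logger captures messages for later assertions", (t) => {
  // The precise element type lives in src/testing-utils.ts; it is left loose in this sketch.
  const messages: any[] = [];
  const logger = getRecordingLogger(messages);

  logger.info("resolved feature flags");
  logger.warning("feature flag API returned 500");

  t.deepEqual(
    messages.map((m) => m.type),
    ["info", "warning"]
  );
});

mockFeatureFlagApiEndpoint(statusCode, response) pairs naturally with this logger in feature-flag tests: as the hunk above shows, it stubs the GET /repos/:owner/:repo/code-scanning/codeql-action/features request via sinon and either resolves with the given response or throws an HTTPError.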

View File

@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,6CAA+B;AAE/B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC"} 
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CA
AC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}

lib/toolcache.js (generated, 12 changes)

@@ -18,6 +18,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod); __setModuleDefault(result, mod);
return result; return result;
}; };
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadTool = exports.findAllVersions = exports.find = exports.cacheDir = exports.extractTar = void 0; exports.downloadTool = exports.findAllVersions = exports.find = exports.cacheDir = exports.extractTar = void 0;
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
@@ -27,6 +30,7 @@ const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const io = __importStar(require("@actions/io")); const io = __importStar(require("@actions/io"));
const actionsToolcache = __importStar(require("@actions/tool-cache")); const actionsToolcache = __importStar(require("@actions/tool-cache"));
const safeWhich = __importStar(require("@chrisgavin/safe-which")); const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const del_1 = __importDefault(require("del"));
const semver = __importStar(require("semver")); const semver = __importStar(require("semver"));
const uuid_1 = require("uuid"); const uuid_1 = require("uuid");
const util_1 = require("./util"); const util_1 = require("./util");
@@ -123,7 +127,7 @@ async function cacheDir(sourceDir, tool, version, toolCacheDir, logger) {
throw new Error("sourceDir is not a directory"); throw new Error("sourceDir is not a directory");
} }
// Create the tool dir // Create the tool dir
const destPath = createToolPath(tool, version, arch, toolCacheDir, logger); const destPath = await createToolPath(tool, version, arch, toolCacheDir, logger);
// copy each child item. do not move. move can fail on Windows // copy each child item. do not move. move can fail on Windows
// due to anti-virus software having an open handle on a file. // due to anti-virus software having an open handle on a file.
for (const itemName of fs.readdirSync(sourceDir)) { for (const itemName of fs.readdirSync(sourceDir)) {
@@ -232,12 +236,12 @@ function createExtractFolder(tempDir) {
} }
return dest; return dest;
} }
function createToolPath(tool, version, arch, toolCacheDir, logger) { async function createToolPath(tool, version, arch, toolCacheDir, logger) {
const folderPath = path.join(toolCacheDir, tool, semver.clean(version) || version, arch || ""); const folderPath = path.join(toolCacheDir, tool, semver.clean(version) || version, arch || "");
logger.debug(`destination ${folderPath}`); logger.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`; const markerPath = `${folderPath}.complete`;
fs.rmSync(folderPath, { recursive: true, force: true }); await (0, del_1.default)(folderPath, { force: true });
fs.rmSync(markerPath, { recursive: true, force: true }); await (0, del_1.default)(markerPath, { force: true });
fs.mkdirSync(folderPath, { recursive: true }); fs.mkdirSync(folderPath, { recursive: true });
return folderPath; return folderPath;
} }

File diff suppressed because one or more lines are too long

View File

@@ -44,10 +44,6 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout) !== undefined) { if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout) !== undefined) {
options.listeners.stdout(data); options.listeners.stdout(data);
} }
else {
// if no stdout listener was originally defined then we match default behavior of Toolrunner
process.stdout.write(data);
}
}, },
stderr: (data) => { stderr: (data) => {
var _a; var _a;
@@ -55,10 +51,6 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stderr) !== undefined) { if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stderr) !== undefined) {
options.listeners.stderr(data); options.listeners.stderr(data);
} }
else {
// if no stderr listener was originally defined then we match default behavior of Toolrunner
process.stderr.write(data);
}
}, },
}; };
// we capture the original return code or error so that if no match is found we can duplicate the behavior // we capture the original return code or error so that if no match is found we can duplicate the behavior

View File

@@ -1 +1 @@
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAzED,wDAyEC"} 
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAnED,wDAmEC"}

View File

@@ -45,6 +45,8 @@ function getTestConfig(tmpDir) {
dbLocation: path.resolve(tmpDir, "codeql_databases"), dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {}, packs: {},
debugMode: false, debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
}; };
} }
// A very minimal setup // A very minimal setup
@@ -160,7 +162,10 @@ function getTestConfig(tmpDir) {
javascript: { spec, env: { a: "a", b: "b" } }, javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "c" } }, python: { spec, env: { b: "c" } },
}, config)); }, config));
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c"); // If e is undefined, then the previous assertion will fail.
if (e !== undefined) {
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
}
}); });
}); });
(0, ava_1.default)("concatTracerConfigs - cpp spec lines come last if present", async (t) => { (0, ava_1.default)("concatTracerConfigs - cpp spec lines come last if present", async (t) => {

File diff suppressed because one or more lines are too long

lib/upload-lib.js (generated, 97 changes)

@@ -54,25 +54,24 @@ function combineSarifFiles(sarifFiles) {
} }
combinedSarif.runs.push(...sarifObject.runs); combinedSarif.runs.push(...sarifObject.runs);
} }
return JSON.stringify(combinedSarif); return combinedSarif;
} }
exports.combineSarifFiles = combineSarifFiles; exports.combineSarifFiles = combineSarifFiles;
// Populates the run.automationDetails.id field using the analysis_key and environment // Populates the run.automationDetails.id field using the analysis_key and environment
// and return an updated sarif file contents. // and return an updated sarif file contents.
function populateRunAutomationDetails(sarifContents, category, analysis_key, environment) { function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
if (analysis_key === undefined) {
return sarifContents;
}
const automationID = getAutomationID(category, analysis_key, environment); const automationID = getAutomationID(category, analysis_key, environment);
const sarif = JSON.parse(sarifContents); if (automationID !== undefined) {
for (const run of sarif.runs || []) { for (const run of sarif.runs || []) {
if (run.automationDetails === undefined) { if (run.automationDetails === undefined) {
run.automationDetails = { run.automationDetails = {
id: automationID, id: automationID,
}; };
}
} }
return sarif;
} }
return JSON.stringify(sarif); return sarif;
} }
exports.populateRunAutomationDetails = populateRunAutomationDetails; exports.populateRunAutomationDetails = populateRunAutomationDetails;
function getAutomationID(category, analysis_key, environment) { function getAutomationID(category, analysis_key, environment) {
@@ -83,7 +82,11 @@ function getAutomationID(category, analysis_key, environment) {
} }
return automationID; return automationID;
} }
return actionsUtil.computeAutomationID(analysis_key, environment); // analysis_key is undefined for the runner.
if (analysis_key !== undefined) {
return actionsUtil.computeAutomationID(analysis_key, environment);
}
return undefined;
} }
// Upload the given payload. // Upload the given payload.
// If the request fails then this will retry a small number of times. // If the request fails then this will retry a small number of times.
@@ -92,6 +95,7 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
// If in test mode we don't want to upload the results // If in test mode we don't want to upload the results
const testMode = process.env["TEST_MODE"] === "true" || false; const testMode = process.env["TEST_MODE"] === "true" || false;
if (testMode) { if (testMode) {
logger.debug("In test mode. Results are not uploaded.");
return; return;
} }
const client = api.getApiClient(apiDetails); const client = api.getApiClient(apiDetails);
@@ -200,7 +204,7 @@ function validateSarifFileSchema(sarifFilePath, logger) {
exports.validateSarifFileSchema = validateSarifFileSchema; exports.validateSarifFileSchema = validateSarifFileSchema;
// buildPayload constructs a map ready to be uploaded to the API from the given // buildPayload constructs a map ready to be uploaded to the API from the given
// parameters, respecting the current mode and target GitHub instance version. // parameters, respecting the current mode and target GitHub instance version.
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion) { function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, mergeBaseCommitOid) {
if (util.isActions()) { if (util.isActions()) {
const payloadObj = { const payloadObj = {
commit_oid: commitOid, commit_oid: commitOid,
@@ -219,11 +223,23 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
// This behaviour can be made the default when support for GHES 3.0 is discontinued. // This behaviour can be made the default when support for GHES 3.0 is discontinued.
if (gitHubVersion.type !== util.GitHubVariant.GHES || if (gitHubVersion.type !== util.GitHubVariant.GHES ||
semver.satisfies(gitHubVersion.version, `>=3.1`)) { semver.satisfies(gitHubVersion.version, `>=3.1`)) {
if (process.env.GITHUB_EVENT_NAME === "pull_request" && if (process.env.GITHUB_EVENT_NAME === "pull_request") {
process.env.GITHUB_EVENT_PATH) { if (commitOid === util.getRequiredEnvParam("GITHUB_SHA") &&
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")); mergeBaseCommitOid) {
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`; // We're uploading results for the merge commit
payloadObj.base_sha = githubEvent.pull_request.base.sha; // and were able to determine the merge base.
// So we use that as the most accurate base.
payloadObj.base_ref = `refs/heads/${util.getRequiredEnvParam("GITHUB_BASE_REF")}`;
payloadObj.base_sha = mergeBaseCommitOid;
}
else if (process.env.GITHUB_EVENT_PATH) {
// Either we're not uploading results for the merge commit
// or we could not determine the merge base.
// Using the PR base is the only option here
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
}
} }
} }
return payloadObj; return payloadObj;
@@ -244,18 +260,19 @@ exports.buildPayload = buildPayload;
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) { async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) {
logger.startGroup("Uploading results"); logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`); logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
validateUniqueCategory(category);
// Validate that the files we were asked to upload are all valid SARIF files // Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) { for (const file of sarifFiles) {
validateSarifFileSchema(file, logger); validateSarifFileSchema(file, logger);
} }
let sarifPayload = combineSarifFiles(sarifFiles); let sarif = combineSarifFiles(sarifFiles);
sarifPayload = await fingerprints.addFingerprints(sarifPayload, sourceRoot, logger); sarif = await fingerprints.addFingerprints(sarif, sourceRoot, logger);
sarifPayload = populateRunAutomationDetails(sarifPayload, category, analysisKey, environment); sarif = populateRunAutomationDetails(sarif, category, analysisKey, environment);
const toolNames = util.getToolNames(sarif);
validateUniqueCategory(sarif);
const sarifPayload = JSON.stringify(sarif);
const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64"); const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = (0, file_url_1.default)(sourceRoot); const checkoutURI = (0, file_url_1.default)(sourceRoot);
const toolNames = util.getToolNames(sarifPayload); const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, await actionsUtil.determineMergeBaseCommitOid());
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion);
// Log some useful debug info about the info // Log some useful debug info about the info
const rawUploadSizeBytes = sarifPayload.length; const rawUploadSizeBytes = sarifPayload.length;
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`); logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
@@ -325,16 +342,28 @@ async function waitForProcessing(repositoryNwo, sarifID, apiDetails, logger) {
logger.endGroup(); logger.endGroup();
} }
exports.waitForProcessing = waitForProcessing; exports.waitForProcessing = waitForProcessing;
function validateUniqueCategory(category) { function validateUniqueCategory(sarif) {
var _a, _b, _c;
// This check only works on actions as env vars don't persist between calls to the runner
if (util.isActions()) { if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner // duplicate categories are allowed in the same sarif file
const sentinelEnvVar = `CODEQL_UPLOAD_SARIF${category ? `_${sanitize(category)}` : ""}`; // but not across multiple sarif files
if (process.env[sentinelEnvVar]) { const categories = {};
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per category. " + for (const run of sarif.runs) {
"Please specify a unique `category` to call this action multiple times. " + const id = (_a = run === null || run === void 0 ? void 0 : run.automationDetails) === null || _a === void 0 ? void 0 : _a.id;
`Category: ${category ? category : "(none)"}`); const tool = (_c = (_b = run.tool) === null || _b === void 0 ? void 0 : _b.driver) === null || _c === void 0 ? void 0 : _c.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
categories[category] = { id, tool };
}
for (const [category, { id, tool }] of Object.entries(categories)) {
const sentinelEnvVar = `CODEQL_UPLOAD_SARIF_${category}`;
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. " +
"The easiest fix is to specify a unique value for the `category` input. " +
`Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})`);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
} }
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
} }
} }
exports.validateUniqueCategory = validateUniqueCategory; exports.validateUniqueCategory = validateUniqueCategory;
@@ -348,6 +377,6 @@ exports.validateUniqueCategory = validateUniqueCategory;
* @param str the initial value to sanitize * @param str the initial value to sanitize
*/ */
function sanitize(str) { function sanitize(str) {
return str.replace(/[^a-zA-Z0-9_]/g, "_"); return (str !== null && str !== void 0 ? str : "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
} }
//# sourceMappingURL=upload-lib.js.map //# sourceMappingURL=upload-lib.js.map

File diff suppressed because one or more lines are too long
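validateUniqueCategory now derives one sentinel environment variable per SARIF run from the run's automationDetails.id and tool driver name. A small TypeScript sketch of that derivation, with sanitize copied from the hunk above; sentinelFor is an illustrative helper, not a repository function.

function sanitize(str?: string): string {
  // undefined falls back to "_", non-alphanumerics collapse to "_", and the result is upper-cased
  return (str ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}

function sentinelFor(automationDetailsId?: string, toolName?: string): string {
  const category = `${sanitize(automationDetailsId)}_${sanitize(toolName)}`;
  return `CODEQL_UPLOAD_SARIF_${category}`;
}

// e.g. sentinelFor("language:javascript/os:linux", "CodeQL")
//      === "CODEQL_UPLOAD_SARIF_LANGUAGE_JAVASCRIPT_OS_LINUX_CODEQL"

The upper-casing also explains the new case-insensitive clash exercised in the tests below, where "abc" and "AbC" map to the same sentinel.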

lib/upload-lib.test.js (generated, 125 changes)

@@ -53,20 +53,29 @@ ava_1.default.beforeEach(() => {
const allVersions = newVersions.concat(oldVersions); const allVersions = newVersions.concat(oldVersions);
process.env["GITHUB_EVENT_NAME"] = "push"; process.env["GITHUB_EVENT_NAME"] = "push";
for (const version of allVersions) { for (const version of allVersions) {
const payload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version); const payload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
// Not triggered by a pull request // Not triggered by a pull request
t.falsy(payload.base_ref); t.falsy(payload.base_ref);
t.falsy(payload.base_sha); t.falsy(payload.base_sha);
} }
process.env["GITHUB_EVENT_NAME"] = "pull_request"; process.env["GITHUB_EVENT_NAME"] = "pull_request";
process.env["GITHUB_SHA"] = "commit";
process.env["GITHUB_BASE_REF"] = "master";
process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`; process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`;
for (const version of newVersions) { for (const version of newVersions) {
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version); const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
// Uploads for a merge commit use the merge base
t.deepEqual(payload.base_ref, "refs/heads/master");
t.deepEqual(payload.base_sha, "mergeBaseCommit");
}
for (const version of newVersions) {
const payload = uploadLib.buildPayload("headCommit", "refs/pull/123/head", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
// Uploads for the head use the PR base
t.deepEqual(payload.base_ref, "refs/heads/master"); t.deepEqual(payload.base_ref, "refs/heads/master");
t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e"); t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
} }
for (const version of oldVersions) { for (const version of oldVersions) {
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version); const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
// These older versions won't expect these values // These older versions won't expect these values
t.falsy(payload.base_ref); t.falsy(payload.base_ref);
t.falsy(payload.base_sha); t.falsy(payload.base_sha);
@@ -98,9 +107,13 @@ ava_1.default.beforeEach(() => {
}); });
}); });
(0, ava_1.default)("populateRunAutomationDetails", (t) => { (0, ava_1.default)("populateRunAutomationDetails", (t) => {
let sarif = '{"runs": [{}]}'; let sarif = {
runs: [{}],
};
const analysisKey = ".github/workflows/codeql-analysis.yml:analyze"; const analysisKey = ".github/workflows/codeql-analysis.yml:analyze";
let expectedSarif = '{"runs":[{"automationDetails":{"id":"language:javascript/os:linux/"}}]}'; let expectedSarif = {
runs: [{ automationDetails: { id: "language:javascript/os:linux/" } }],
};
// Category has priority over analysis_key/environment // Category has priority over analysis_key/environment
let modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, "language:javascript/os:linux", analysisKey, '{"language": "other", "os": "other"}'); let modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, "language:javascript/os:linux", analysisKey, '{"language": "other", "os": "other"}');
t.deepEqual(modifiedSarif, expectedSarif); t.deepEqual(modifiedSarif, expectedSarif);
@@ -108,25 +121,99 @@ ava_1.default.beforeEach(() => {
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, "language:javascript/os:linux/", analysisKey, ""); modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, "language:javascript/os:linux/", analysisKey, "");
t.deepEqual(modifiedSarif, expectedSarif); t.deepEqual(modifiedSarif, expectedSarif);
// check that the automation details doesn't get overwritten // check that the automation details doesn't get overwritten
sarif = '{"runs":[{"automationDetails":{"id":"my_id"}}]}'; sarif = { runs: [{ automationDetails: { id: "my_id" } }] };
expectedSarif = '{"runs":[{"automationDetails":{"id":"my_id"}}]}'; expectedSarif = { runs: [{ automationDetails: { id: "my_id" } }] };
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
t.deepEqual(modifiedSarif, expectedSarif);
// check multiple runs
sarif = { runs: [{ automationDetails: { id: "my_id" } }, {}] };
expectedSarif = {
runs: [
{ automationDetails: { id: "my_id" } },
{
automationDetails: {
id: ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/",
},
},
],
};
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}'); modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
t.deepEqual(modifiedSarif, expectedSarif); t.deepEqual(modifiedSarif, expectedSarif);
}); });
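The assertions above pin down the behaviour of `populateRunAutomationDetails`: an explicit category wins, otherwise the id is derived from the analysis key plus the environment, runs that already carry an `automationDetails.id` are left alone, and the id ends with a trailing slash. A minimal sketch of that derivation, under the assumption that environment keys are emitted in sorted order (the helper name `deriveAutomationId` is hypothetical; the real logic lives in `upload-lib`):

```js
// Sketch only: reconstructs the id derivation implied by the expected values above.
function deriveAutomationId(category, analysisKey, environment) {
  if (category !== undefined) {
    // An explicit category takes priority and is normalized to end with "/".
    return category.endsWith("/") ? category : `${category}/`;
  }
  let id = `${analysisKey}/`;
  // Environment entries are appended as key:value pairs, assumed here to be sorted by key.
  const env = environment ? JSON.parse(environment) : {};
  for (const key of Object.keys(env).sort()) {
    id += `${key}:${env[key]}/`;
  }
  return id;
}

console.log(deriveAutomationId(undefined, ".github/workflows/codeql-analysis.yml:analyze", '{"os": "linux", "language": "javascript"}'));
// -> ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/"
```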
(0, ava_1.default)("validateUniqueCategory", (t) => { (0, ava_1.default)("validateUniqueCategory when empty", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(undefined)); t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif()));
t.throws(() => uploadLib.validateUniqueCategory(undefined)); t.throws(() => uploadLib.validateUniqueCategory(createMockSarif()));
t.notThrows(() => uploadLib.validateUniqueCategory("abc")); });
t.throws(() => uploadLib.validateUniqueCategory("abc")); (0, ava_1.default)("validateUniqueCategory for automation details id", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory("def")); t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
t.throws(() => uploadLib.validateUniqueCategory("def")); t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("AbC")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("def")));
// Our category sanitization is not perfect. Here are some examples // Our category sanitization is not perfect. Here are some examples
// of where we see false clashes // of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory("abc/def")); t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc/def")));
t.throws(() => uploadLib.validateUniqueCategory("abc@def")); t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc@def")));
t.throws(() => uploadLib.validateUniqueCategory("abc_def")); t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_def")));
t.throws(() => uploadLib.validateUniqueCategory("abc def")); t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc def")));
// this one is fine // this one is fine
t.notThrows(() => uploadLib.validateUniqueCategory("abc_ def")); t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_ def")));
}); });
(0, ava_1.default)("validateUniqueCategory for tool name", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "AbC")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "def")));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc/def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc@def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc_def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc def")));
// this one is fine
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_ def")));
});
(0, ava_1.default)("validateUniqueCategory for automation details id and tool name", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "abc")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_", "def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_", "def")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("ghi", "_jkl")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("ghi", "_jkl")));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "_")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "def__")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_def")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("mno_", "pqr")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("mno", "_pqr")));
});
(0, ava_1.default)("validateUniqueCategory for multiple runs", (t) => {
const sarif1 = createMockSarif("abc", "def");
const sarif2 = createMockSarif("ghi", "jkl");
// duplicate categories are allowed within the same sarif file
const multiSarif = { runs: [sarif1.runs[0], sarif1.runs[0], sarif2.runs[0]] };
t.notThrows(() => uploadLib.validateUniqueCategory(multiSarif));
// should throw if there are duplicate categories in separate validations
t.throws(() => uploadLib.validateUniqueCategory(sarif1));
t.throws(() => uploadLib.validateUniqueCategory(sarif2));
});
function createMockSarif(id, tool) {
return {
runs: [
{
automationDetails: {
id,
},
tool: {
driver: {
name: tool,
},
},
},
],
};
}
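The false clashes flagged in the comments above fall out of how the per-upload sentinel is built: the automation details id and the tool name are each sanitized into an environment-variable-safe token and joined with an underscore, so values that differ only in non-alphanumeric characters collapse together. A minimal sketch of that behaviour, reconstructed from the assertions (the helper names are hypothetical; the real logic lives in `upload-lib` and may differ in detail):

```js
// Sketch only: reconstructs the sanitization implied by the assertions above.
function sanitize(str) {
  // undefined becomes "_", every character outside [a-zA-Z0-9_] becomes "_",
  // and the comparison is case-insensitive.
  return (str ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toUpperCase();
}

function sentinelFor(automationDetailsId, toolName) {
  return `${sanitize(automationDetailsId)}_${sanitize(toolName)}`;
}

// "abc/def", "abc@def", "abc_def" and "abc def" all collapse to the same token,
// which is why only the first of them is accepted...
console.log(sentinelFor("abc/def", undefined)); // "ABC_DEF__"
console.log(sentinelFor("abc def", undefined)); // "ABC_DEF__"
// ...while "abc_ def" keeps its extra underscore and stays unique.
console.log(sentinelFor("abc_ def", undefined)); // "ABC__DEF__"
```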
//# sourceMappingURL=upload-lib.test.js.map //# sourceMappingURL=upload-lib.test.js.map

File diff suppressed because one or more lines are too long

View File

@@ -48,6 +48,7 @@ async function run() {
}; };
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails); const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)()); const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
core.setOutput("sarif-id", uploadResult.sarifID);
if (actionsUtil.getRequiredInput("wait-for-processing") === "true") { if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)()); await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
} }

View File

@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"} 
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QACjD,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

101
lib/util.js generated
View File

@@ -18,24 +18,37 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod); __setModuleDefault(result, mod);
return result; return result;
}; };
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0; exports.getMlPoweredJsQueriesStatus = exports.ML_POWERED_JS_QUERIES_PACK = exports.checkNotWindows11 = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const os = __importStar(require("os")); const os = __importStar(require("os"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const core = __importStar(require("@actions/core")); const core = __importStar(require("@actions/core"));
const del_1 = __importDefault(require("del"));
const semver = __importStar(require("semver")); const semver = __importStar(require("semver"));
const api_client_1 = require("./api-client"); const api_client_1 = require("./api-client");
const apiCompatibility = __importStar(require("./api-compatibility.json")); const apiCompatibility = __importStar(require("./api-compatibility.json"));
const codeql_1 = require("./codeql"); const codeql_1 = require("./codeql");
/**
* Specifies bundle versions that are known to be broken
* and will not be used if found in the toolcache.
*/
const BROKEN_VERSIONS = ["0.0.0-20211207"];
/** /**
* The URL for github.com. * The URL for github.com.
*/ */
exports.GITHUB_DOTCOM_URL = "https://github.com"; exports.GITHUB_DOTCOM_URL = "https://github.com";
/** /**
* Name of the debugging artifact. * Default name of the debugging artifact.
*/ */
exports.DEBUG_ARTIFACT_NAME = "debug-artifacts"; exports.DEFAULT_DEBUG_ARTIFACT_NAME = "debug-artifacts";
/**
* Default name of the database in the debugging artifact.
*/
exports.DEFAULT_DEBUG_DATABASE_NAME = "db";
/** /**
* Get the extra options for the codeql commands. * Get the extra options for the codeql commands.
*/ */
@@ -59,8 +72,7 @@ exports.getExtraOptionsEnvParam = getExtraOptionsEnvParam;
* *
* Returns an array of unique string tool names. * Returns an array of unique string tool names.
*/ */
function getToolNames(sarifContents) { function getToolNames(sarif) {
const sarif = JSON.parse(sarifContents);
const toolNames = {}; const toolNames = {};
for (const run of sarif.runs || []) { for (const run of sarif.runs || []) {
const tool = run.tool || {}; const tool = run.tool || {};
@@ -81,7 +93,7 @@ async function withTmpDir(body) {
const symlinkSubdir = path.join(tmpDir, "symlink"); const symlinkSubdir = path.join(tmpDir, "symlink");
fs.symlinkSync(realSubdir, symlinkSubdir, "dir"); fs.symlinkSync(realSubdir, symlinkSubdir, "dir");
const result = await body(symlinkSubdir); const result = await body(symlinkSubdir);
fs.rmSync(tmpDir, { recursive: true, force: true }); await (0, del_1.default)(tmpDir, { force: true });
return result; return result;
} }
exports.withTmpDir = withTmpDir; exports.withTmpDir = withTmpDir;
@@ -483,12 +495,18 @@ async function codeQlVersionAbove(codeql, requiredVersion) {
} }
exports.codeQlVersionAbove = codeQlVersionAbove; exports.codeQlVersionAbove = codeQlVersionAbove;
// Create a bundle for the given DB, if it doesn't already exist // Create a bundle for the given DB, if it doesn't already exist
async function bundleDb(config, language, codeql) { async function bundleDb(config, language, codeql, dbName) {
const databasePath = getCodeQLDatabasePath(config, language); const databasePath = getCodeQLDatabasePath(config, language);
const databaseBundlePath = path.resolve(config.dbLocation, `${databasePath}.zip`); const databaseBundlePath = path.resolve(config.dbLocation, `${dbName}.zip`);
if (!fs.existsSync(databaseBundlePath)) { // For a tiny bit of added safety, delete the file if it exists.
await codeql.databaseBundle(databasePath, databaseBundlePath); // The file is probably from an earlier call to this function, either
// as part of this action step or a previous one, but it could also be
// from somewhere else or someone trying to make the action upload a
// non-database file.
if (fs.existsSync(databaseBundlePath)) {
await (0, del_1.default)(databaseBundlePath, { force: true });
} }
await codeql.databaseBundle(databasePath, databaseBundlePath, dbName);
return databaseBundlePath; return databaseBundlePath;
} }
exports.bundleDb = bundleDb; exports.bundleDb = bundleDb;
@@ -496,4 +514,67 @@ async function delay(milliseconds) {
return new Promise((resolve) => setTimeout(resolve, milliseconds)); return new Promise((resolve) => setTimeout(resolve, milliseconds));
} }
exports.delay = delay; exports.delay = delay;
function isGoodVersion(versionSpec) {
return !BROKEN_VERSIONS.includes(versionSpec);
}
exports.isGoodVersion = isGoodVersion;
function checkNotWindows11() {
if (os.platform() === "win32" && semver.gte(os.release(), "10.0.20348")) {
throw new Error("Tracing builds with CodeQL is currently not supported on Windows 11 and Windows Server 2022. Please modify your Actions workflow to use an earlier version of Windows for this job, for example by setting `runs-on: windows-2019`.");
}
}
exports.checkNotWindows11 = checkNotWindows11;
/**
* The ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
* queries beta.
*/
exports.ML_POWERED_JS_QUERIES_PACK = {
packName: "codeql/javascript-experimental-atm-queries",
version: "~0.0.2",
};
/**
* Get information about ML-powered JS queries to populate status reports with.
*
* This will be:
*
* - The version string if the analysis is using the ML-powered query pack that will be added to the
* analysis if the repo is opted into the ML-powered queries beta, i.e.
* {@link ML_POWERED_JS_QUERIES_PACK.version}. If the version string
* {@link ML_POWERED_JS_QUERIES_PACK.version} is undefined, then the status report string will be
* "latest", however this shouldn't occur in practice (see comment below).
* - "false" if the analysis won't run any ML-powered JS queries.
* - "other" in all other cases.
*
 * The goal of the status report here is to allow us to compare the occurrence of timeouts and other
* errors with ML-powered queries turned on and off. We also want to be able to compare minor
* version bumps caused by us bumping the version range of `ML_POWERED_JS_QUERIES_PACK` in a new
* version of the CodeQL Action. For instance, we might want to compare the `~0.1.0` and `~0.0.2`
* version strings.
*
* We restrict the set of strings we report here by excluding other version strings and combinations
* of version strings. We do this to limit the cardinality of the ML-powered JS queries status
* report field, since some platforms that ingest this status report bill based on the cardinality
* of its fields.
*
* This function lives here rather than in `init-action.ts` so it's easier to test, since tests for
* `init-action.ts` would each need to live in their own file. See `analyze-action-env.ts` for an
* explanation as to why this is.
*/
function getMlPoweredJsQueriesStatus(config) {
const mlPoweredJsQueryPacks = (config.packs.javascript || []).filter((pack) => pack.packName === exports.ML_POWERED_JS_QUERIES_PACK.packName);
if (mlPoweredJsQueryPacks.length === 0) {
return "false";
}
const firstVersionString = mlPoweredJsQueryPacks[0].version;
if (mlPoweredJsQueryPacks.length === 1 &&
exports.ML_POWERED_JS_QUERIES_PACK.version === firstVersionString) {
// We should always specify an explicit version string in `ML_POWERED_JS_QUERIES_PACK`,
// otherwise we won't be able to make changes to the pack unless those changes are compatible
// with each version of the CodeQL Action. Therefore in practice, we should never hit the
// `latest` case here.
return exports.ML_POWERED_JS_QUERIES_PACK.version || "latest";
}
return "other";
}
exports.getMlPoweredJsQueriesStatus = getMlPoweredJsQueriesStatus;
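As a quick reference for the mapping described in the doc comment, a hedged usage sketch (it assumes this compiled `util.js` can be required directly, and passes config objects trimmed to the one field the helper reads):

```js
// Sketch only: exercises getMlPoweredJsQueriesStatus with minimal config stubs.
const util = require("./util");

const status = (packs) =>
  util.getMlPoweredJsQueriesStatus({ packs: { javascript: packs } });

console.log(status([])); // "false": no ML-powered pack in scope
console.log(status([util.ML_POWERED_JS_QUERIES_PACK])); // "~0.0.2": the pack and version the Action adds itself
console.log(status([{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "~0.0.1" }])); // "other"
```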
//# sourceMappingURL=util.js.map //# sourceMappingURL=util.js.map

File diff suppressed because one or more lines are too long

60
lib/util.test.js generated
View File

@@ -35,7 +35,7 @@ const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default); (0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("getToolNames", (t) => { (0, ava_1.default)("getToolNames", (t) => {
const input = fs.readFileSync(`${__dirname}/../src/testdata/tool-names.sarif`, "utf8"); const input = fs.readFileSync(`${__dirname}/../src/testdata/tool-names.sarif`, "utf8");
const toolNames = util.getToolNames(input); const toolNames = util.getToolNames(JSON.parse(input));
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]); t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
}); });
(0, ava_1.default)("getMemoryFlag() should return the correct --ram flag", (t) => { (0, ava_1.default)("getMemoryFlag() should return the correct --ram flag", (t) => {
@@ -204,4 +204,62 @@ async function mockStdInForAuthExpectError(t, mockLogger, ...text) {
const stdin = stream.Readable.from(text); const stdin = stream.Readable.from(text);
await t.throwsAsync(async () => util.getGitHubAuth(mockLogger, undefined, true, stdin)); await t.throwsAsync(async () => util.getGitHubAuth(mockLogger, undefined, true, stdin));
} }
const ML_POWERED_JS_STATUS_TESTS = [
[[], "false"],
[[{ packName: "someOtherPack" }], "false"],
[
[{ packName: "someOtherPack" }, util.ML_POWERED_JS_QUERIES_PACK],
util.ML_POWERED_JS_QUERIES_PACK.version,
],
[[util.ML_POWERED_JS_QUERIES_PACK], util.ML_POWERED_JS_QUERIES_PACK.version],
[[{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName }], "other"],
[
[{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "~0.0.1" }],
"other",
],
[
[
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "0.0.1" },
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "0.0.2" },
],
"other",
],
[
[
{ packName: "someOtherPack" },
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName },
],
"other",
],
];
for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
const packDescriptions = `[${packs
.map((pack) => JSON.stringify(pack))
.join(", ")}]`;
(0, ava_1.default)(`ML-powered JS queries status report is "${expectedStatus}" for packs = ${packDescriptions}`, (t) => {
return util.withTmpDir(async (tmpDir) => {
const config = {
languages: [],
queries: {},
paths: [],
pathsIgnore: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: {
type: util.GitHubVariant.DOTCOM,
},
dbLocation: "",
packs: {
javascript: packs,
},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);
});
});
}
//# sourceMappingURL=util.test.js.map //# sourceMappingURL=util.test.js.map

File diff suppressed because one or more lines are too long

2
node_modules/.bin/ava generated vendored
View File

@@ -1 +1 @@
../ava/cli.js ../ava/entrypoints/cli.mjs

View File

@@ -1 +0,0 @@
../import-local/fixtures/cli.js

1
node_modules/.bin/is-ci generated vendored
View File

@@ -1 +0,0 @@
../is-ci/bin.js

1
node_modules/.bin/rc generated vendored
View File

@@ -1 +0,0 @@
../rc/cli.js

2143
node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
MIT License MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com) Copyright (c) Mark Wubben (https://novemberborn.net)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

View File

@@ -1,11 +1,21 @@
# @ava/typescript # @ava/typescript
Adds [TypeScript](https://www.typescriptlang.org/) support to [AVA](https://avajs.dev). Adds [TypeScript](https://www.typescriptlang.org/) support to [AVA 4](https://avajs.dev).
This is designed to work for projects that precompile TypeScript. It allows AVA to load the compiled JavaScript, while configuring AVA to treat the TypeScript files as test files. This is designed to work for projects that precompile TypeScript. It allows AVA to load the compiled JavaScript, while configuring AVA to treat the TypeScript files as test files.
In other words, say you have a test file at `src/test.ts`. You've configured TypeScript to output to `build/`. Using `@ava/typescript` you can run the test using `npx ava src/test.ts`. In other words, say you have a test file at `src/test.ts`. You've configured TypeScript to output to `build/`. Using `@ava/typescript` you can run the test using `npx ava src/test.ts`.
## For AVA 3 users
Use version 2:
```console
npm install --save-dev @ava/typescript@2
```
Note that v2 does not support ES modules. This requires v3 and AVA 4.
## Enabling TypeScript support ## Enabling TypeScript support
Add this package to your project: Add this package to your project:
@@ -39,6 +49,10 @@ Output files are expected to have the `.js` extension.
AVA searches your entire project for `*.js`, `*.cjs`, `*.mjs` and `*.ts` files (or other extensions you've configured). It will ignore such files found in the `rewritePaths` targets (e.g. `build/`). If you use more specific paths, for instance `build/main/`, you may need to change AVA's `files` configuration to ignore other directories. AVA searches your entire project for `*.js`, `*.cjs`, `*.mjs` and `*.ts` files (or other extensions you've configured). It will ignore such files found in the `rewritePaths` targets (e.g. `build/`). If you use more specific paths, for instance `build/main/`, you may need to change AVA's `files` configuration to ignore other directories.
## ES Modules
If your `package.json` has configured `"type": "module"`, or you've configured AVA to treat the `js` extension as `module`, then `@ava/typescript` will import the output file as an ES module. Note that this is based on the *output file*, not the `ts` extension.
## Add additional extensions ## Add additional extensions
You can configure AVA to recognize additional file extensions. To add (partial†) JSX support: You can configure AVA to recognize additional file extensions. To add (partial†) JSX support:

View File

@@ -1,9 +1,10 @@
'use strict'; import fs from 'node:fs';
const path = require('path'); import path from 'node:path';
const escapeStringRegexp = require('escape-string-regexp'); import {pathToFileURL} from 'node:url';
const execa = require('execa'); import escapeStringRegexp from 'escape-string-regexp';
const pkg = require('./package.json'); import execa from 'execa';
const pkg = JSON.parse(fs.readFileSync(new URL('package.json', import.meta.url)));
const help = `See https://github.com/avajs/typescript/blob/v${pkg.version}/README.md`; const help = `See https://github.com/avajs/typescript/blob/v${pkg.version}/README.md`;
function isPlainObject(x) { function isPlainObject(x) {
@@ -44,7 +45,7 @@ const configProperties = {
required: true, required: true,
isValid(compile) { isValid(compile) {
return compile === false || compile === 'tsc'; return compile === false || compile === 'tsc';
} },
}, },
rewritePaths: { rewritePaths: {
required: true, required: true,
@@ -53,23 +54,21 @@ const configProperties = {
return false; return false;
} }
return Object.entries(rewritePaths).every(([from, to]) => { return Object.entries(rewritePaths).every(([from, to]) => from.endsWith('/') && typeof to === 'string' && to.endsWith('/'));
return from.endsWith('/') && typeof to === 'string' && to.endsWith('/'); },
});
}
}, },
extensions: { extensions: {
required: false, required: false,
isValid(extensions) { isValid(extensions) {
return Array.isArray(extensions) && return Array.isArray(extensions)
extensions.length > 0 && && extensions.length > 0
extensions.every(ext => typeof ext === 'string' && ext !== '') && && extensions.every(ext => typeof ext === 'string' && ext !== '')
new Set(extensions).size === extensions.length; && new Set(extensions).size === extensions.length;
} },
} },
}; };
module.exports = ({negotiateProtocol}) => { export default function typescriptProvider({negotiateProtocol}) {
const protocol = negotiateProtocol(['ava-3.2'], {version: pkg.version}); const protocol = negotiateProtocol(['ava-3.2'], {version: pkg.version});
if (protocol === null) { if (protocol === null) {
return; return;
@@ -86,12 +85,12 @@ module.exports = ({negotiateProtocol}) => {
const { const {
extensions = ['ts'], extensions = ['ts'],
rewritePaths: relativeRewritePaths, rewritePaths: relativeRewritePaths,
compile compile,
} = config; } = config;
const rewritePaths = Object.entries(relativeRewritePaths).map(([from, to]) => [ const rewritePaths = Object.entries(relativeRewritePaths).map(([from, to]) => [
path.join(protocol.projectDir, from), path.join(protocol.projectDir, from),
path.join(protocol.projectDir, to) path.join(protocol.projectDir, to),
]); ]);
const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`); const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);
@@ -102,13 +101,13 @@ module.exports = ({negotiateProtocol}) => {
} }
return { return {
extensions: extensions.slice(), extensions: [...extensions],
rewritePaths: rewritePaths.slice() rewritePaths: [...rewritePaths],
}; };
}, },
get extensions() { get extensions() {
return extensions.slice(); return [...extensions];
}, },
ignoreChange(filePath) { ignoreChange(filePath) {
@@ -139,18 +138,19 @@ module.exports = ({negotiateProtocol}) => {
filePatterns: [ filePatterns: [
...filePatterns, ...filePatterns,
'!**/*.d.ts', '!**/*.d.ts',
...Object.values(relativeRewritePaths).map(to => `!${to}**`) ...Object.values(relativeRewritePaths).map(to => `!${to}**`),
], ],
ignoredByWatcherPatterns: [ ignoredByWatcherPatterns: [
...ignoredByWatcherPatterns, ...ignoredByWatcherPatterns,
...Object.values(relativeRewritePaths).map(to => `${to}**/*.js.map`) ...Object.values(relativeRewritePaths).map(to => `${to}**/*.js.map`),
] ],
}; };
} },
}; };
}, },
worker({extensionsToLoadAsModules, state: {extensions, rewritePaths}}) { worker({extensionsToLoadAsModules, state: {extensions, rewritePaths}}) {
const useImport = extensionsToLoadAsModules.includes('js');
const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`); const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);
return { return {
@@ -159,18 +159,12 @@ module.exports = ({negotiateProtocol}) => {
}, },
async load(ref, {requireFn}) { async load(ref, {requireFn}) {
for (const extension of extensionsToLoadAsModules) {
if (ref.endsWith(`.${extension}`)) {
throw new Error('@ava/typescript cannot yet load ESM files');
}
}
const [from, to] = rewritePaths.find(([from]) => ref.startsWith(from)); const [from, to] = rewritePaths.find(([from]) => ref.startsWith(from));
// TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html // TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
const rewritten = `${to}${ref.slice(from.length)}`.replace(testFileExtension, '.js'); const rewritten = `${to}${ref.slice(from.length)}`.replace(testFileExtension, '.js');
return requireFn(rewritten); return useImport ? import(pathToFileURL(rewritten)) : requireFn(rewritten); // eslint-disable-line node/no-unsupported-features/es-syntax
} },
}; };
} },
}; };
}; }

View File

@@ -5,7 +5,7 @@ You can also use this to escape a string that is inserted into the middle of a r
@example @example
``` ```
import escapeStringRegexp = require('escape-string-regexp'); import escapeStringRegexp from 'escape-string-regexp';
const escapedString = escapeStringRegexp('How much $ for a 🦄?'); const escapedString = escapeStringRegexp('How much $ for a 🦄?');
//=> 'How much \\$ for a 🦄\\?' //=> 'How much \\$ for a 🦄\\?'
@@ -13,6 +13,4 @@ const escapedString = escapeStringRegexp('How much $ for a 🦄?');
new RegExp(escapedString); new RegExp(escapedString);
``` ```
*/ */
declare const escapeStringRegexp: (string: string) => string; export default function escapeStringRegexp(string: string): string;
export = escapeStringRegexp;

View File

@@ -1,13 +1,11 @@
'use strict'; export default function escapeStringRegexp(string) {
module.exports = string => {
if (typeof string !== 'string') { if (typeof string !== 'string') {
throw new TypeError('Expected a string'); throw new TypeError('Expected a string');
} }
// Escape characters with special meaning either inside or outside character sets. // Escape characters with special meaning either inside or outside character sets.
	// Escape characters with special meaning either inside or outside character sets.	// Escape characters with special meaning either inside or outside character sets.
	// Use a simple backslash escape when it's always valid, and a \unnnn escape when the simpler form would be disallowed by Unicode patterns' stricter grammar.	// Use a simple backslash escape when it's always valid, and a `\xnn` escape when the simpler form would be disallowed by Unicode patterns' stricter grammar.
return string return string
.replace(/[|\\{}()[\]^$+*?.]/g, '\\$&') .replace(/[|\\{}()[\]^$+*?.]/g, '\\$&')
.replace(/-/g, '\\x2d'); .replace(/-/g, '\\x2d');
}; }

View File

@@ -1,6 +1,6 @@
{ {
"name": "escape-string-regexp", "name": "escape-string-regexp",
"version": "4.0.0", "version": "5.0.0",
"description": "Escape RegExp special characters", "description": "Escape RegExp special characters",
"license": "MIT", "license": "MIT",
"repository": "sindresorhus/escape-string-regexp", "repository": "sindresorhus/escape-string-regexp",
@@ -10,8 +10,10 @@
"email": "sindresorhus@gmail.com", "email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com" "url": "https://sindresorhus.com"
}, },
"type": "module",
"exports": "./index.js",
"engines": { "engines": {
"node": ">=10" "node": ">=12"
}, },
"scripts": { "scripts": {
"test": "xo && ava && tsd" "test": "xo && ava && tsd"
@@ -31,8 +33,8 @@
"characters" "characters"
], ],
"devDependencies": { "devDependencies": {
"ava": "^1.4.1", "ava": "^3.15.0",
"tsd": "^0.11.0", "tsd": "^0.14.0",
"xo": "^0.28.3" "xo": "^0.38.2"
} }
} }

View File

@@ -1,4 +1,4 @@
# escape-string-regexp [![Build Status](https://travis-ci.org/sindresorhus/escape-string-regexp.svg?branch=master)](https://travis-ci.org/sindresorhus/escape-string-regexp) # escape-string-regexp
> Escape RegExp special characters > Escape RegExp special characters
@@ -11,7 +11,7 @@ $ npm install escape-string-regexp
## Usage ## Usage
```js ```js
const escapeStringRegexp = require('escape-string-regexp'); import escapeStringRegexp from 'escape-string-regexp';
const escapedString = escapeStringRegexp('How much $ for a 🦄?'); const escapedString = escapeStringRegexp('How much $ for a 🦄?');
//=> 'How much \\$ for a 🦄\\?' //=> 'How much \\$ for a 🦄\\?'

View File

@@ -1,13 +1,17 @@
{ {
"name": "@ava/typescript", "name": "@ava/typescript",
"version": "2.0.0", "version": "3.0.1",
"description": "TypeScript provider for AVA", "description": "TypeScript provider for AVA",
"engines": { "engines": {
"node": ">=12.22 <13 || >=14.16 <15 || >=15" "node": ">=12.22 <13 || >=14.17 <15 || >=16.4 <17 || >=17"
}, },
"files": [ "files": [
"index.js" "index.js"
], ],
"exports": {
".": "./index.js"
},
"type": "module",
"author": "Mark Wubben (https://novemberborn.net)", "author": "Mark Wubben (https://novemberborn.net)",
"repository": "avajs/typescript", "repository": "avajs/typescript",
"license": "MIT", "license": "MIT",
@@ -19,15 +23,15 @@
"test": "xo && c8 ava" "test": "xo && c8 ava"
}, },
"dependencies": { "dependencies": {
"escape-string-regexp": "^4.0.0", "escape-string-regexp": "^5.0.0",
"execa": "^5.0.0" "execa": "^5.1.1"
}, },
"devDependencies": { "devDependencies": {
"ava": "^3.15.0", "ava": "4.0.0-rc.1",
"c8": "^7.7.1", "c8": "^7.10.0",
"del": "^6.0.0", "del": "^6.0.0",
"typescript": "^4.2.4", "typescript": "^4.4.4",
"xo": "^0.38.2" "xo": "^0.46.3"
}, },
"c8": { "c8": {
"reporter": [ "reporter": [
@@ -40,14 +44,15 @@
"files": [ "files": [
"!test/broken-fixtures/**" "!test/broken-fixtures/**"
], ],
"ignoredByWatcher": [
"test/fixtures/**",
"test/broken-fixtures/**"
],
"timeout": "60s" "timeout": "60s"
}, },
"xo": { "xo": {
"ignores": [ "ignores": [
"test/broken-fixtures" "test/broken-fixtures"
], ]
"rules": {
"import/order": "off"
}
} }
} }

View File

@@ -1,14 +0,0 @@
ISC License (ISC)
Copyright (c) 2017, Mark Wubben <mark@novemberborn.net> (novemberborn.net)
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.

View File

@@ -1,18 +0,0 @@
# @concordance/react
React plugin for [Concordance](https://github.com/concordancejs/concordance).
Allows
[`React.createElement()`](https://facebook.github.io/react/docs/react-api.html#createelement)
objects to be compared, formatted, diffed and serialized. Also supports
`toJSON()` renderings of
[`react-test-renderer`](https://www.npmjs.com/package/react-test-renderer).
These may be compared to `React.createElement()` objects.
When comparing [React
component](https://facebook.github.io/react/docs/components-and-props.html)
elements, the element type is compared by identity. After deserialization the
element types are compared by function name.
Component elements are formatted with a ⍟ character after the element
name. Properties and children are formatted by [Concordance](https://github.com/concordancejs/concordance).

View File

@@ -1,75 +0,0 @@
'use strict'
const pkg = require('./package.json')
const elementFactory = require('./lib/elementFactory')
const testJsonFactory = require('./lib/testJsonFactory')
// Must be unique across all registered plugins.
exports.name = pkg.name
// Expected API version to be passed to register().
exports.apiVersion = 1
// Expected minimal version of Concordance. Concordance will increment its API
// version for breaking changes, this is useful if you rely on features or
// patches that were introduced in a specific version of Concordance.
exports.minimalConcordanceVersion = '1.0.0'
// Plugin-specific version of its serialization output.
exports.serializerVersion = 2
exports.theme = {
react: {
functionType: '\u235F',
openTag: {
start: '<',
end: '>',
selfClose: '/',
selfCloseVoid: ' /'
},
closeTag: {
open: '</',
close: '>'
},
tagName: {open: '', close: ''},
attribute: {
separator: '=',
value: {
openBracket: '{',
closeBracket: '}',
string: {
line: {open: '"', close: '"', escapeQuote: '"'}
}
}
},
child: {
openBracket: '{',
closeBracket: '}',
string: {
line: {open: '', close: '', escapeQuote: ''},
multiline: {start: '', end: '', escapeQuote: ''}
}
}
}
}
const ELEMENT = Symbol.for('react.element')
const TEST_JSON = Symbol.for('react.test.json')
function register (api) {
const reactTags = new Set()
const element = elementFactory(api, reactTags)
const testJson = testJsonFactory(api, element)
api.addDescriptor(0x01, element.tag, element.deserialize)
api.addDescriptor(0x02, testJson.tag, testJson.deserialize)
reactTags.add(element.tag).add(testJson.tag)
return value => {
if (value.$$typeof === ELEMENT) return element.describe
if (value.$$typeof === TEST_JSON) return testJson.describe
return null
}
}
exports.register = register

View File

@@ -1,239 +0,0 @@
'use strict'
function diffShallow (api, actual, expected, theme, indent) {
const childBuffer = api.lineBuilder.buffer()
const propertyBuffer = api.lineBuilder.buffer()
return {
append (formatted, origin) {
if (origin.isItem === true) {
childBuffer.append(formatted)
} else {
propertyBuffer.append(formatted)
}
},
finalize: () => {
const namesAreEqual = actual.compareNames(expected)
const actualName = actual.formatName(theme)
const expectedName = expected.formatName(theme)
const openTag = theme.react.openTag
const innerIndentation = indent.increase()
const allChildren = childBuffer.withFirstPrefixed(innerIndentation)
const children = allChildren.decompose()
const allProperties = propertyBuffer.withFirstPrefixed(innerIndentation)
const properties = allProperties.decompose()
// If the first properties are also the last, and either side has no
// children, ensure the properties are treated as being last. This
// leads to a better balanced diff.
if (properties.remaining.isEmpty && (!actual.hasChildren || !expected.hasChildren)) {
properties.last = properties.first
properties.first = {actual: api.lineBuilder.buffer(), expected: api.lineBuilder.buffer()}
}
const result = api.lineBuilder.buffer()
// Create a custom diff that is as neat as possible. It's likely
// there's a generic algorithm that can be used, but for expediency's
// sake handles all possible diffs by brute force instead.
if (actual.hasProperties && expected.hasProperties) {
if (namesAreEqual) {
result
.append(api.lineBuilder.first(openTag.start + actualName))
.append(properties.first.actual.stripFlags())
.append(properties.first.expected.stripFlags())
} else {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName))
.append(properties.first.actual.stripFlags())
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
.append(properties.first.expected.stripFlags())
}
result.append(properties.remaining.stripFlags())
if (actual.hasChildren && expected.hasChildren) {
result
.append(properties.last.actual.stripFlags())
.append(properties.last.expected.stripFlags())
.append(api.lineBuilder.line(indent + openTag.end))
if (namesAreEqual) {
result
.append(allChildren.stripFlags())
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
} else {
result
.append(children.first.actual.stripFlags())
.append(children.first.expected.stripFlags())
.append(children.remaining.stripFlags())
.append(children.last.actual.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
}
} else if (actual.hasChildren) {
result
.append(properties.last.actual.stripFlags())
.append(api.lineBuilder.actual.line(indent + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(properties.last.expected.stripFlags())
.append(api.lineBuilder.expected.last(indent + openTag.selfClose + openTag.end))
} else if (expected.hasChildren) {
result
.append(properties.last.actual.stripFlags())
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
.append(properties.last.expected.stripFlags())
.append(api.lineBuilder.expected.line(indent + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
} else {
result
.append(properties.last.actual.stripFlags())
.append(properties.last.expected.stripFlags())
.append(api.lineBuilder.last(indent + openTag.selfClose + openTag.end))
}
} else if (actual.hasProperties) {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName))
.append(allProperties.stripFlags())
if (actual.hasChildren && expected.hasChildren) {
result
.append(api.lineBuilder.actual.line(indent + openTag.end))
.append(children.first.actual.stripFlags())
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
.append(children.first.expected.stripFlags())
.append(children.remaining.stripFlags())
if (namesAreEqual) {
result
.append(children.last.actual.stripFlags())
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
} else {
result
.append(children.last.actual.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
}
} else if (actual.hasChildren) {
result
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
} else if (expected.hasChildren) {
result
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
} else {
result
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
}
} else if (expected.hasProperties) {
if (actual.hasChildren && expected.hasChildren) {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
.append(children.first.actual.stripFlags())
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
.append(allProperties.stripFlags())
.append(api.lineBuilder.expected.line(indent + openTag.end))
.append(children.first.expected.stripFlags())
.append(children.remaining.stripFlags())
if (namesAreEqual) {
result
.append(children.last.actual.stripFlags())
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
} else {
result
.append(children.last.actual.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
}
} else if (actual.hasChildren) {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
.append(allProperties.stripFlags())
.append(api.lineBuilder.expected.last(indent + openTag.selfClose + openTag.end))
} else if (expected.hasChildren) {
result
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
.append(allProperties.stripFlags())
.append(api.lineBuilder.expected.line(indent + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
} else {
result
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
.append(allProperties.stripFlags())
.append(api.lineBuilder.expected.last(indent + openTag.selfCloseVoid + openTag.end))
}
} else {
if (actual.hasChildren && expected.hasChildren) {
if (namesAreEqual) {
result
.append(api.lineBuilder.first(openTag.start + actualName + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
} else {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
.append(children.first.actual.stripFlags())
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
.append(children.first.expected.stripFlags())
.append(children.remaining.stripFlags())
.append(children.last.actual.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
}
} else if (actual.hasChildren) {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
} else if (expected.hasChildren) {
result
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
} else {
if (namesAreEqual) {
result.append(api.lineBuilder.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
} else {
result
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
}
}
}
return result
},
shouldFormat (subject) {
return subject.isItem === true || subject.isProperty === true
},
increaseIndent: true
}
}
module.exports = diffShallow

View File

@@ -1,353 +0,0 @@
'use strict'
const arrify = require('arrify')
const diffShallow = require('./diffShallow')
const escapeText = require('./escapeText')
const FRAGMENT_NAME = Symbol.for('react.fragment')
function factory (api, reactTags) {
const tag = Symbol('@concordance/react.ElementValue')
function customPropertyFormatter (theme, indent, key, value) {
const separator = theme.react.attribute.separator + theme.react.attribute.value.openBracket
if (value.isSingle) {
return value
.withFirstPrefixed(key.formatAsKey(theme) + separator)
.withLastPostfixed(theme.react.attribute.value.closeBracket)
}
return api.lineBuilder.first(key.formatAsKey(theme) + separator)
.concat(value.withFirstPrefixed(indent.increase()).stripFlags())
.append(api.lineBuilder.last(indent + theme.react.attribute.value.closeBracket))
}
function themeProperty (theme) {
theme.property.increaseValueIndent = true
theme.property.customFormat = customPropertyFormatter
}
function themeStringProperty (theme) {
theme.property.separator = theme.react.attribute.separator
theme.property.after = ''
Object.assign(theme.string.line, theme.react.attribute.value.string.line)
}
function customItemFormatter (theme, indent, value) {
if (value.isSingle) {
return value
.withFirstPrefixed(theme.react.child.openBracket)
.withLastPostfixed(theme.react.child.closeBracket)
}
return api.lineBuilder.first(theme.react.child.openBracket)
.concat(value.withFirstPrefixed(indent.increase()).stripFlags())
.append(api.lineBuilder.last(indent + theme.react.child.closeBracket))
}
function themeChild (theme) {
theme.item.increaseValueIndent = true
theme.item.customFormat = customItemFormatter
}
function themeReactChild (theme) {
theme.item.after = ''
}
function themeStringChild (theme) {
theme.item.after = ''
Object.assign(theme.string, theme.react.child.string)
}
function describe (props) {
const element = props.value
const type = element.type
const hasTypeFn = typeof type === 'function'
const typeFn = hasTypeFn ? type : null
const name = hasTypeFn ? type.displayName || type.name : type
const children = arrify(element.props.children)
const properties = Object.assign({}, element.props)
delete properties.children
if (element.key !== null) {
properties.key = element.key
}
const hasProperties = Object.keys(properties).length > 0
return new DescribedElementValue(Object.assign({
children,
hasProperties,
hasTypeFn,
name,
properties,
typeFn,
isList: children.length > 0
}, props))
}
function deserialize (state, recursor) {
return new DeserializedElementValue(state, recursor)
}
class ElementValue extends api.ObjectValue {
constructor (props) {
super(props)
this.isFragment = props.name === FRAGMENT_NAME
this.name = props.name
this.hasProperties = props.hasProperties
this.hasTypeFn = props.hasTypeFn
this.hasChildren = this.isList
}
compare (expected) {
return this.tag === expected.tag && this.name === expected.name
? api.SHALLOW_EQUAL
: api.UNEQUAL
}
formatName (theme) {
const formatted = api.wrapFromTheme(theme.react.tagName, this.isFragment ? 'React.Fragment' : this.name)
return this.hasTypeFn
? formatted + theme.react.functionType
: formatted
}
compareNames (expected) {
return this.name === expected.name && this.hasTypeFn === expected.hasTypeFn
}
formatShallow (theme, indent) {
const childBuffer = api.lineBuilder.buffer()
const propertyBuffer = api.lineBuilder.buffer()
return {
append (formatted, origin) {
if (origin.isItem === true) {
childBuffer.append(formatted)
} else {
propertyBuffer.append(formatted)
}
},
finalize: () => {
const name = this.formatName(theme)
const openTag = theme.react.openTag
if (!this.hasChildren && !this.hasProperties) {
return api.lineBuilder.single(openTag.start + name + openTag.selfCloseVoid + openTag.end)
}
const innerIndentation = indent.increase()
const children = childBuffer.withFirstPrefixed(innerIndentation).stripFlags()
const properties = propertyBuffer.withFirstPrefixed(innerIndentation).stripFlags()
const result = api.lineBuilder.buffer()
if (this.hasProperties) {
result
.append(api.lineBuilder.first(openTag.start + name))
.append(properties)
if (this.hasChildren) {
result.append(api.lineBuilder.line(indent + openTag.end))
} else {
result.append(api.lineBuilder.last(indent + openTag.selfClose + openTag.end))
}
} else {
result.append(api.lineBuilder.first(openTag.start + name + openTag.end))
}
if (this.hasChildren) {
result
.append(children)
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, name)))
}
return result
},
maxDepth: () => {
const name = this.formatName(theme)
const openTag = theme.react.openTag
if (!this.hasChildren && !this.hasProperties) {
return api.lineBuilder.single(openTag.start + name + openTag.selfCloseVoid + openTag.end)
}
let str = openTag.start + name
if (this.hasProperties) {
str += theme.maxDepth
if (this.hasChildren) {
str += openTag.end
} else {
str += ' ' + openTag.selfClose + openTag.end
}
} else {
str += openTag.end
}
if (this.hasChildren) {
str += theme.maxDepth + api.wrapFromTheme(theme.react.closeTag, name)
}
return api.lineBuilder.single(str)
},
shouldFormat (subject) {
return subject.isItem === true || subject.isProperty === true
},
increaseIndent: true
}
}
prepareDiff (expected) {
return {
compareResult: this.tag === expected.tag
? api.SHALLOW_EQUAL
: api.UNEQUAL
}
}
diffShallow (expected, theme, indent) {
return diffShallow(api, this, expected, theme, indent)
}
serialize () {
return [this.isFragment, this.isFragment ? null : this.name, this.hasProperties, this.hasTypeFn, super.serialize()]
}
}
Object.defineProperty(ElementValue.prototype, 'tag', {value: tag})
function modifyThemes (recursor) {
return api.mapRecursor(recursor, next => {
let modifier
if (next.isItem === true) {
if (next.tag === api.descriptorTags.primitiveItem && next.value.tag === api.descriptorTags.string) {
modifier = themeStringChild
} else if (next.tag === api.descriptorTags.complexItem && reactTags.has(next.value.tag)) {
modifier = themeReactChild
} else {
modifier = themeChild
}
} else if (next.isProperty === true) {
if (
next.tag === api.descriptorTags.primitiveProperty &&
next.value.tag === api.descriptorTags.string &&
!next.value.includesLinebreaks
) {
modifier = themeStringProperty
} else {
modifier = themeProperty
}
}
return modifier
? api.modifyTheme(next, modifier)
: next
})
}
function DescribedMixin (base) {
return class extends api.DescribedMixin(base) {
constructor (props) {
super(props)
this.children = props.children
this.properties = props.properties
this.typeFn = props.typeFn
}
compare (expected) {
const result = super.compare(expected)
return result === api.SHALLOW_EQUAL && this.typeFn !== expected.typeFn
? api.UNEQUAL
: result
}
compareNames (expected) {
return super.compareNames(expected) && this.typeFn === expected.typeFn
}
createPropertyRecursor () {
// Symbols are not valid property keys for React elements. This code
// also assumes that the keys can be formatted as JSX-like attribute
// names. Keys are not pre-escaped before being passed to Concordance's
// property descriptor.
const keys = Object.keys(this.properties).sort()
const size = keys.length
let index = 0
const next = () => {
if (index === size) return null
const key = keys[index++]
// Note that string values are not specifically escaped such that the
// output is valid JSX.
return this.describeProperty(key, this.describeAny(this.properties[key]))
}
return {size, next}
}
createListRecursor () {
if (!this.isList) return super.createListRecursor()
const size = this.children.length
let index = 0
const next = () => {
if (index === size) return null
const current = index++
const child = this.children[current]
const type = typeof child
let descriptor
if (type === 'string') {
descriptor = this.describeAny(escapeText(child))
} else {
descriptor = this.describeAny(child)
}
return this.describeItem(current, descriptor)
}
return {size, next}
}
createRecursor () {
return modifyThemes(super.createRecursor())
}
}
}
function DeserializedMixin (base) {
return class extends api.DeserializedMixin(base) {
constructor (state, recursor) {
super(state[4], recursor)
this.isFragment = state[0]
this.name = this.isFragment ? FRAGMENT_NAME : state[1]
this.hasProperties = state[2]
this.hasTypeFn = state[3]
}
createRecursor () {
return modifyThemes(super.createRecursor())
}
}
}
const DescribedElementValue = DescribedMixin(ElementValue)
const DeserializedElementValue = DeserializedMixin(ElementValue)
return {
DescribedMixin,
DeserializedMixin,
ElementValue,
describe,
deserialize,
tag
}
}
module.exports = factory
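For orientation, the `describe` function above only reads `element.type`, `element.key` and `element.props` from a React element. A minimal sketch of that shape (illustrative only; assumes `react` is installed):

```js
const React = require('react')

// React.createElement pulls `key` out of the props and exposes it as
// element.key, which is why describe() copies it back into `properties`.
const element = React.createElement('a', {href: '/', key: 'home'}, 'Home')

element.type  // => 'a'
element.key   // => 'home'
element.props // => { href: '/', children: 'Home' }
```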

@@ -1,10 +0,0 @@
'use strict'
function escapeText (text) {
return text
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
// TODO: Escape characters that Concordance would otherwise replace with \u
// sequences.
}
module.exports = escapeText
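A quick usage sketch of `escapeText` as defined above (the require path is hypothetical):

```js
const escapeText = require('./escape-text') // hypothetical path to the module above

escapeText('<strong>') // => '&lt;strong&gt;'
escapeText('1 > 0')    // => '1 &gt; 0'
```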

@@ -1,59 +0,0 @@
'use strict'
const arrify = require('arrify')
function factory (api, element) {
const tag = Symbol('@concordance/react.TestJsonValue')
function describe (props) {
const obj = props.value
const name = obj.type
const children = arrify(obj.children)
const properties = Object.assign({}, obj.props)
const hasProperties = Object.keys(properties).length > 0
return new DescribedTestJsonValue(Object.assign({
children,
hasProperties,
hasTypeFn: false,
name,
properties,
typeFn: null,
isList: children.length > 0
}, props))
}
function deserialize (state, recursor) {
return new DeserializedTestJsonValue(state, recursor)
}
class TestJsonValue extends element.ElementValue {
compare (expected) {
// Allow expected value to be a React element.
return (this.tag === expected.tag || expected.tag === element.tag) && this.name === expected.name
? api.SHALLOW_EQUAL
: api.UNEQUAL
}
prepareDiff (expected) {
return {
// Allow expected value to be a React element.
compareResult: this.tag === expected.tag || expected.tag === element.tag
? api.SHALLOW_EQUAL
: api.UNEQUAL
}
}
}
Object.defineProperty(TestJsonValue.prototype, 'tag', {value: tag})
const DescribedTestJsonValue = element.DescribedMixin(TestJsonValue)
const DeserializedTestJsonValue = element.DeserializedMixin(TestJsonValue)
return {
describe,
deserialize,
tag
}
}
module.exports = factory
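The `describe` function above consumes the plain objects produced by `react-test-renderer`'s `toJSON()`. A minimal sketch of that shape (assumes `react` and `react-test-renderer` are installed):

```js
const React = require('react')
const renderer = require('react-test-renderer')

const testJson = renderer.create(React.createElement('a', {href: '/'}, 'Home')).toJSON()

testJson.type     // => 'a'
testJson.props    // => { href: '/' }
testJson.children // => ['Home']
```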

@@ -1,8 +0,0 @@
'use strict';
module.exports = function (val) {
if (val === null || val === undefined) {
return [];
}
return Array.isArray(val) ? val : [val];
};

@@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

@@ -1,36 +0,0 @@
# arrify [![Build Status](https://travis-ci.org/sindresorhus/arrify.svg?branch=master)](https://travis-ci.org/sindresorhus/arrify)
> Convert a value to an array
## Install
```
$ npm install --save arrify
```
## Usage
```js
const arrify = require('arrify');
arrify('unicorn');
//=> ['unicorn']
arrify(['unicorn']);
//=> ['unicorn']
arrify(null);
//=> []
arrify(undefined);
//=> []
```
*Supplying `null` or `undefined` results in an empty array.*
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)

@@ -1,75 +0,0 @@
{
"name": "@concordance/react",
"version": "2.0.0",
"description": "Compare, format, diff and serialize React trees with Concordance",
"main": "index.js",
"files": [
"lib",
"index.js"
],
"engines": {
"node": ">=6.12.3 <7 || >=8.9.4 <9 || >=10.0.0"
},
"scripts": {
"build:fixtures": "babel --presets=module:@babel/preset-react,module:ava/stage-4 --out-dir=test/fixtures/react --extensions=.jsx test/fixtures/react",
"lint": "as-i-preach",
"pretest": "npm run -s build:fixtures",
"test": "npm run -s lint && nyc ava"
},
"repository": {
"type": "git",
"url": "git+https://github.com/concordancejs/react.git"
},
"author": "Mark Wubben (https://novemberborn.net/)",
"license": "ISC",
"bugs": {
"url": "https://github.com/concordancejs/react/issues"
},
"homepage": "https://github.com/concordancejs/react#readme",
"keywords": [
"concordance-plugin",
"concordance",
"react"
],
"dependencies": {
"arrify": "^1.0.1"
},
"devDependencies": {
"@babel/cli": "^7.1.0",
"@babel/core": "^7.1.0",
"@babel/preset-react": "^7.0.0",
"@novemberborn/as-i-preach": "^10.1.0",
"ava": "1.0.0-beta.8",
"codecov": "^3.1.0",
"concordance": "^4.0.0",
"nyc": "^13.0.1",
"react": "^16.5.2",
"react-test-renderer": "^16.5.2"
},
"as-i-preach": {
"allowDevDependencies": [
"test/**/*.js",
"test/**/*.jsx"
],
"ignore": [
"test/fixtures/react/*.js"
]
},
"ava": {
"babel": {
"testOptions": {
"presets": [
"module:@babel/preset-react"
]
}
}
},
"nyc": {
"reporter": [
"html",
"lcov",
"text"
]
},
"standard-engine": "@novemberborn/as-i-preach"
}

@@ -1,132 +0,0 @@
/// <reference types="node" />
/// <reference lib="es2016" />
/// <reference lib="es2017.sharedmemory" />
/// <reference lib="esnext.asynciterable" />
/// <reference lib="dom" />
declare type TypedArray = Int8Array | Uint8Array | Uint8ClampedArray | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array;
declare type Primitive = null | undefined | string | number | boolean | Symbol;
export interface ArrayLike {
length: number;
}
export interface Class<T = unknown> {
new (...args: any[]): T;
}
declare type DomElement = object & {
nodeType: 1;
nodeName: string;
};
declare type NodeStream = object & {
pipe: Function;
};
export declare const enum TypeName {
null = "null",
boolean = "boolean",
undefined = "undefined",
string = "string",
number = "number",
symbol = "symbol",
Function = "Function",
GeneratorFunction = "GeneratorFunction",
AsyncFunction = "AsyncFunction",
Observable = "Observable",
Array = "Array",
Buffer = "Buffer",
Object = "Object",
RegExp = "RegExp",
Date = "Date",
Error = "Error",
Map = "Map",
Set = "Set",
WeakMap = "WeakMap",
WeakSet = "WeakSet",
Int8Array = "Int8Array",
Uint8Array = "Uint8Array",
Uint8ClampedArray = "Uint8ClampedArray",
Int16Array = "Int16Array",
Uint16Array = "Uint16Array",
Int32Array = "Int32Array",
Uint32Array = "Uint32Array",
Float32Array = "Float32Array",
Float64Array = "Float64Array",
ArrayBuffer = "ArrayBuffer",
SharedArrayBuffer = "SharedArrayBuffer",
DataView = "DataView",
Promise = "Promise",
URL = "URL"
}
declare function is(value: unknown): TypeName;
declare namespace is {
const undefined: (value: unknown) => value is undefined;
const string: (value: unknown) => value is string;
const number: (value: unknown) => value is number;
const function_: (value: unknown) => value is Function;
const null_: (value: unknown) => value is null;
const class_: (value: unknown) => value is Class<unknown>;
const boolean: (value: unknown) => value is boolean;
const symbol: (value: unknown) => value is Symbol;
const numericString: (value: unknown) => boolean;
const array: (arg: any) => arg is any[];
const buffer: (input: unknown) => input is Buffer;
const nullOrUndefined: (value: unknown) => value is null | undefined;
const object: (value: unknown) => value is object;
const iterable: (value: unknown) => value is IterableIterator<unknown>;
const asyncIterable: (value: unknown) => value is AsyncIterableIterator<unknown>;
const generator: (value: unknown) => value is Generator;
const nativePromise: (value: unknown) => value is Promise<unknown>;
const promise: (value: unknown) => value is Promise<unknown>;
const generatorFunction: (value: unknown) => value is GeneratorFunction;
const asyncFunction: (value: unknown) => value is Function;
const boundFunction: (value: unknown) => value is Function;
const regExp: (value: unknown) => value is RegExp;
const date: (value: unknown) => value is Date;
const error: (value: unknown) => value is Error;
const map: (value: unknown) => value is Map<unknown, unknown>;
const set: (value: unknown) => value is Set<unknown>;
const weakMap: (value: unknown) => value is WeakMap<object, unknown>;
const weakSet: (value: unknown) => value is WeakSet<object>;
const int8Array: (value: unknown) => value is Int8Array;
const uint8Array: (value: unknown) => value is Uint8Array;
const uint8ClampedArray: (value: unknown) => value is Uint8ClampedArray;
const int16Array: (value: unknown) => value is Int16Array;
const uint16Array: (value: unknown) => value is Uint16Array;
const int32Array: (value: unknown) => value is Int32Array;
const uint32Array: (value: unknown) => value is Uint32Array;
const float32Array: (value: unknown) => value is Float32Array;
const float64Array: (value: unknown) => value is Float64Array;
const arrayBuffer: (value: unknown) => value is ArrayBuffer;
const sharedArrayBuffer: (value: unknown) => value is SharedArrayBuffer;
const dataView: (value: unknown) => value is DataView;
const directInstanceOf: <T>(instance: unknown, klass: Class<T>) => instance is T;
const urlInstance: (value: unknown) => value is URL;
const urlString: (value: unknown) => boolean;
const truthy: (value: unknown) => boolean;
const falsy: (value: unknown) => boolean;
const nan: (value: unknown) => boolean;
const primitive: (value: unknown) => value is Primitive;
const integer: (value: unknown) => value is number;
const safeInteger: (value: unknown) => value is number;
const plainObject: (value: unknown) => boolean;
const typedArray: (value: unknown) => value is TypedArray;
const arrayLike: (value: unknown) => value is ArrayLike;
const inRange: (value: number, range: number | number[]) => boolean;
const domElement: (value: unknown) => value is DomElement;
const observable: (value: unknown) => boolean;
const nodeStream: (value: unknown) => value is NodeStream;
const infinite: (value: unknown) => boolean;
const even: (value: number) => boolean;
const odd: (value: number) => boolean;
const emptyArray: (value: unknown) => boolean;
const nonEmptyArray: (value: unknown) => boolean;
const emptyString: (value: unknown) => boolean;
const nonEmptyString: (value: unknown) => boolean;
const emptyStringOrWhitespace: (value: unknown) => boolean;
const emptyObject: (value: unknown) => boolean;
const nonEmptyObject: (value: unknown) => boolean;
const emptySet: (value: unknown) => boolean;
const nonEmptySet: (value: unknown) => boolean;
const emptyMap: (value: unknown) => boolean;
const nonEmptyMap: (value: unknown) => boolean;
const any: (predicate: unknown, ...values: unknown[]) => boolean;
const all: (predicate: unknown, ...values: unknown[]) => boolean;
}
export default is;

@@ -1,245 +0,0 @@
"use strict";
/// <reference lib="es2016"/>
/// <reference lib="es2017.sharedmemory"/>
/// <reference lib="esnext.asynciterable"/>
/// <reference lib="dom"/>
Object.defineProperty(exports, "__esModule", { value: true });
// TODO: Use the `URL` global when targeting Node.js 10
// tslint:disable-next-line
const URLGlobal = typeof URL === 'undefined' ? require('url').URL : URL;
const toString = Object.prototype.toString;
const isOfType = (type) => (value) => typeof value === type;
const isBuffer = (input) => !is.nullOrUndefined(input) && !is.nullOrUndefined(input.constructor) && is.function_(input.constructor.isBuffer) && input.constructor.isBuffer(input);
const getObjectType = (value) => {
const objectName = toString.call(value).slice(8, -1);
if (objectName) {
return objectName;
}
return null;
};
const isObjectOfType = (type) => (value) => getObjectType(value) === type;
function is(value) {
switch (value) {
case null:
return "null" /* null */;
case true:
case false:
return "boolean" /* boolean */;
default:
}
switch (typeof value) {
case 'undefined':
return "undefined" /* undefined */;
case 'string':
return "string" /* string */;
case 'number':
return "number" /* number */;
case 'symbol':
return "symbol" /* symbol */;
default:
}
if (is.function_(value)) {
return "Function" /* Function */;
}
if (is.observable(value)) {
return "Observable" /* Observable */;
}
if (Array.isArray(value)) {
return "Array" /* Array */;
}
if (isBuffer(value)) {
return "Buffer" /* Buffer */;
}
const tagType = getObjectType(value);
if (tagType) {
return tagType;
}
if (value instanceof String || value instanceof Boolean || value instanceof Number) {
throw new TypeError('Please don\'t use object wrappers for primitive types');
}
return "Object" /* Object */;
}
(function (is) {
// tslint:disable-next-line:strict-type-predicates
const isObject = (value) => typeof value === 'object';
// tslint:disable:variable-name
is.undefined = isOfType('undefined');
is.string = isOfType('string');
is.number = isOfType('number');
is.function_ = isOfType('function');
// tslint:disable-next-line:strict-type-predicates
is.null_ = (value) => value === null;
is.class_ = (value) => is.function_(value) && value.toString().startsWith('class ');
is.boolean = (value) => value === true || value === false;
is.symbol = isOfType('symbol');
// tslint:enable:variable-name
is.numericString = (value) => is.string(value) && value.length > 0 && !Number.isNaN(Number(value));
is.array = Array.isArray;
is.buffer = isBuffer;
is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value);
is.object = (value) => !is.nullOrUndefined(value) && (is.function_(value) || isObject(value));
is.iterable = (value) => !is.nullOrUndefined(value) && is.function_(value[Symbol.iterator]);
is.asyncIterable = (value) => !is.nullOrUndefined(value) && is.function_(value[Symbol.asyncIterator]);
is.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw);
is.nativePromise = (value) => isObjectOfType("Promise" /* Promise */)(value);
const hasPromiseAPI = (value) => !is.null_(value) &&
isObject(value) &&
is.function_(value.then) &&
is.function_(value.catch);
is.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value);
is.generatorFunction = isObjectOfType("GeneratorFunction" /* GeneratorFunction */);
is.asyncFunction = isObjectOfType("AsyncFunction" /* AsyncFunction */);
is.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype');
is.regExp = isObjectOfType("RegExp" /* RegExp */);
is.date = isObjectOfType("Date" /* Date */);
is.error = isObjectOfType("Error" /* Error */);
is.map = (value) => isObjectOfType("Map" /* Map */)(value);
is.set = (value) => isObjectOfType("Set" /* Set */)(value);
is.weakMap = (value) => isObjectOfType("WeakMap" /* WeakMap */)(value);
is.weakSet = (value) => isObjectOfType("WeakSet" /* WeakSet */)(value);
is.int8Array = isObjectOfType("Int8Array" /* Int8Array */);
is.uint8Array = isObjectOfType("Uint8Array" /* Uint8Array */);
is.uint8ClampedArray = isObjectOfType("Uint8ClampedArray" /* Uint8ClampedArray */);
is.int16Array = isObjectOfType("Int16Array" /* Int16Array */);
is.uint16Array = isObjectOfType("Uint16Array" /* Uint16Array */);
is.int32Array = isObjectOfType("Int32Array" /* Int32Array */);
is.uint32Array = isObjectOfType("Uint32Array" /* Uint32Array */);
is.float32Array = isObjectOfType("Float32Array" /* Float32Array */);
is.float64Array = isObjectOfType("Float64Array" /* Float64Array */);
is.arrayBuffer = isObjectOfType("ArrayBuffer" /* ArrayBuffer */);
is.sharedArrayBuffer = isObjectOfType("SharedArrayBuffer" /* SharedArrayBuffer */);
is.dataView = isObjectOfType("DataView" /* DataView */);
is.directInstanceOf = (instance, klass) => Object.getPrototypeOf(instance) === klass.prototype;
is.urlInstance = (value) => isObjectOfType("URL" /* URL */)(value);
is.urlString = (value) => {
if (!is.string(value)) {
return false;
}
try {
new URLGlobal(value); // tslint:disable-line no-unused-expression
return true;
}
catch (_a) {
return false;
}
};
is.truthy = (value) => Boolean(value);
is.falsy = (value) => !value;
is.nan = (value) => Number.isNaN(value);
const primitiveTypes = new Set([
'undefined',
'string',
'number',
'boolean',
'symbol'
]);
is.primitive = (value) => is.null_(value) || primitiveTypes.has(typeof value);
is.integer = (value) => Number.isInteger(value);
is.safeInteger = (value) => Number.isSafeInteger(value);
is.plainObject = (value) => {
// From: https://github.com/sindresorhus/is-plain-obj/blob/master/index.js
let prototype;
return getObjectType(value) === "Object" /* Object */ &&
(prototype = Object.getPrototypeOf(value), prototype === null || // tslint:disable-line:ban-comma-operator
prototype === Object.getPrototypeOf({}));
};
const typedArrayTypes = new Set([
"Int8Array" /* Int8Array */,
"Uint8Array" /* Uint8Array */,
"Uint8ClampedArray" /* Uint8ClampedArray */,
"Int16Array" /* Int16Array */,
"Uint16Array" /* Uint16Array */,
"Int32Array" /* Int32Array */,
"Uint32Array" /* Uint32Array */,
"Float32Array" /* Float32Array */,
"Float64Array" /* Float64Array */
]);
is.typedArray = (value) => {
const objectType = getObjectType(value);
if (objectType === null) {
return false;
}
return typedArrayTypes.has(objectType);
};
const isValidLength = (value) => is.safeInteger(value) && value > -1;
is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length);
is.inRange = (value, range) => {
if (is.number(range)) {
return value >= Math.min(0, range) && value <= Math.max(range, 0);
}
if (is.array(range) && range.length === 2) {
return value >= Math.min(...range) && value <= Math.max(...range);
}
throw new TypeError(`Invalid range: ${JSON.stringify(range)}`);
};
const NODE_TYPE_ELEMENT = 1;
const DOM_PROPERTIES_TO_CHECK = [
'innerHTML',
'ownerDocument',
'style',
'attributes',
'nodeValue'
];
is.domElement = (value) => is.object(value) && value.nodeType === NODE_TYPE_ELEMENT && is.string(value.nodeName) &&
!is.plainObject(value) && DOM_PROPERTIES_TO_CHECK.every(property => property in value);
is.observable = (value) => {
if (!value) {
return false;
}
if (value[Symbol.observable] && value === value[Symbol.observable]()) {
return true;
}
if (value['@@observable'] && value === value['@@observable']()) {
return true;
}
return false;
};
is.nodeStream = (value) => !is.nullOrUndefined(value) && isObject(value) && is.function_(value.pipe) && !is.observable(value);
is.infinite = (value) => value === Infinity || value === -Infinity;
const isAbsoluteMod2 = (rem) => (value) => is.integer(value) && Math.abs(value % 2) === rem;
is.even = isAbsoluteMod2(0);
is.odd = isAbsoluteMod2(1);
const isWhiteSpaceString = (value) => is.string(value) && /\S/.test(value) === false;
is.emptyArray = (value) => is.array(value) && value.length === 0;
is.nonEmptyArray = (value) => is.array(value) && value.length > 0;
is.emptyString = (value) => is.string(value) && value.length === 0;
is.nonEmptyString = (value) => is.string(value) && value.length > 0;
is.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value);
is.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0;
is.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0;
is.emptySet = (value) => is.set(value) && value.size === 0;
is.nonEmptySet = (value) => is.set(value) && value.size > 0;
is.emptyMap = (value) => is.map(value) && value.size === 0;
is.nonEmptyMap = (value) => is.map(value) && value.size > 0;
const predicateOnArray = (method, predicate, values) => {
if (is.function_(predicate) === false) {
throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`);
}
if (values.length === 0) {
throw new TypeError('Invalid number of values');
}
return method.call(values, predicate);
};
// tslint:disable variable-name
is.any = (predicate, ...values) => predicateOnArray(Array.prototype.some, predicate, values);
is.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values);
// tslint:enable variable-name
})(is || (is = {}));
// Some few keywords are reserved, but we'll populate them for Node.js users
// See https://github.com/Microsoft/TypeScript/issues/2536
Object.defineProperties(is, {
class: {
value: is.class_
},
function: {
value: is.function_
},
null: {
value: is.null_
}
});
exports.default = is;
// For CommonJS default export support
module.exports = is;
module.exports.default = is;
//# sourceMappingURL=index.js.map
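For reference, a small usage sketch of the predicates defined above (based only on the code shown here):

```js
const is = require('@sindresorhus/is')

is('🦄')                  // => 'string'
is(new Map())             // => 'Map'
is.number(6)              // => true
is.nullOrUndefined(null)  // => true
is.inRange(4, [0, 10])    // => true
is.null(null)             // => true (reserved-word alias defined at the end)
```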

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff.