Compare commits

...

471 Commits

Author SHA1 Message Date
Simon Engledew
8d18e347a7 Merge pull request #319 from github/simon-engledew/lint-workspace
Validate the codeql-action workspace for common errors
2020-11-26 16:36:46 +00:00
Simon Engledew
92df38732c Merge branch 'main' into simon-engledew/lint-workspace 2020-11-26 15:22:22 +00:00
Simon Engledew
be09fb3e99 Implement feedback 2020-11-26 15:20:38 +00:00
Robert
7d74882aaf Merge pull request #321 from ericcornelissen/eslint/github-no-then
Update code so "github/no-then" passes
2020-11-26 14:35:51 +00:00
Eric Cornelissen
e6ea8cbae0 Run npm run build 2020-11-25 22:46:30 +01:00
Eric Cornelissen
cd727934bf Update source so github/no-then passes 2020-11-25 22:45:53 +01:00
Eric Cornelissen
35fd0a93b1 Update ESLint configuration
Remove the line to disable the "github/no-then" rule.
2020-11-25 22:44:45 +01:00
Simon Engledew
378f1f95d7 Merge pull request #320 from github/simon-engledew/fix-DEP0005-buffer
Fix deprecated method Buffer.new
2020-11-25 16:25:07 +00:00
Simon Engledew
eed314143b Add paths-ignore case 2020-11-25 14:23:21 +00:00
Simon Engledew
253d46ac97 Better copy for PathsSpecified, suggested by @sampart 2020-11-25 13:30:32 +00:00
Simon Engledew
c5f58f3ee7 Markdown is not supported - roll back and fix typo 2020-11-25 13:06:51 +00:00
Simon Engledew
94b10dbb8f See if markdown is supported 2020-11-25 12:46:37 +00:00
Simon Engledew
85aefe5fa4 Signpost the latest documentation in the warning 2020-11-25 12:45:13 +00:00
Simon Engledew
582f792089 Fix deprecated method Buffer.new 2020-11-25 12:38:45 +00:00
Simon Engledew
8d468d67de Improve linting hints 2020-11-25 12:36:59 +00:00
Simon Engledew
c06dcf8fa2 Update source 2020-11-25 11:03:48 +00:00
Simon Engledew
cb3b3a8cb5 Comment the lint cases to make maintaining easier 2020-11-25 10:55:06 +00:00
Simon Engledew
92ffb08081 Tidy up ready for review 2020-11-24 17:21:25 +00:00
Simon Engledew
1020df5700 Merge branch 'main' into simon-engledew/lint-workspace 2020-11-24 14:55:54 +00:00
Simon Engledew
fd16298746 Revert an accidental change to make testing quicker 2020-11-24 14:28:08 +00:00
Simon Engledew
2ac22e8935 Send short codes that do not need URL encoding for better splunk tracing 2020-11-24 12:43:08 +00:00
Sam Partington
27520b94c4 Merge pull request #318 from github/api-param-object
Introduce parameter object for API params that travel together
2020-11-24 12:10:47 +00:00
Sam Partington
3ee4739b13 Make anonymous objects into variables for readability 2020-11-24 11:23:53 +00:00
Sam Partington
ab9b1a72db Fix tests
Previously, most tests were using https://github.com and only the first was using https://github.example.com. As it happens, https://github.com works for all of them.
2020-11-24 11:10:25 +00:00
Simon Engledew
6df1fc5e38 Do not fail if the workflow has been deleted 2020-11-24 10:51:31 +00:00
Simon Engledew
754f502a84 Workflow triggers are null if unspecified 2020-11-24 10:25:58 +00:00
Simon Engledew
c0bd7b0b2b Handle relative workflow paths 2020-11-24 09:56:10 +00:00
Simon Engledew
33bb87523e Write a warning if there is an error with the workflow 2020-11-24 09:51:00 +00:00
Simon Engledew
7eb9dfcc60 Add a function that can lint a CodeQL action workflow 2020-11-23 17:29:19 +00:00
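The workflow-linting commits above add checks for common mistakes in a CodeQL workflow file, such as `paths`/`paths-ignore` filters on the push trigger or triggers that parse as `null`. A minimal sketch of one such check, with an illustrative warning code and workflow shape (not the PR's actual implementation):

```typescript
// Sketch of one workflow lint: warn when a push trigger uses `paths` or
// `paths-ignore`, which can stop results being uploaded for some commits.
// The warning code and Workflow shape are illustrative.
interface WorkflowTrigger {
  paths?: string[];
  "paths-ignore"?: string[];
}

interface Workflow {
  on?: string | string[] | { push?: WorkflowTrigger | null };
}

function getWorkflowErrors(doc: Workflow): string[] {
  const errors: string[] = [];
  // `on` can be a string, an array, or an object whose triggers are null
  // when written as a bare `push:` key with no value.
  if (typeof doc.on === "object" && doc.on !== null && !Array.isArray(doc.on)) {
    const push = doc.on.push;
    if (push && (push.paths?.length || push["paths-ignore"]?.length)) {
      errors.push("PathsSpecified");
    }
  }
  return errors;
}
```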
Sam Partington
20567b5888 Introduce parameter object for API params that travel together 2020-11-23 14:39:01 +00:00
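The parameter-object refactor groups API values that always travel together (URL, repository, token) into one argument instead of threading them through separately. A sketch of the pattern with illustrative names:

```typescript
// Illustrative sketch of the parameter-object pattern: values that always
// travel together become one interface instead of separate arguments.
interface RepositoryNwo {
  owner: string;
  repo: string;
}

interface GitHubApiDetails {
  auth: string;
  url: string;
  repository: RepositoryNwo;
}

// Callers now pass a single `apiDetails` object through the call chain
// instead of threading auth, url, owner and repo individually.
function describeUploadTarget(apiDetails: GitHubApiDetails): string {
  const { url, repository } = apiDetails;
  return `${url}/repos/${repository.owner}/${repository.repo}`;
}
```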
David Verdeguer
b15854c9af Merge pull request #316 from The-Compiler/patch-1
Make sure a Python 2 pip is installed
2020-11-20 17:28:05 +01:00
Florian Bruhin
b168eee469 Make sure a Python 2 pip is installed
In the README of this repo, it's suggested to use the `ubuntu-latest` environment.
That environment will soon be upgraded to Ubuntu 20.04: https://github.com/actions/virtual-environments/issues/1816

As pointed out in that issue, the updated image comes without a Python 2 pip preinstalled:

    Setup Python dependencies
    /home/runner/work/_actions/github/codeql-action/v1/python-setup/install_tools.sh
    [...]
      + python2 -m pip install --user --upgrade pip setuptools wheel
      /usr/bin/python2: No module named pip
    Warning: Unable to download and extract the tools needed for installing the python dependecies. You can call this action with 'setup-python-dependencies: false' to disable this process.
2020-11-20 16:48:43 +01:00
Chris Gavin
7589c051a9 Merge pull request #313 from github/query-binary-planting
Add a query to detect binary planting vulnerabilities.
2020-11-20 15:25:01 +00:00
Chris Gavin
f5e028fd83 Merge branch 'main' into query-binary-planting 2020-11-20 15:04:10 +00:00
Robert
c4b7211148 Merge pull request #311 from ericcornelissen/fix-typos
Fix typos in source code, logging, comments, and config files
2020-11-20 13:42:37 +00:00
Eric Cornelissen
6aaf0483f0 Merge branch 'main' into fix-typos 2020-11-20 14:32:12 +01:00
Chris Gavin
c5d599ecb2 Merge pull request #310 from ericcornelissen/eslint-rule/no-shadow
Update code so "no-shadow" passes
2020-11-20 13:23:13 +00:00
Eric Cornelissen
6ed5c82bb9 Merge branch 'main' into fix-typos 2020-11-20 13:59:16 +01:00
Eric Cornelissen
82ba92f462 Update spelling for autobuild to auto-built 2020-11-20 13:58:20 +01:00
Chris Gavin
7091b81414 Merge branch 'main' into eslint-rule/no-shadow 2020-11-20 12:44:18 +00:00
Chris Gavin
6d232b4ec8 Merge pull request #314 from github/skip-integration-tests-prs-from-forks
Skip Runner integration tests on pull requests from forks.
2020-11-20 12:27:46 +00:00
Chris Gavin
b59d204bbe Merge branch 'main' into skip-integration-tests-prs-from-forks 2020-11-20 12:18:45 +00:00
Chris Gavin
0a05e95b52 Merge pull request #312 from ericcornelissen/fix-minor-code-mistakes
Two minor code fixes
2020-11-20 11:48:19 +00:00
Chris Gavin
fd36bec497 Skip Runner integration tests on pull requests from forks. 2020-11-20 11:47:43 +00:00
Chris Gavin
a04d948b04 Merge branch 'main' into fix-minor-code-mistakes 2020-11-20 11:37:27 +00:00
Chris Gavin
b03b9fe641 Add a query to detect binary planting vulnerabilities. 2020-11-20 11:34:33 +00:00
Eric Cornelissen
35b050652a Run npm run build 2020-11-20 11:48:25 +01:00
Simon Engledew
31872f129b Merge pull request #308 from github/simon-engledew/fast-fail
Abort CodeQL action if the status cannot be reported
2020-11-20 10:45:40 +00:00
Eric Cornelissen
5416d4f3b5 Run npm run build 2020-11-20 11:35:59 +01:00
Eric Cornelissen
cf8c79ca35 Fix unused sorted value in update-release-branch
Fix a minor issue in the update-release-branch.py script that performs a
call to `sorted` but doesn't use the output. Since `sorted` does not
operate in place, the call is currently useless. As a result, the function
`get_pr_for_commit` does not currently work as expected: it is meant to
return the "first" (i.e. lowest-numbered) PR, but actually it returns the
first entry in the list provided by GitHub.
2020-11-20 11:20:45 +01:00
Eric Cornelissen
4e8634c29c Remove duplicate statement in tracer-config test
The tracer-config.test.js file contained a duplicate of the statement:

  process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";

one line apart. This removes the second instance of this statement.
2020-11-20 11:14:08 +01:00
Eric Cornelissen
512c07d9a3 Fix typos in Action config files 2020-11-20 11:11:17 +01:00
Eric Cornelissen
85ea24bd20 Fix typos in src
- Rename "toolrunnner" (three 'n') to "toolrunner"
- Rename "relativeFilepaht" to "relativeFilepath"
- Fix various typos in documentation & comments
- Fix typos in logs and test names
2020-11-20 11:00:55 +01:00
Simon Engledew
6a45994b42 Merge branch 'main' into simon-engledew/fast-fail 2020-11-20 09:21:26 +00:00
Eric Cornelissen
992a0cf8f2 Merge branch 'main' into eslint-rule/no-shadow 2020-11-19 23:41:11 +01:00
Eric Cornelissen
847f4ef293 Run npm run build 2020-11-19 23:03:45 +01:00
Eric Cornelissen
483c94b974 Fix no shadow issues in upload-lib.ts
Rename one instance of "path" to avoid shadowing.
2020-11-19 22:54:48 +01:00
Eric Cornelissen
74559947b1 Fix no shadow issues in fingerprints.ts
Rename various instances of "hash" that shadow the function with that
name.
2020-11-19 22:54:05 +01:00
Eric Cornelissen
ffe94681e4 Fix no shadow issues in fingerprints.test.ts
Rename shadowing "uri" argument to the more explicit "artifactURI".
2020-11-19 22:51:29 +01:00
Eric Cornelissen
2a2910e693 Fix no shadow issues in config-utils.ts
Rename throwaway variable "suite" to "found" when assigned from "find".

Rename local variable "path" to "newPath" as it is a modification of
the "originalPath" provided to `validateAndSanitisePath`.

Rename instances of "path" to the more explicit variants "ignorePath" and
"includePath". Maybe "ignoredPath" and "includedPath" are better names?
2020-11-19 22:50:02 +01:00
Eric Cornelissen
06e99f1523 Fix no shadow issues in config-utils.test.ts
Rename "queries" variable in test cases to "testQueries" to avoid
shadowing it in a subsequent helper function call (4 times).

Rename "path" twice in a hlper function to "validPath" and "invalidPath"
to avoid shadowing "path". The new names are more explicit.
2020-11-19 22:47:32 +01:00
Eric Cornelissen
98ad63b240 Fix no shadow issues in codeql.ts
Two simple variable renames from "path" to "paths" since the types are
arrays of strings (not just one string).

One function definition inside a function moved outside that function
to avoid shadowing the "options" argument.
2020-11-19 22:45:15 +01:00
Eric Cornelissen
b54c2aab11 Fix no shadow issues in api-client.ts
Duplicate use of "_" placeholder argument name. This change may conflict
with #192.
2020-11-19 22:42:13 +01:00
Eric Cornelissen
22f779c5e6 Update ESLint configuration
Remove the "no-shadow": "off" override and replace it by enabling
"@typescript-eslint/no-shadow" in the "rules" section, following the
typescript-eslint docs:
https://github.com/typescript-eslint/typescript-eslint/blob/master/packages/eslint-plugin/docs/rules/no-shadow.md#how-to-use
2020-11-19 22:38:38 +01:00
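For context, `no-shadow` reports an inner declaration that reuses a name from an enclosing scope. A minimal sketch of the pattern the renames above eliminate, echoing the `hash` example from the fingerprints.ts commit (names are illustrative):

```typescript
// A minimal illustration of what @typescript-eslint/no-shadow reports.
// Names are illustrative, not taken from the action's source.
function hash(input: string): number {
  return input.length; // stand-in for a real hash
}

// Before: the parameter `hash` shadows the outer function `hash`, so the rule errors.
function fingerprintShadowed(lines: string[], hash: (s: string) => number): number[] {
  return lines.map((line) => hash(line));
}

// After: renaming the parameter (e.g. to `hashFn`) removes the shadowing,
// mirroring the renames in the fingerprints.ts and config-utils.ts commits above.
function fingerprint(lines: string[], hashFn: (s: string) => number): number[] {
  return lines.map((line) => hashFn(line));
}
```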
Robert
aafb457527 Merge pull request #222 from github/robertbrignull/go_build_trace
Check CODEQL_EXTRACTOR_GO_TRACE and treat Go as a traced language
2020-11-19 18:01:53 +00:00
Robert
0b0bc35050 Merge branch 'main' into robertbrignull/go_build_trace 2020-11-19 17:20:54 +00:00
Simon Engledew
eb4226ede4 Scanning endpoint failures should not halt the scan 2020-11-19 15:49:46 +00:00
Simon Engledew
7fda765d49 Merge remote-tracking branch 'origin/main' into simon-engledew/fast-fail 2020-11-19 13:55:47 +00:00
Chris Gavin
0924fb6b26 Merge pull request #309 from github/safe-which
Ensure unqualified program names are present on `PATH` before executing them.
2020-11-19 13:52:57 +00:00
Simon Engledew
17d4671d60 Fail processing on a 422 as well
Until there is a more robust versioning system, it is probably safest to require endpoint compatibility and not continue the action if there is a mismatch.
2020-11-19 13:14:45 +00:00
Simon Engledew
f3ff4c84ba Implement review feedback 2020-11-19 12:39:57 +00:00
Chris Gavin
726cfc8441 Ensure unqualified program names are present on PATH before executing them. 2020-11-18 22:20:13 +00:00
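The idea is to resolve an unqualified program name against `PATH` before spawning it, so a missing binary fails early and predictably. A sketch using `@actions/io` (the `safeWhich` helper name is illustrative; the PR's implementation may differ):

```typescript
import * as io from "@actions/io";
import * as toolrunner from "@actions/exec/lib/toolrunner";

// Resolve an unqualified program name on PATH before executing it, so a
// missing binary fails early instead of being spawned blindly.
async function safeWhich(program: string): Promise<string> {
  // `io.which` with check=true throws if the tool cannot be found on PATH.
  return await io.which(program, true);
}

async function runGit(args: string[]): Promise<number> {
  const gitPath = await safeWhich("git");
  return await new toolrunner.ToolRunner(gitPath, args).exec();
}
```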
Simon Engledew
68dedeaa57 Do not run CodeQL if code scanning is not enabled
Put more fine-grained logic around which errors we ignore and process.
Reinstate status reporting in Enterprise.
Abort the code scanning process if the status endpoint reports it is not configured.
2020-11-18 17:21:57 +00:00
Robert
dc80b016b6 Merge pull request #301 from github/robertbrignull/tools_version
Send action ref and tool version in status reports
2020-11-18 11:38:26 +00:00
Robert
0d960df08a Merge branch 'main' into robertbrignull/tools_version 2020-11-18 11:25:46 +00:00
Chris Gavin
10b43b815a Merge pull request #231 from github/add-multi-cause-markdown-flag
Enable the CodeQL SARIF multi-cause markdown flag.
2020-11-18 08:39:30 +00:00
Chris Gavin
f94e06a382 Merge main into add-multi-cause-markdown-flag. 2020-11-18 08:26:39 +00:00
Robert
d4eb1e36af Merge pull request #302 from github/robertbrignull/no_tracing
Don't use the word "trace" in log messages
2020-11-13 15:27:37 +00:00
Robert
54c857ce0a update build command 2020-11-13 15:15:58 +00:00
Robert
3f2a7abc7b change voicing 2020-11-13 15:04:54 +00:00
Robert
e3a9a7a91c Update init/action.yml
Co-authored-by: Sam Partington <sampart@github.com>
2020-11-13 12:51:25 +00:00
Robert
4d8912d269 Don't use the word "trace" in log messages 2020-11-13 12:01:07 +00:00
Robert
af27146b64 Merge branch 'main' into robertbrignull/go_build_trace 2020-11-13 10:24:37 +00:00
Robert
1737b806ff check if running locally 2020-11-12 14:18:58 +00:00
Robert
80b43ca9d3 send action ref and tool version in status reports 2020-11-12 12:27:31 +00:00
Robert
acacf9bbd5 Merge pull request #299 from github/azure_pipelines
Detect Agent.Worker.exe
2020-11-11 10:28:25 +00:00
Robert
9a7b7cb035 Merge branch 'main' into azure_pipelines 2020-11-11 10:12:56 +00:00
Chris Gavin
fdb0d486b6 Merge pull request #298 from github/use-github-action-repository-variable
Make use of the `GITHUB_ACTION_REPOSITORY` environment variable if it is set.
2020-11-10 18:13:50 +00:00
Chris Gavin
241fca876b Merge branch 'main' into use-github-action-repository-variable 2020-11-10 17:55:13 +00:00
Robert
aae4713a4d detect Agent.Worker.exe 2020-11-10 16:16:58 +00:00
Robin Neatherway
d3285a0ea2 Merge pull request #297 from github/rneatherway-patch-1
Change suggested workflow to analyse merge commit
2020-11-10 14:46:28 +00:00
Chris Gavin
bf30ea69d6 Make use of the GITHUB_ACTION_REPOSITORY environment variable if it is set. 2020-11-10 13:30:20 +00:00
Robin Neatherway
988704e971 Change suggested workflow to analyse merge commit 2020-11-10 12:10:07 +00:00
Robert
409b71a3d0 Merge pull request #296 from 0xflotus/patch-1
fix: small typo in import-action-entrypoint.ql
2020-11-10 09:34:24 +00:00
0xflotus
1870040fac fix: small typo in import-action-entrypoint.ql 2020-11-10 00:38:46 +01:00
Robin Neatherway
4b301bd34e Merge pull request #294 from github/rneatherway/bump-codeql-20201106
Update default CodeQL bundle version
2020-11-09 14:05:10 +00:00
Robin Neatherway
ab40235d88 Update default CodeQL bundle version 2020-11-09 13:00:43 +00:00
Simon Engledew
f13bd452d7 Merge pull request #291 from github/simon-engledew/output-codeql-path
Add a codeql-path output to the init action
2020-11-05 10:22:10 +00:00
Simon Engledew
f76124122e Remove output from README
As this is an advanced usage, it makes more sense to work towards getting this included in the documentation instead.
2020-11-05 08:31:35 +00:00
Simon Engledew
c87f3021d4 Expand readme to include codeql-path output example
Also add example from README into workflow to confirm it is accurate.
2020-11-04 19:35:19 +00:00
Simon Engledew
54f3e52e8f Move setOutput into try block in case it errors 2020-11-04 19:29:06 +00:00
Simon Engledew
ff6db59d5a Fix indentation for action output
Co-authored-by: Chris Gavin <chris@chrisgavin.me>
2020-11-04 19:16:09 +00:00
Simon Engledew
77f914a4ba Add codeql-path to README (wip) 2020-11-04 17:22:32 +00:00
Simon Engledew
c213a7c7c9 Use codeql-path output in python-deps workflow 2020-11-04 17:22:32 +00:00
Simon Engledew
854109fe92 Report the path of CodeQL as an output 2020-11-04 15:38:31 +00:00
Simon Engledew
0ed3207eb5 Merge pull request #289 from github/simon-engledew/fix-act-local
Fix invalid API call when running locally
2020-11-04 14:30:51 +00:00
Simon Engledew
df843a2867 Merge branch 'main' into simon-engledew/fix-act-local 2020-11-04 12:47:38 +00:00
Robert
82e3812a35 Merge branch 'main' into robertbrignull/go_build_trace 2020-11-04 11:07:46 +00:00
Simon Engledew
1c8d72e0c2 Remove unnecessary env manipulation
testing-utils.ts / setupTests() already stores and restores the env
2020-11-04 10:59:35 +00:00
Simon Engledew
5c0e2f93f1 Fix invalid API call when running locally
The method getAnalysisKey would call getWorkflowPath and raise an API Error.

This change follows the pattern in prepareLocalRunEnvironment to set a dummy value for the required environment variable, thereby short-circuiting the API request.
2020-11-04 09:58:09 +00:00
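A sketch of that shortcut; the environment-variable names are illustrative, standing in for whatever `getAnalysisKey` actually reads before it would call the API:

```typescript
// Sketch only: when running outside Actions (e.g. under `act`), pre-seed the
// value that getAnalysisKey would otherwise fetch from the workflows API.
// The variable names below are illustrative, not necessarily what the action uses.
function prepareLocalRunEnvironment(): void {
  if (!process.env["CODEQL_LOCAL_RUN"]) {
    return; // running on Actions proper: let the API supply the real value
  }
  if (!process.env["CODEQL_ACTION_ANALYSIS_KEY"]) {
    // Any stable dummy value works; its only job is to avoid the API round trip.
    const job = process.env["GITHUB_JOB"] || "UNKNOWN-JOB";
    process.env["CODEQL_ACTION_ANALYSIS_KEY"] = `LOCAL-RUN:${job}`;
  }
}
```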
Chris Gavin
b6989db81e Merge pull request #288 from github/tweak-bump-supported-versions
Make a few small tweaks to the update-supported-enterprise-server-versions script.
2020-11-04 09:48:05 +00:00
Chris Gavin
813b5235a1 Merge branch 'main' into tweak-bump-supported-versions 2020-11-04 09:08:27 +00:00
Andrew Eisenberg
bc1ee1620f Add the --threads config option to finalize db (#281)
This flag is already being used for `runQueries`, so let's use it for
finalize as well.
2020-11-03 08:25:40 -08:00
Simon Engledew
a6c99e6b5b Merge pull request #287 from github/disable-gpgsign-in-tests
Disable gpg key signing in tests
2020-11-03 15:08:31 +00:00
Simon Engledew
d7bd6e39e3 Merge branch 'main' into disable-gpgsign-in-tests 2020-11-03 14:47:39 +00:00
Chris Gavin
4ffed2603e Only increase the newest supported release, never reduce it. 2020-11-03 13:58:07 +00:00
Chris Gavin
beac9d5621 Fix a copy and pasted job name. 2020-11-03 13:57:41 +00:00
Chris Gavin
1364a74a5d Merge pull request #285 from github/check-api-version
Log a warning if the API version is not supported.
2020-11-03 13:36:53 +00:00
Chris Gavin
b16110e60e Log the version warning a second time if a request fails unexpectedly. 2020-11-03 12:57:15 +00:00
Simon Engledew
efc3797e30 Disable gpg key signing in tests
This avoids a popup appearing if your GPG key has a passphrase and ensures the tests still pass even if GPG is misconfigured.
2020-11-03 11:05:49 +00:00
Chris Gavin
5e2fa08dae Merge main into check-api-version. 2020-11-02 09:02:05 +00:00
Chris Gavin
1a4385d516 Only log the version warning once on Actions even if the Action is invoked multiple times. 2020-11-02 09:01:36 +00:00
Chris Gavin
865b4bd832 Pass a logger in to getApiClient() rather than constructing one there. 2020-11-02 08:53:25 +00:00
Chris Gavin
1f7bae7ab8 Use an undefined check rather than hasOwnProperty. 2020-11-02 08:47:11 +00:00
Robert
9a0139eee2 Merge pull request #274 from github/jhutchings1-patch-1
Change to the latest support link
2020-10-30 17:51:26 +00:00
Robert
736f65db3e Merge branch 'main' into jhutchings1-patch-1 2020-10-30 17:28:53 +00:00
Robert Brignull
2e550bba7f Check CODEQL_EXTRACTOR_GO_BUILD_TRACING
Co-authored-by: Max Schaefer <54907921+max-schaefer@users.noreply.github.com>
2020-10-30 16:44:11 +00:00
Robert
7d5b76a81b Merge pull request #272 from sonicdoe/patch-1
Fix alignment of cron comment
2020-10-30 16:36:12 +00:00
Robert
fb10307267 Merge branch 'main' into patch-1 2020-10-30 16:24:53 +00:00
David Verdeguer
f0c568a42f Merge pull request #284 from github/daverlo/min-disk-free
Add min disk free flag to database analyze:
2020-10-30 16:17:54 +01:00
Chris Gavin
1220ae5bfd Log a warning if the API version is not supported. 2020-10-30 12:20:06 +00:00
David Verdeguer
04e7c3cfe7 Merge branch 'main' into daverlo/min-disk-free 2020-10-30 11:25:55 +01:00
David Verdeguer
7b571208e1 Merge pull request #280 from github/daverlo/update-codeql
Use codeql 20201028
2020-10-29 11:42:57 +01:00
David Verdeguer
d96ee1a48c Fix python tests 2020-10-28 16:31:15 +01:00
David Verdeguer
f46875dae9 Use codeql 20201028 2020-10-28 15:18:46 +01:00
Chris Gavin
2d75893188 Enable the CodeQL SARIF multi-cause markdown flag. 2020-10-27 13:50:40 +00:00
David Verdeguer
46110c361b Merge pull request #278 from github/daverlo/setup-python-dependencies
Use setup-python-dependencies input
2020-10-27 14:36:20 +01:00
David Verdeguer
8db7d74c10 Merge branch 'main' into daverlo/setup-python-dependencies 2020-10-27 11:47:00 +01:00
Chris Gavin
abe6b7b085 Merge pull request #276 from github/fix-ci
Fix Python CI jobs.
2020-10-27 10:20:15 +00:00
David Verdeguer
4575212a76 Use setup-python-dependencies input 2020-10-27 10:06:17 +01:00
Chris Gavin
85c65cd6bf Always use the latest CodeQL in the Python dependencies tests. 2020-10-26 14:17:11 +00:00
Chris Gavin
65efaf83e9 Fix Python CI jobs. 2020-10-26 08:31:09 +00:00
Justin Hutchings
762078e765 Change to the latest support link
Support is recommending we redirect users to a new page for formal support requests.
2020-10-23 13:22:06 -07:00
Jakob Krigovsky
ea5ae18876 Fix alignment of cron comment 2020-10-22 18:29:58 +02:00
David Verdeguer
fb7d0f72e5 Merge pull request #270 from github/daverlo/python-mac
Port python deps setup to mac
2020-10-22 15:09:38 +02:00
David Verdeguer
e23b3ef342 Merge branch 'main' into daverlo/python-mac 2020-10-22 14:01:07 +02:00
David Verdeguer
bf20a55f26 Combine python deps tests workflows 2020-10-22 14:00:51 +02:00
David Verdeguer
badb2863db Merge pull request #271 from github/daverlo/fail-analyze
Fail the analyze action when some language fails to run the queries
2020-10-22 12:28:37 +02:00
David Verdeguer
d6287621f6 Fail the analyze action when some language fails to run the queries 2020-10-22 10:22:27 +02:00
Robin Neatherway
6ac5978b44 Merge pull request #266 from github/robertbrignull/runner_workflow
Add a workflow to build and upload the runner
2020-10-21 10:17:00 +01:00
Robin Neatherway
59913e8e05 Merge remote-tracking branch 'origin/robertbrignull/runner_workflow' into robertbrignull/runner_workflow 2020-10-20 15:04:56 +01:00
Robin Neatherway
269b8b9bae Update for review comments 2020-10-20 15:04:04 +01:00
Robin Neatherway
b6e9407b12 Merge branch 'main' into robertbrignull/runner_workflow 2020-10-20 11:59:11 +01:00
Robin Neatherway
c1e2c53b95 Remove push trigger now that I've tested it works 2020-10-20 11:58:20 +01:00
Robin Neatherway
d91d2d2873 Upload the runner to the release directly 2020-10-20 11:00:00 +01:00
David Verdeguer
1c789715a7 Address comments 2020-10-20 10:49:48 +02:00
David Verdeguer
ce8418a2ed Only run tests on main and v1 2020-10-19 13:08:20 +02:00
David Verdeguer
2391771a3f Allow installing python deps on mac 2020-10-19 12:45:33 +02:00
David Verdeguer
04f2f600bc Adapt true for mac 2020-10-19 12:36:34 +02:00
David Verdeguer
8e516b1d36 Run tests 2020-10-19 12:21:58 +02:00
David Verdeguer
c860191a1a Run python deps tests on mac 2020-10-19 12:21:21 +02:00
David Verdeguer
c8b8c0415e Merge pull request #257 from github/daverlo/python-windows
Port python deps setup to windows
2020-10-15 14:48:25 +02:00
David Verdeguer
e0f78380e3 Merge branch 'main' into daverlo/python-windows 2020-10-15 12:12:50 +02:00
David Verdeguer
a41d1bd815 Merge pull request #261 from github/rasmuswl/python-update-setenv
Don't use ::set-env in python-setup
2020-10-15 11:59:37 +02:00
David Verdeguer
7d1f309c39 Address comments 2020-10-15 11:56:40 +02:00
Rasmus Wriedt Larsen
bc72944cba Merge branch 'main' into rasmuswl/python-update-setenv 2020-10-15 10:37:45 +02:00
David Verdeguer
2c3dafc162 Use sys.executable 2020-10-14 10:54:41 +02:00
Robert
3f8927dbc5 Merge pull request #259 from andymckay/patch-1
Change time in template
2020-10-13 17:20:16 +01:00
Robert
345bcba3ea Merge branch 'main' into patch-1 2020-10-13 16:47:00 +01:00
David Verdeguer
e63596efd3 Merge branch 'main' into daverlo/python-windows 2020-10-13 17:44:16 +02:00
David Verdeguer
0d97ea8f14 Call pipenv and poetry through python 2020-10-13 17:38:53 +02:00
Robin Neatherway
cdfffe8ff7 Merge pull request #264 from github/rneatherway-patch-1
Start analysing merge commit for PRs
2020-10-13 15:11:55 +01:00
Robin Neatherway
f813ad0ce2 Merge branch 'main' into rneatherway-patch-1 2020-10-13 14:26:01 +01:00
Robin Neatherway
bc196131af Merge pull request #260 from github/rneatherway/codeql-bundle-20201008
Update default CodeQL version to codeql-bundle-20201008
2020-10-13 11:14:48 +01:00
Robert Brignull
ffaa8aa197 add workflow to build runner 2020-10-13 10:56:07 +01:00
Robin Neatherway
f79717f3c3 Start analysing merge commit for PRs 2020-10-13 10:19:15 +01:00
Rasmus Wriedt Larsen
64ebf10c1d Don't use ::set-env in python-setup
The command is now deprecated, as described in
https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/
2020-10-08 16:12:40 +02:00
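For the TypeScript parts of the action, the non-deprecated path is `core.exportVariable`, which recent releases of `@actions/core` (around 1.2.6, the version bumped elsewhere in this history) implement via the `GITHUB_ENV` environment file rather than the `::set-env` command; shell scripts instead append to the file `$GITHUB_ENV` points at. A brief sketch with illustrative variable names:

```typescript
import * as core from "@actions/core";

// Instead of printing the deprecated `::set-env name=FOO::bar` command,
// let @actions/core export the variable (via the GITHUB_ENV file in 1.2.6+).
// Variable names here are illustrative.
export function exportPythonSetupEnv(scriptsFolder: string): void {
  core.exportVariable("LGTM_INDEX_IMPORT_PATH", scriptsFolder);
  core.exportVariable("LGTM_PYTHON_SETUP_VERSION", "2");
}
```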
David Verdeguer
72e430fc65 Merge branch 'main' into daverlo/python-windows 2020-10-08 15:53:50 +02:00
Robin Neatherway
2da5fbf0d4 Update default CodeQL version to codeql-bundle-20201008 2020-10-08 14:48:20 +01:00
David Verdeguer
9bc8c56ef0 Add missing [ 2020-10-08 12:17:28 +02:00
David Verdeguer
e3d2d4afc4 Add call to py 2020-10-08 12:14:41 +02:00
David Verdeguer
735ec7d414 Add powershell to call 2020-10-08 12:06:29 +02:00
David Verdeguer
424a9cfa1c Apply suggestions from code review
Co-authored-by: Arthur Baars <aibaars@github.com>
2020-10-08 11:59:35 +02:00
Andy McKay
da5edaf1b4 Change time in template
This template proposes a time which is our peak load for Actions; would this time work better for our customers? Fixes https://github.com/github/codeql-action/issues/258
2020-10-07 09:59:53 -07:00
David Verdeguer
e97bdbdfac Combine auto_install scripts 2020-10-07 12:55:02 +02:00
David Verdeguer
526dac0f91 Only run tests on main and v1 2020-10-06 14:47:20 +02:00
David Verdeguer
0f2fa46e3c Update installPythonDeps for windows 2020-10-06 14:37:35 +02:00
David Verdeguer
2748e6c5c1 Update python-deps-windows test_dir paths 2020-10-06 14:21:10 +02:00
David Verdeguer
74afd3c373 Port python deps setup to windows 2020-10-06 14:14:12 +02:00
David Verdeguer
b1e2c9b8bd Merge pull request #255 from github/daverlo/guard-python
Guard python deps install
2020-10-06 12:38:40 +02:00
David Verdeguer
d81cc671c0 Guard python deps install 2020-10-06 11:30:05 +02:00
Robert
3630a78d1a Merge pull request #235 from github/robertbrignull/subdomain
Make URL parsing more robust, and handle subdomain isolation
2020-10-05 17:53:43 +01:00
Robert Brignull
b185050563 Use GITHUB_DOTCOM_URL so URL deduplication works 2020-10-05 16:44:43 +01:00
Robert Brignull
28a5b954e7 Merge branch 'main' into robertbrignull/subdomain 2020-10-05 13:36:12 +01:00
Chris Gavin
5cdfcab4d4 Merge pull request #236 from github/ignore-temp-dir
Exclude the temporary directory from scanning.
2020-10-05 13:07:02 +01:00
Chris Gavin
11c1460003 Run npm run lint-fix. 2020-10-05 12:44:58 +01:00
Chris Gavin
14192f8248 Merge branch 'main' into ignore-temp-dir 2020-10-05 12:41:49 +01:00
Chris Raynor
d0afe926eb Merge pull request #238 from github/cbraynor/fix201
Resolve violations of import/no-extraneous-dependencies lint
2020-10-05 10:43:32 +01:00
Chris Raynor
0907cd5a41 Merge branch 'main' into cbraynor/fix201 2020-10-05 10:35:27 +01:00
David Verdeguer
a1fc3a5e79 Merge pull request #155 from github/daverlo/python
Python deps setup
2020-10-05 08:34:05 +02:00
David Verdeguer
55eb02cb0a Merge branch 'main' into daverlo/python 2020-10-05 08:05:16 +02:00
David Verdeguer
4ce302bdb9 Use find in python-des tests 2020-10-05 00:04:20 +02:00
Chris Raynor
1a91a0716f Merge pull request #242 from thaJeztah/cron_hint
README: document cron fields in example
2020-10-04 11:26:53 +01:00
Chris Raynor
319881ca28 Merge branch 'main' into cron_hint 2020-10-04 11:23:44 +01:00
Chris Raynor
a89f5ee6fd Fixing excess whitespace 2020-10-04 11:21:23 +01:00
Chris Raynor
31e2458574 Using standard format from docs
Updating the format slightly to match [the docs](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows#schedule) precisely
2020-10-04 11:20:31 +01:00
Chris Raynor
fe8c48ed50 Merge pull request #243 from github/dependabot/npm_and_yarn/actions/core-1.2.6
Bump @actions/core from 1.2.0 to 1.2.6
2020-10-03 13:27:39 +01:00
Chris Raynor
f49e963057 Updating node_modules 2020-10-03 12:55:40 +01:00
dependabot[bot]
4290eabf33 Bump @actions/core from 1.2.0 to 1.2.6
Bumps [@actions/core](https://github.com/actions/toolkit/tree/HEAD/packages/core) from 1.2.0 to 1.2.6.
- [Release notes](https://github.com/actions/toolkit/releases)
- [Changelog](https://github.com/actions/toolkit/blob/main/packages/core/RELEASES.md)
- [Commits](https://github.com/actions/toolkit/commits/HEAD/packages/core)

Signed-off-by: dependabot[bot] <support@github.com>
2020-10-01 17:36:26 +00:00
Sebastiaan van Stijn
7073967e9a README: document cron fields in example
Signed-off-by: Sebastiaan van Stijn <github@gone.nl>
2020-10-01 16:52:44 +02:00
Chris Raynor
bf60ce880e Merge branch 'main' into cbraynor/fix201 2020-10-01 12:24:03 +01:00
Chris Raynor
4ff6c0d0c9 Merge pull request #237 from github/cbraynor/fix206
Resolve violations of sort-imports lint
2020-10-01 12:22:14 +01:00
Chris Raynor
6d01157d11 Merge branch 'cbraynor/fix206' into cbraynor/fix201 2020-10-01 11:17:59 +01:00
Chris Raynor
10479a214a Merge branch 'main' into cbraynor/fix206 2020-10-01 11:09:05 +01:00
Chris Raynor
122c9b7f24 Switching to import/order instead of sort-imports 2020-10-01 11:03:46 +01:00
Marco Gario
b9e933968b Merge pull request #240 from github/marcogario/platform-bundle
Platform specific bundle
2020-10-01 11:29:53 +02:00
Chris Gavin
62f25fda9b Ignore the tools directory as well and remove the warning. 2020-10-01 10:03:45 +01:00
Marco Gario
d5029a8680 Platform specific bundle 2020-10-01 10:04:48 +02:00
Chris Gavin
dbecf76db8 Use a relative path to ignore the runner temporary directory. 2020-09-29 15:43:37 +01:00
Chris Raynor
47fa956a52 Updating node_modules 2020-09-29 15:05:16 +01:00
Chris Raynor
8200c137dc Resolve violations of import/no-extraneous-dependencies lint
Fixes #201
2020-09-29 15:03:21 +01:00
Chris Raynor
228546a1e5 Resolve violations of sort-imports lint
Resolves #206
2020-09-29 14:43:37 +01:00
Robin Neatherway
bb6fa8ee6d Merge pull request #217 from github/rneatherway/optional-merge
Do not always overwrite the GITHUB_REF for PRs
2020-09-29 13:17:39 +01:00
Robin Neatherway
2d6f6077bf Merge branch 'main' into rneatherway/optional-merge 2020-09-29 13:02:37 +01:00
David Verdeguer
4e39b768c7 Remove tests that should fail 2020-09-29 10:33:25 +02:00
David Verdeguer
5e2245cc23 Add x permission to the test scripts 2020-09-29 10:06:06 +02:00
David Verdeguer
67ddca1d9c Add workflow for testing python setup 2020-09-29 09:57:29 +02:00
Chris Gavin
206e34cbb4 Exclude the temporary directory from scanning. 2020-09-28 21:43:19 +01:00
Robert Brignull
c4dc1b0438 Make URL parsing more robust 2020-09-28 18:39:56 +01:00
Chris Raynor
2841489ddf Merge pull request #233 from miqh/fix/205
Resolve violations of no-useless-escape lint
2020-09-28 11:49:40 +01:00
Chris Raynor
a2931d32c7 Merge branch 'main' into fix/205 2020-09-28 11:20:31 +01:00
Chris Raynor
bcf29b3c36 Merge pull request #232 from miqh/fix/204
Resolve violations of no-throw-literal lint
2020-09-28 11:18:23 +01:00
Michael Huynh
c68c97e2bd Resolve violations of no-useless-escape lint
Resolves #205
2020-09-28 10:55:58 +08:00
Rasmus Wriedt Larsen
18312707fe Explain get_extractor_version for python setup scripts 2020-09-25 12:07:22 +02:00
Michael Huynh
476bf863f2 Resolve violations of no-throw-literal lint
Resolves #204
2020-09-25 17:39:25 +08:00
David Verdeguer
5419fcd735 Improve warning message 2020-09-25 11:28:57 +02:00
Rasmus Wriedt Larsen
dffce9945c Minor fixes to python installation scripts based on code review 2020-09-24 15:43:34 +02:00
Rasmus Wriedt Larsen
6645c550ef Apply suggestions from code review
Co-authored-by: Marco Gario  <marcogario@github.com>
2020-09-24 15:36:00 +02:00
David Verdeguer
7753dec413 Add min disk free flag to database analyze: 2020-09-24 11:42:21 +02:00
David Verdeguer
34955967b7 Fix python string 2020-09-23 18:06:08 +02:00
David Verdeguer
147734591c Check platform before installing python tools 2020-09-23 17:49:39 +02:00
Robert
f2e557e77f Merge pull request #228 from miqh/fix/197
Resolve violations of eslint-comments/no-use lint
2020-09-23 15:20:37 +01:00
Robert
5d00d5b4e8 Merge branch 'main' into fix/197 2020-09-23 14:28:21 +01:00
Robert
d9f39334ae Merge pull request #229 from miqh/fix/203
Resolve violations of no-sparse-arrays lint
2020-09-23 14:27:27 +01:00
Michael Huynh
38363a1043 Resolve violations of no-sparse-arrays lint
Resolves #203
2020-09-23 21:02:03 +08:00
Michael Huynh
80b408e704 Resolve violations of eslint-comments/no-use lint
Resolves #197
2020-09-23 20:25:45 +08:00
Chris Raynor
481f3ce214 Merge pull request #227 from github/cbraynor/cache-pinning
Prioritizing pre-downloaded CodeQL bundle in some circumstances
2020-09-23 11:58:35 +01:00
Chris Raynor
2f4ca98eb6 Reducing the number of new builds significantly while keeping coverage 2020-09-23 11:54:42 +01:00
David Verdeguer
23a1a65b43 Merge branch 'main' into daverlo/python-v2 2020-09-23 09:27:54 +02:00
Chris Raynor
3b90db98f9 Integration tests now both rely on pre-downloaded and latest codeql bundles 2020-09-22 14:42:23 +01:00
Chris Raynor
41464b1396 Allowing a cached version of the CodeQL bundle sometimes
To save time downloading the CodeQL bundle we're pre-downloading it into the
hosted Actions runner, but because the release schedule is different there may
be some version drift. This change allows a different version of the bundle
to be used than the default if a version isn't explicitly specified, there's
only one version downloaded, and it's been marked as a 'pinned-version' -
otherwise it reverts to the prior behavior.
2020-09-22 14:38:27 +01:00
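A sketch of that decision logic, assuming cached bundles live in the Actions tool cache and pinned ones carry a marker file; the marker name and default version below are illustrative:

```typescript
import * as fs from "fs";
import * as path from "path";
import * as toolcache from "@actions/tool-cache";

// Decide whether a pre-downloaded CodeQL bundle may be used instead of the
// default. The "pinned-version" marker file and default version string are
// illustrative; the real logic lives in the setup code this PR changes.
function getCodeQLBundleVersion(requestedVersion?: string): string {
  const CODEQL_DEFAULT_VERSION = "codeql-bundle-20200826";
  if (requestedVersion !== undefined) {
    return requestedVersion; // an explicitly requested version always wins
  }
  const cachedVersions = toolcache.findAllVersions("CodeQL");
  if (cachedVersions.length === 1) {
    const toolPath = toolcache.find("CodeQL", cachedVersions[0]);
    if (fs.existsSync(path.join(toolPath, "pinned-version"))) {
      return cachedVersions[0]; // exactly one cached bundle, marked as pinned
    }
  }
  return CODEQL_DEFAULT_VERSION; // otherwise keep the prior behaviour
}
```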
Chris Gavin
367ad73efb Merge pull request #226 from github/octokit-retry
Fix retrying uploads by using Octokit retry plugin.
2020-09-22 12:39:47 +01:00
Chris Gavin
8d26f6175d Merge branch 'main' into octokit-retry 2020-09-22 12:26:34 +01:00
Chris Gavin
31c2eca167 Fix retrying uploads by using Octokit retry plugin. 2020-09-21 19:15:19 +01:00
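A minimal sketch of wiring the retry plugin into the client; the construction details are illustrative rather than the action's exact api-client code:

```typescript
import * as githubUtils from "@actions/github/lib/utils";
import { retry } from "@octokit/plugin-retry";

// Compose the retry plugin into the Octokit class so transient failures during
// SARIF uploads are retried by the plugin rather than by hand-rolled loops.
const RetryingOctokit = githubUtils.GitHub.plugin(retry);

export function getApiClient(token: string, apiUrl: string) {
  return new RetryingOctokit(
    githubUtils.getOctokitOptions(token, { baseUrl: apiUrl })
  );
}
```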
Marco Gario
427c79f43f Merge pull request #216 from github/split_bundle
Workflow to split the bundle into components
2020-09-21 18:19:03 +02:00
Marco Gario
c36848d44b Merge branch 'main' into split_bundle 2020-09-21 18:12:53 +02:00
Marco Gario
590fdcd891 Apply suggestions from code review
Co-authored-by: Robert <robertbrignull@github.com>
2020-09-21 18:12:23 +02:00
Chris Gavin
e67ba57e65 Merge pull request #221 from github/update-actions-github
Update to the latest version of `@actions/github`.
2020-09-21 16:06:30 +01:00
Chris Gavin
bba73b6d4e Merge main into update-actions-github. 2020-09-21 15:25:08 +01:00
Chris Gavin
cc0eb452c7 Use getOctokit(...) when getting the GitHub API client. 2020-09-21 15:21:05 +01:00
Chris Raynor
a8a62974b3 Merge pull request #223 from miqh/fix/199
Resolve violations of github/array-foreach lint
2020-09-21 11:11:07 +01:00
Marco Gario
d265935d24 Update split.yml 2020-09-21 12:00:29 +02:00
Marco Gario
47eb668155 Update split.yml 2020-09-21 12:00:00 +02:00
Marco Gario
1154bf6df9 Update split.yml 2020-09-21 10:13:23 +02:00
Michael Huynh
4666a0eed0 Resolve violations of github/array-foreach lint
Resolves #199
2020-09-20 17:41:27 +08:00
Robert
b2dfa6e690 Merge pull request #219 from github/robertbrignull/init_status_queries
Include workflow queries in status report
2020-09-18 16:56:03 +01:00
Robert
def00916ce Merge branch 'main' into robertbrignull/init_status_queries 2020-09-18 16:36:17 +01:00
Chris Gavin
9ed519fa12 Update to the latest version of @actions/github. 2020-09-18 16:06:20 +01:00
Robert
55458a1ab1 Merge pull request #179 from github/split_builtin_custom_queries
Split up builtin and custom queries
2020-09-18 10:06:50 +01:00
Robert Brignull
1dc1029baf Merge branch 'main' into split_builtin_custom_queries 2020-09-18 09:52:44 +01:00
Marco Gario
5166e750e9 Merge pull request #218 from github/marcogario/reduce_ci_jobs
Reduce triggers in workflows
2020-09-17 18:36:18 +02:00
Robert Brignull
875a8da7e3 include workflow queries in status report 2020-09-17 17:29:45 +01:00
Marco Gario
ade519b950 Reduce triggers in workflows
See #182. Workflows are now triggered on all PRs, but only on pushes to the main and v1 branches.
2020-09-17 14:39:18 +02:00
Robin Neatherway
7795860c11 Do not always overwrite the GITHUB_REF for PRs
As we move towards analysing the merge commit for pull requests by
default, we should stop sending `refs/pull/n/head` rather than
`refs/pull/n/merge` _unless_ the checked-out SHA has actually changed.
Here we assume that any change (compared to GITHUB_SHA) indicates that
`git checkout HEAD^2` has been run earlier. This may sometimes be
incorrect (e.g. `git checkout mybranch`), but in that case the ref
would be wrong either way.
2020-09-17 13:11:06 +01:00
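A sketch of that decision, assuming the helper shape below; the key comparison is between the checked-out commit and `GITHUB_SHA`:

```typescript
import { execSync } from "child_process";

// Sketch: decide which ref to report for a pull request. Only prefer the head
// ref when the checked-out commit differs from GITHUB_SHA, i.e. when something
// like `git checkout HEAD^2` has been run after the default checkout.
function getRefToReport(): string {
  const ref = process.env["GITHUB_REF"] || "";         // e.g. refs/pull/123/merge
  const expectedSha = process.env["GITHUB_SHA"] || ""; // SHA of the merge commit
  const pullRefRegex = /^refs\/pull\/(\d+)\/merge$/;
  if (!pullRefRegex.test(ref)) {
    return ref; // not a PR merge ref; nothing to adjust
  }
  const checkedOutSha = execSync("git rev-parse HEAD").toString().trim();
  if (checkedOutSha === expectedSha) {
    return ref; // the merge commit is checked out: keep refs/pull/N/merge
  }
  // A different commit is checked out, so report the head ref instead.
  return ref.replace(pullRefRegex, "refs/pull/$1/head");
}
```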
Marco Gario
b4a8cfa05c Add instructions 2020-09-17 13:42:19 +02:00
Marco Gario
7da583bcb3 Workflow to split the bundle into components 2020-09-17 13:36:26 +02:00
Robert
c9b06117cb Merge pull request #211 from github/robertbrignull/consistent_inputs
Make inputs consistent between action and runner
2020-09-17 10:06:43 +01:00
Robert
4b4f9e8e6a Merge branch 'main' into robertbrignull/consistent_inputs 2020-09-17 09:36:51 +01:00
Robert
2c94a7f61f Merge pull request #215 from github/main-v1
Mergeback v1 to main
2020-09-17 09:33:39 +01:00
Marco Gario
5f592775cc Merge branch 'main' into main-v1 2020-09-16 18:37:11 +02:00
Robert
ef3b75cbdb Update action.yml 2020-09-16 16:23:11 +02:00
Robert Brignull
090a7013dd add explanation to query 2020-09-16 11:03:19 +01:00
Robert Brignull
4c4114f2d8 add an extra getRequiredInput 2020-09-16 11:03:13 +01:00
Robert Brignull
7be1a410c2 Merge branch 'main' into robertbrignull/consistent_inputs 2020-09-16 10:50:54 +01:00
Robert Brignull
c1cee53da5 Add getOptionalInput and getRequiredInput 2020-09-15 18:47:50 +01:00
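These are thin wrappers over `core.getInput`; a sketch of how such helpers typically look (the error text is illustrative):

```typescript
import * as core from "@actions/core";

// Return the input's value, or undefined if the workflow did not set it.
export function getOptionalInput(name: string): string | undefined {
  const value = core.getInput(name);
  return value.length > 0 ? value : undefined;
}

// Return the input's value, failing loudly if the workflow did not set it.
export function getRequiredInput(name: string): string {
  const value = core.getInput(name);
  if (!value) {
    throw new Error(`Input required and not supplied: ${name}`);
  }
  return value;
}
```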
Robert Brignull
d88fa5cef6 Add queries 2020-09-15 18:33:37 +01:00
Robert
f6894d6610 Merge pull request #210 from github/robertbrignull/disable-snippets
Disable snippets by default
2020-09-15 18:25:28 +01:00
Robert
3d5127d682 Update action.yml 2020-09-15 17:45:34 +01:00
Marco Gario
4076e23ec5 Merge pull request #209 from github/remove_file
Remove unused files
2020-09-15 18:02:05 +02:00
Nick Fyson
0da3e94867 Merge branch 'main' into remove_file 2020-09-15 15:16:05 +01:00
Marco Gario
3325da8a15 Remove unused files 2020-09-15 15:51:27 +02:00
Robert
1ca9d4c096 Merge pull request #207 from github/robertbrignull/replace_all
Replace all occurrences instead of just the first
2020-09-15 14:47:05 +01:00
Robert Brignull
121fd331cd Introduce actions-util.ts 2020-09-15 14:01:21 +01:00
Chris Raynor
50d46f662f Merge pull request #184 from github/update-v1-6567bffc
Merge main into v1
2020-09-15 09:09:40 +01:00
Robert Brignull
89dad149ed Replace all occurrences 2020-09-14 18:13:33 +01:00
Nick Fyson
245c02cf7d Merge pull request #174 from github/nickfyson/error_wrapper
add error-catching wrapper around calls to toolrunner
2020-09-14 13:59:39 +01:00
Nick Fyson
e5e9aad174 Merge branch 'main' into nickfyson/error_wrapper
# Conflicts:
#	lib/codeql.js
#	lib/codeql.js.map
#	src/codeql.ts
2020-09-14 13:30:37 +01:00
Chris Raynor
698eab0c5f Merge pull request #181 from github/cbraynor/eslint
Switching to ESLint
2020-09-14 10:59:19 +01:00
Chris Raynor
7bdf90af87 Adding super simple lint Action 2020-09-14 10:51:29 +01:00
Chris Raynor
cb51c857fc Adding temporary overrides to allow the linter to pass 2020-09-14 10:49:02 +01:00
Chris Raynor
a184d50a26 Running lint-fix 2020-09-14 10:44:43 +01:00
Chris Raynor
c96f84308a Regenerating node_modules 2020-09-14 10:42:37 +01:00
Chris Raynor
09b4a82c83 Removing the tslint config 2020-09-14 10:37:55 +01:00
Chris Raynor
06765f9340 Adding ESLint config and required dev dependencies 2020-09-14 10:32:24 +01:00
Nick Fyson
814840b1de Merge branch 'main' into nickfyson/error_wrapper 2020-09-14 10:28:03 +01:00
Chris Raynor
6567bffcb9 Merge pull request #183 from github/dependabot/npm_and_yarn/node-fetch-2.6.1
Bump node-fetch from 2.6.0 to 2.6.1
2020-09-13 11:22:52 +01:00
Chris Raynor
0579b4d27d Vendoring node_modules 2020-09-13 10:27:23 +01:00
dependabot[bot]
0b64878cfe Bump node-fetch from 2.6.0 to 2.6.1
Bumps [node-fetch](https://github.com/bitinn/node-fetch) from 2.6.0 to 2.6.1.
- [Release notes](https://github.com/bitinn/node-fetch/releases)
- [Changelog](https://github.com/node-fetch/node-fetch/blob/master/docs/CHANGELOG.md)
- [Commits](https://github.com/bitinn/node-fetch/compare/v2.6.0...v2.6.1)

Signed-off-by: dependabot[bot] <support@github.com>
2020-09-12 20:20:25 +00:00
Nick Fyson
b104d6e035 Merge branch 'main' into nickfyson/error_wrapper
# Conflicts:
#	lib/codeql.js.map
2020-09-11 19:04:05 +01:00
Nick Fyson
b3b99014eb make ErrorMatcher an object rather than a tuple 2020-09-11 18:30:07 +01:00
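A sketch of what an error-matcher object might look like after this change, compared with an opaque tuple; all field names, the exit code and the example regex are illustrative:

```typescript
// An error matcher pairs a recognisable signal from a failed CodeQL invocation
// with a friendlier message to surface. All field names, the exit code and the
// regex below are illustrative.
interface ErrorMatcher {
  exitCode?: number;     // match on the process exit code, if set
  outputRegex?: RegExp;  // match on captured stdout/stderr, if set
  message: string;       // the message to report when the matcher fires
}

const noSourceCodeFound: ErrorMatcher = {
  exitCode: 32,
  outputRegex: /No source code found\./,
  message: "No code was found during the build. See the troubleshooting documentation.",
};

function matchError(matchers: ErrorMatcher[], exitCode: number, output: string): string | undefined {
  return matchers.find(
    (m) =>
      (m.exitCode !== undefined && m.exitCode === exitCode) ||
      (m.outputRegex !== undefined && m.outputRegex.test(output))
  )?.message;
}
```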
Nick Fyson
1fb7c81099 tweak in response to reviewer comments 2020-09-11 18:25:23 +01:00
Nick Fyson
57ef26cffe Merge pull request #178 from github/nickfyson/include-snippets
add optional workflow input to specify whether snippets are added to sarif
2020-09-11 14:11:14 +01:00
Nick Fyson
be681d444e Merge branch 'main' into nickfyson/include-snippets 2020-09-11 13:58:53 +01:00
David Verdeguer
4ab5cbcc8a Merge pull request #173 from github/rasmuswl/update-python-scripts
Update Python scripts for "Python deps setup"
2020-09-11 12:41:31 +02:00
Rasmus Wriedt Larsen
1b20fa78be Handle error in poetry 1.0.10 2020-09-11 12:40:40 +02:00
Rasmus Wriedt Larsen
7fc41c62be Update python scripts to handle setup.py 2020-09-11 12:40:40 +02:00
David Verdeguer
5701037850 Add python scripts 2020-09-11 12:25:47 +02:00
Robert
a2cdfc8031 Merge pull request #180 from ericcornelissen/patch-1
Fix "Using a custom configuration" link in the README
2020-09-11 10:13:31 +01:00
David Verdeguer
52274f1815 Setup python extractor for installed deps 2020-09-11 11:07:27 +02:00
David Verdeguer
8cea21575c Install python deps on init 2020-09-11 10:53:41 +02:00
Eric Cornelissen
ea3706d88f Fix "Using a custom configuration" link in README
Fix the "Using a custom configuration" link the README. The actual ID of the header on the page is "using-a-custom-configuration-file", so I updated both the link and the link test to match. (as far as I can tell all other links in the README work as expected)
2020-09-10 20:58:39 +03:00
Nick Fyson
77f767cb34 add optional workflow input to specify whether snippets are added to sarif output 2020-09-10 18:26:58 +01:00
Robert Brignull
0539269665 split up builtin and custom queries 2020-09-10 18:17:03 +01:00
Robert
75af0bf309 Merge pull request #177 from github/external_queries_test
Make a local repo for checkoutExternalQueries
2020-09-10 11:43:03 +01:00
Robert
d4f40db368 Merge branch 'main' into external_queries_test 2020-09-10 11:24:22 +01:00
Robert
c5f77d0bfd Merge pull request #175 from github/runner-integration-tests
Add more integration tests for the runner
2020-09-10 11:23:28 +01:00
Robert
e6083671c7 Merge branch 'main' into runner-integration-tests 2020-09-10 10:57:16 +01:00
Robert
4d8719bb73 Merge pull request #176 from github/fingerprint_test
Add another fingerprinting test
2020-09-10 10:24:01 +01:00
Robert Brignull
3be0e89804 make a local repo for checkoutExternalQueries 2020-09-09 18:30:57 +01:00
Robert Brignull
3db0a253ed add another fingerprinting test 2020-09-09 17:37:53 +01:00
Robert
d82e34bb43 Merge branch 'main' into runner-integration-tests 2020-09-09 17:31:35 +01:00
Robert
67246cd645 Update integration-testing.yml 2020-09-09 17:31:20 +01:00
Sam Partington
028706c3a2 Merge pull request #165 from github/allow-additive-queries-in-workflow
Allow "additive" queries in workflow by prefixing with "+"
2020-09-09 16:41:55 +01:00
Sam Partington
4ad13dff42 Merge branch 'main' into allow-additive-queries-in-workflow 2020-09-09 15:32:20 +01:00
Robert Brignull
e4b0068a9d add more integration tests for the runner 2020-09-08 18:32:52 +01:00
Robert
506e641669 Merge pull request #167 from github/windows_tracing
Fix tracing on windows for the runner, and when using multiple self-hosted actions runners
2020-09-08 11:28:42 +01:00
Sam Partington
18cd03ab62 Make variable name less ambiguous 2020-09-08 10:14:51 +01:00
Sam Partington
d677f16692 Merge branch 'main' into allow-additive-queries-in-workflow 2020-09-08 10:00:16 +01:00
Nick Fyson
88951d6193 renames to reflect the switch to using toolrunner 2020-09-08 00:01:04 +01:00
Nick Fyson
3cd41279f2 Merge branch 'main' into nickfyson/error_wrapper
# Conflicts:
#	lib/codeql.js
#	lib/codeql.js.map
#	src/codeql.ts
2020-09-07 23:55:32 +01:00
Nick Fyson
c4f579a4db remove the intentionally failing matcher integration tests 2020-09-07 23:12:12 +01:00
Nick Fyson
30bb37a5c7 restore all default workflow triggers and integration tests 2020-09-07 23:11:10 +01:00
Nick Fyson
cc2dfaf5d8 clean up and rename things to follow proper conventions 2020-09-07 23:02:58 +01:00
Nick Fyson
3e6d23928b add test for initial error matcher 2020-09-07 22:50:37 +01:00
Nick Fyson
7dbaff09b6 move error matcher definition into dedicated file 2020-09-07 18:37:51 +01:00
Robert Brignull
7d9c81f55c Print final process we choose 2020-09-07 17:23:35 +01:00
Robert Brignull
212f4484d3 Merge branch 'main' into windows_tracing 2020-09-07 17:08:59 +01:00
Robert
556aed46b0 Merge pull request #170 from github/windows_message
Print path to file instead of file contents
2020-09-07 16:23:49 +01:00
Robert Brignull
c68937100c Merge branch 'main' into windows_message 2020-09-07 16:01:09 +01:00
Nick Fyson
1f3ce75844 add initial suite of tests for exec_wrapper 2020-09-07 15:17:43 +01:00
Nick Fyson
9701a93bf1 ensure exec_wrapper always captures exit code of called process 2020-09-07 15:17:19 +01:00
Robert
8c43427531 Merge pull request #169 from github/codeql_download_info
Print that we are downloading codeql and it may take a while
2020-09-07 14:32:37 +01:00
Robert
1a772a2b9e Merge branch 'main' into codeql_download_info 2020-09-07 13:37:20 +01:00
Robert Brignull
694fa2d961 add options to specify process name or level to trace 2020-09-07 13:36:47 +01:00
Robert Brignull
789059e604 Merge branch 'main' into windows_tracing 2020-09-07 12:49:20 +01:00
Robert
0e9b8f4a47 Merge pull request #168 from github/runner_refs
make inputting refs easier
2020-09-07 12:19:56 +01:00
Robert
3901848a2f Merge pull request #171 from github/update-v1-68c6069f
Merge main into v1
2020-09-07 11:42:15 +01:00
Nick Fyson
dd4e841441 remove spurious use of exec_wrapper 2020-09-03 17:01:51 +01:00
Nick Fyson
6a43a6178d ensure matchers not applied if exit code is zero 2020-09-03 16:42:56 +01:00
Robert Brignull
9782622366 Print path to file instead of file contents 2020-09-02 19:58:03 +01:00
Robert Brignull
32eb1c4c34 Print that we are downloading codeql and it may take a while 2020-09-02 18:02:49 +01:00
Robert Brignull
ebb41156ee make inputting refs easier 2020-09-02 18:00:46 +01:00
Nick Fyson
231537b08e refactor to handle exec.exec return values together 2020-09-02 16:32:28 +01:00
Robert Brignull
48df01325b add alternative script for in runner mode 2020-09-02 15:50:37 +01:00
Robert Brignull
5c0bd22d01 set -ExecutionPolicy Bypass 2020-09-02 15:50:23 +01:00
Robert Brignull
2dbd7e8dc1 Fix tracing when there are multiple self-hosted runners 2020-09-02 15:28:50 +01:00
Nick Fyson
6ef533485e wrap more codeql calls in the error catcher wrapper 2020-09-01 16:51:39 +01:00
Nick Fyson
7b7e0e12b7 update error_wrapper to take matcher array 2020-09-01 16:35:08 +01:00
Robert
68c6069f0b Merge pull request #162 from github/runner_analyze
Convert init, autobuild, and analyze actions to CLI / runner mode
2020-09-01 15:01:36 +01:00
Robert Brignull
b4d142e980 whitelist @actions/exec/lib/toolrunner 2020-09-01 14:44:38 +01:00
Robert Brignull
4c00c68d14 Add --ram and --threads args 2020-09-01 14:27:56 +01:00
Robert Brignull
09fb3ec514 Remove a call to getActionsApiClient 2020-09-01 14:01:21 +01:00
Robert Brignull
8a821a9c35 Add logger to checkoutExternalRepository 2020-09-01 13:53:59 +01:00
Robert
b8f7e0ff0f Merge pull request #166 from github/update-v1-5bd2832b
Merge main into v1
2020-09-01 13:30:36 +01:00
Robert Brignull
1548b771cb Merge remote-tracking branch 'origin/main' into runner_analyze 2020-08-28 17:28:14 +01:00
Robert Brignull
a0b54fc7ab fix tests 2020-08-28 17:28:05 +01:00
Robert Brignull
aa7e2fe91b automatically import env in autobuild 2020-08-28 17:22:26 +01:00
Sam Partington
abf5854149 Extract more common test code to function 2020-08-28 17:20:40 +01:00
Sam Partington
23cf700e38 Extract common test code to a function
https://github.com/github/codeql-action/pull/165#discussion_r479396850
2020-08-28 17:13:06 +01:00
Robert Brignull
c3d6602e8a use ToolRunner directly instead of exec wrapper 2020-08-28 16:59:34 +01:00
Sam Partington
831c686d9b Add details of queries property to README 2020-08-28 16:58:15 +01:00
Robert
5bd2832b32 Merge pull request #164 from github/npm_test_os
Run `npm test` on linux and macos
2020-08-28 16:57:28 +01:00
Sam Partington
ab8d9eccc9 Add a test which combines workflow queries and disabling the defaults 2020-08-28 16:53:11 +01:00
Sam Partington
82000c26c8 Allow "additive" queries in workflow by prefixing with "+"
See discussion on https://github.com/github/code-scanning/issues/1446
2020-08-28 16:45:57 +01:00
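The convention introduced here: a `queries` input starting with `+` adds to, rather than replaces, the queries from the config file. A sketch of the parsing, with illustrative names:

```typescript
// Split a workflow-level `queries` input such as "+security-extended,./custom"
// into its parts. A leading "+" means "add to the config file's queries"
// instead of overriding them. Illustrative sketch only.
function parseQueriesInput(rawQueriesInput: string | undefined): {
  additive: boolean;
  queries: string[];
} {
  if (!rawQueriesInput) {
    return { additive: false, queries: [] };
  }
  const trimmed = rawQueriesInput.trim();
  const additive = trimmed.startsWith("+");
  const withoutPrefix = additive ? trimmed.slice(1) : trimmed;
  const queries = withoutPrefix
    .split(",")
    .map((q) => q.trim())
    .filter((q) => q.length > 0);
  return { additive, queries };
}
```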
Robert
b1d719eeeb fix tests on non-linux 2020-08-28 15:13:14 +01:00
Robert
da1c00f5c8 Run npm test on all operating systems 2020-08-28 15:10:47 +01:00
Robert Brignull
80e2c4fe4a improve error message when config is not found 2020-08-28 09:43:25 +01:00
Robert Brignull
37bac22443 Make runner arg descriptions more consistent 2020-08-28 09:35:37 +01:00
Robert Brignull
57f03d3bd0 Remove hash from temp dir 2020-08-28 09:27:28 +01:00
Robert Brignull
6c8f96d781 Log that we're clearing the temp dir 2020-08-27 16:45:41 +01:00
Robert Brignull
3dfaa88a1d Remove process of auth 2020-08-27 16:45:28 +01:00
Robert Brignull
1fd45d7407 address review comments 2020-08-27 16:34:09 +01:00
Robert Brignull
6f422a4303 add debug mode to limit output 2020-08-27 14:26:44 +01:00
Robert Brignull
f80d660e33 check environment before running autobuild or analyze 2020-08-27 14:22:16 +01:00
Robert Brignull
a542021200 make --language optional to autobuild and detect dominant language 2020-08-27 14:08:54 +01:00
Robert Brignull
b42ed69542 Update temp dir and tools dir 2020-08-27 14:08:41 +01:00
Marco Gario
8229390c1d Merge pull request #163 from github/codeql_bundle_20200826
CodeQL Bundle: 20200826
2020-08-27 14:54:09 +02:00
Robert Brignull
39b361ed69 Remove dependence of GITHUB_REPOSITORY env var 2020-08-27 11:06:14 +01:00
Robert Brignull
688df282cd fix exporting env vars on linux 2020-08-26 16:40:58 +01:00
Robert Brignull
e9bfa56061 inline inject-tracer.ps1 2020-08-26 16:20:36 +01:00
Robert Brignull
1f2bd10757 add newline to output 2020-08-26 16:20:36 +01:00
Robert Brignull
1c004b9275 inline tracer-env.js 2020-08-26 16:20:36 +01:00
Robert Brignull
f5d645fc73 Fix use of wrong URL 2020-08-26 16:20:36 +01:00
Robert Brignull
3ffe4b7421 Add code to output required env to files 2020-08-26 16:20:36 +01:00
Robert Brignull
217483dfd6 Convert rest of the actions 2020-08-26 16:20:36 +01:00
Marco Gario
b1aa99c6de Merge branch 'main' into codeql_bundle_20200826 2020-08-26 16:42:16 +02:00
Marco Gario
7e207743ab Update defaults.json 2020-08-26 16:28:44 +02:00
Robert
aac5eb2aea Merge pull request #159 from github/rename_cli
Rename CLI to CodeQL runner
2020-08-26 15:03:23 +01:00
Robert Brignull
09677dada5 rename CLI to runner 2020-08-25 17:44:30 +01:00
Robert
fe756603d9 Merge pull request #152 from github/vercel/pkg
Add vercel/pkg to create standalone CLI
2020-08-25 17:42:54 +01:00
Robert Brignull
80a22f43d8 Merge branch 'main' into vercel/pkg 2020-08-25 15:58:59 +01:00
Robert
c039c3b7a4 Merge pull request #158 from github/tracer-config
Break out tracer-config.ts to its own file
2020-08-25 15:20:36 +01:00
Robert
2df51e04f7 Merge branch 'main' into tracer-config 2020-08-25 15:05:15 +01:00
Sam Partington
abddb8bf00 Merge pull request #127 from github/query-overriding
Allow queries to be specified in the workflow configuration, overriding those set in the config file
2020-08-25 14:29:40 +01:00
Sam Partington
e997bdf637 Merge branch 'main' into query-overriding 2020-08-25 14:21:33 +01:00
Sam Partington
ab4e7216d3 Don't refer to config file in contexts where it's not relevant
https://github.com/github/codeql-action/pull/127#discussion_r476366221
2020-08-25 14:19:16 +01:00
Robert Brignull
1fb8d3b712 fix test workflow again 2020-08-25 12:00:04 +01:00
Robert Brignull
c3c86d82ae Merge remote-tracking branch 'origin/main' into vercel/pkg
the commit.
2020-08-25 11:49:44 +01:00
Esben Sparre Andreasen
ab457bef49 Merge pull request #139 from github/support-multiple-checkouts
Support checkout of multiple refs for a single repository
2020-08-25 12:48:09 +02:00
Robert Brignull
d6daa45d0b update test workflow 2020-08-25 11:46:13 +01:00
Robert Brignull
0bb8872e19 remove build-cli from top-level package.json 2020-08-25 11:45:01 +01:00
Esben Sparre Andreasen
c562bfbd92 Merge branch 'main' into support-multiple-checkouts 2020-08-25 12:35:47 +02:00
Robert Brignull
8efabe9ec9 Merge branch 'main' into tracer-config 2020-08-25 11:35:22 +01:00
Robert
e05e9e6b5d Merge pull request #160 from github/rename_entrypoints
Rename the action entrypoint files
2020-08-25 11:31:17 +01:00
Esben Sparre Andreasen
05f620754e Merge branch 'main' into support-multiple-checkouts 2020-08-25 12:25:42 +02:00
Robert
85c7ad0ecd Merge branch 'main' into rename_entrypoints 2020-08-25 11:10:22 +01:00
Robert
570a0d7142 Merge pull request #161 from github/lib_clean
Clean up the lib directory
2020-08-25 11:10:11 +01:00
Robert Brignull
82fb31ed5e make tests clearer 2020-08-25 11:09:07 +01:00
Nick Fyson
cd22abcda8 add example regex match for stdout/err string 2020-08-25 10:54:54 +01:00
Sam Partington
bdfd48264f Merge branch 'main' into query-overriding 2020-08-25 10:39:53 +01:00
Esben Sparre Andreasen
eecc25f914 build typescript 2020-08-25 11:22:11 +02:00
Esben Sparre Andreasen
45b9e967ef support checkout of multiple refs for a single repository 2020-08-25 11:22:08 +02:00
Sam Partington
129713f1a0 Handle errors in workflow queries correctly 2020-08-25 10:17:54 +01:00
Nick Fyson
45e00a8e6a match existing behaviour when custom listeners defined 2020-08-24 18:35:22 +01:00
Sam Partington
7f19f9198a Refactor common code to function and add missing test 2020-08-24 15:53:24 +01:00
Robert Brignull
9c015b7e83 cleanup lib 2020-08-24 15:18:26 +01:00
Robert Brignull
4bc0c2b0e7 clean the lib dir before building 2020-08-24 15:18:01 +01:00
Robert Brignull
9e342a9b83 rename the action entrypoint files 2020-08-24 15:15:26 +01:00
Sam Partington
c6f02973ac Prevent queries in workflow overriding default queries
https://github.com/github/codeql-action/pull/127#pullrequestreview-463207781
2020-08-24 14:42:05 +01:00
Robert Brignull
88d9e95da6 add minimal instructions for building CLI 2020-08-24 14:04:29 +01:00
Robert Brignull
3c5b7fba82 update node_modules 2020-08-24 14:02:55 +01:00
Robert Brignull
a6e6d4b72b move dependencies needed to build CLI to separate package.json 2020-08-24 14:02:49 +01:00
Robert
06fb4821bc Merge pull request #157 from github/update-v1-bd54c20d
Merge main into v1
2020-08-24 13:46:18 +01:00
Robert Brignull
407ef0ac11 Break out tracer-config.ts 2020-08-24 12:53:09 +01:00
Nick Fyson
2b27c68c84 add exec_wrapper function 2020-08-24 11:22:25 +01:00
Robert
bd54c20d3e Merge pull request #153 from github/add_env_to_config
Add tempDir and codeQLCmd fields to the config
2020-08-21 11:00:48 +01:00
Robert Brignull
0e8b30af75 Merge branch 'main' into add_env_to_config 2020-08-21 10:32:58 +01:00
Robert
1f5a571dfc Merge pull request #151 from github/action_libs_query
Add query to detect use of actions libs
2020-08-21 10:31:20 +01:00
Robert
d193e5f876 Merge branch 'main' into action_libs_query 2020-08-21 10:07:22 +01:00
Robert
5d49d011a2 Merge pull request #144 from github/language_parsing
Refactor language parsing
2020-08-21 10:07:06 +01:00
Robert Brignull
038c4ebdf7 add CodeQL cmd to config 2020-08-19 15:57:13 +01:00
Robert Brignull
360e77a083 remove direct accesses to RUNNER_TEMP 2020-08-19 15:25:27 +01:00
Robert Brignull
9c29fe283d add tempDir and toolCacheDir to config 2020-08-19 15:11:49 +01:00
Robert Brignull
574b5dc4e9 Merge branch 'main' into language_parsing 2020-08-19 09:40:47 +01:00
Sam Partington
517d9fad41 Improve description of queries property
Co-authored-by: Alistair Christie <54933897+hubwriter@users.noreply.github.com>
2020-08-17 16:33:47 +01:00
Robert Brignull
d6fbafb242 add tests 2020-08-17 13:34:55 +01:00
Robert Brignull
1943ed432b use enum 2020-08-17 13:34:34 +01:00
Robert Brignull
00eee2b7ee Merge branch 'main' into language_parsing 2020-08-17 13:21:02 +01:00
Robert
ce75b488a5 Merge pull request #150 from github/update-v1-e9efcf19
Merge main into v1
2020-08-17 12:52:09 +01:00
Robert Brignull
d49b8673bb fix alerts 2020-08-17 12:42:23 +01:00
Robert Brignull
f92a68048c add query to detect use of actions libs 2020-08-17 12:32:22 +01:00
Robert Brignull
591359cae6 introduce languages.ts 2020-08-10 16:03:09 +01:00
Robert
9494a25e76 Merge pull request #143 from github/update-v1-86081052
Merge main into v1
2020-08-10 10:46:27 +01:00
Sam Partington
443d18adb7 Improve variable name 2020-07-31 11:15:09 +01:00
Sam Partington
d998a87469 Fix typo 2020-07-31 11:13:06 +01:00
Sam Partington
f57bf21879 Tidy up test 2020-07-31 11:11:47 +01:00
Sam Partington
ca2b74f2f8 Make test name more descriptive 2020-07-30 17:05:58 +01:00
Sam Partington
7b0f432c26 Add missing JavaScript
https://github.com/github/codeql-action/pull/127/checks?check_run_id=915325110
2020-07-30 16:49:50 +01:00
Sam Partington
f03827d513 Add new input to the init action configuration 2020-07-27 16:50:03 +01:00
Sam Partington
2da457060b Allow multiple overriding queries, comma-separated 2020-07-27 16:50:03 +01:00
Sam Partington
95cef22589 Add support for basic query overriding in action file itself
See https://github.com/github/dsp-code-scanning/issues/1446
2020-07-27 16:50:03 +01:00
9172 changed files with 540444 additions and 616474 deletions

5
.eslintignore Normal file

@@ -0,0 +1,5 @@
**/webpack.config.js
lib/**
runner/dist/**
src/testdata/**
tests/**

55
.eslintrc.json Normal file

@@ -0,0 +1,55 @@
{
"parser": "@typescript-eslint/parser",
"parserOptions": {
"project": "./tsconfig.json"
},
"plugins": ["@typescript-eslint", "filenames", "github", "import", "no-async-foreach"],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-requiring-type-checking",
"plugin:github/recommended",
"plugin:github/typescript"
],
"rules": {
"filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],
"import/extensions": "error",
"import/no-amd": "error",
"import/no-commonjs": "error",
"import/no-dynamic-require": "error",
"import/no-extraneous-dependencies": ["error"],
"import/no-namespace": "off",
"import/no-unresolved": "error",
"import/no-webpack-loader-syntax": "error",
"import/order": ["error", {
"alphabetize": {"order": "asc"},
"newlines-between": "always"
}],
"no-async-foreach/no-async-foreach": "error",
"no-console": "off",
"no-sequences": "error",
"no-shadow": "off",
"@typescript-eslint/no-shadow": ["error"],
"one-var": ["error", "never"]
},
"overrides": [{
// "temporarily downgraded during transition to eslint
"files": "**",
"rules": {
"@typescript-eslint/ban-types": "off",
"@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-call": "off",
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unused-vars": "off",
"@typescript-eslint/no-var-requires": "off",
"@typescript-eslint/prefer-regexp-exec": "off",
"@typescript-eslint/require-await": "off",
"@typescript-eslint/restrict-template-expressions": "off",
"func-style": "off"
}
}]
}


@@ -1,5 +1,5 @@
blank_issues_enabled: true
contact_links:
- name: Contact GitHub Support
url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
about: Contact Support about code scanning
url: https://support.github.com/request
about: Contact Support


@@ -123,8 +123,8 @@ def get_pr_for_commit(repo, commit):
if prs.totalCount > 0:
# In the case that there are multiple PRs, return the earliest one
prs = list(prs)
sorted(prs, key=lambda pr: int(pr.number))
return prs[0]
sorted_prs = sorted(prs, key=lambda pr: int(pr.number))
return sorted_prs[0]
else:
return None


@@ -1,18 +0,0 @@
name: "CodeScanning CLI"
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
# Build the CLI
- name: Build CLI
run: npm run build-cli
# Upload an empty SARIF file
- name: Upload with CLI
run: node cli/code-scanning-cli.js upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_API_URL --github-auth ${{ github.token }}


@@ -1,6 +1,9 @@
name: "CodeQL action"
on: [push, pull_request]
on:
push:
branches: [main, v1]
pull_request:
jobs:
build:
@@ -11,18 +14,12 @@ jobs:
steps:
- uses: actions/checkout@v2
with:
# Must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head of the pull request.
fetch-depth: 2
# If this run was triggered by a pull request event then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}
- uses: ./init
id: init
with:
languages: javascript
config-file: ./.github/codeql/codeql-config.yml
# confirm steps.init.outputs.codeql-path points to the codeql binary
- name: Print CodeQL Version
run: ${{steps.init.outputs.codeql-path}} version --format=json
- uses: ./analyze


@@ -1,6 +1,9 @@
name: "Integration Testing"
on: [push, pull_request]
on:
push:
branches: [main, v1]
pull_request:
jobs:
multi-language-repo_test-autodetect-languages:
@@ -41,6 +44,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
tools: [~, latest]
runs-on: ${{ matrix.os }}
steps:
@@ -53,6 +57,7 @@ jobs:
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
with:
tools: ${{ matrix.tools }}
languages: cpp,csharp,java,javascript,python
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
@@ -93,6 +98,37 @@ jobs:
env:
TEST_MODE: true
go-custom-tracing:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
env:
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
steps:
- uses: actions/setup-go@v2
if: ${{ matrix.os == 'macos-latest' }}
with:
go-version: '^1.13.1'
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
with:
languages: go
- name: Build code
shell: bash
run: go build main.go
- uses: ./../action/analyze
env:
TEST_MODE: true
multi-language-repo_rubocop:
runs-on: ubuntu-latest
@@ -150,3 +186,299 @@ jobs:
- uses: ./../action/analyze
env:
TEST_MODE: true
runner-analyze-javascript-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
# Pass --config-file here, but not for other jobs in this workflow.
# This means we're testing the config file parsing in the runner
# but not slowing down all jobs unnecessarily, since testing the parsing on
# different operating systems and languages doesn't add much.
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
. ./codeql-runner/codeql-env.sh
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
../action/runner/dist/codeql-runner-linux autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
../action/runner/dist/codeql-runner-win.exe autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
../action/runner/dist/codeql-runner-macos autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-upload-sarif:
runs-on: ubuntu-latest
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Upload with runner
run: |
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}


@@ -1,21 +1,24 @@
name: "PR checks"
on: [push, pull_request]
on:
push:
branches: [main, v1]
pull_request:
jobs:
tslint:
lint-js:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: tslint
- uses: actions/checkout@v2
- name: Run Lint
run: npm run-script lint
check-js:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
- name: Check generated JavaScript
run: |
# Sanity check that repo is clean to start with
@@ -24,6 +27,8 @@ jobs:
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Wipe the lib directory in case there are extra unnecessary files in there
rm -rf lib
# Generate the JavaScript files
npm run-script build
# Check that repo is still clean
@@ -39,7 +44,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
- name: Check node modules up to date
run: |
# Sanity check that repo is clean to start with
@@ -48,7 +53,6 @@ jobs:
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
@@ -63,9 +67,12 @@ jobs:
echo "Success: node_modules are up to date"
npm-test:
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest,macos-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
- name: npm run-script test
run: npm run-script test
run: npm run-script test

133
.github/workflows/python-deps.yml vendored Normal file

@@ -0,0 +1,133 @@
name: Test Python Package Installation on Linux and Mac
on:
push:
branches: [main, v1]
pull_request:
jobs:
test-setup-python-scripts:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest]
include:
- test_dir: python-setup/tests/pipenv/requests-2
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
- test_dir: python-setup/tests/pipenv/requests-3
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
- test_dir: python-setup/tests/poetry/requests-2
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
- test_dir: python-setup/tests/poetry/requests-3
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
- test_dir: python-setup/tests/requirements/requests-2
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
- test_dir: python-setup/tests/requirements/requests-3
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
- test_dir: python-setup/tests/setup_py/requests-2
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
- test_dir: python-setup/tests/setup_py/requests-3
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
# This one shouldn't fail, but also won't install packages
- test_dir: python-setup/tests/requirements/non-standard-location
test_script: test -z $LGTM_INDEX_IMPORT_PATH
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
- name: Initialize CodeQL
uses: ./init
id: init
with:
tools: latest
languages: python
setup-python-dependencies: false
- name: Test Auto Package Installation
run: |
set -x
$GITHUB_WORKSPACE/python-setup/install_tools.sh
cd $GITHUB_WORKSPACE/${{ matrix.test_dir }}
case ${{ matrix.os }} in
ubuntu-latest*) basePath="/opt";;
macos-latest*) basePath="/Users/runner";;
esac
echo ${basePath}
$GITHUB_WORKSPACE/python-setup/auto_install_packages.py "$(dirname ${{steps.init.outputs.codeql-path}})"
- name: Setup for extractor
run: |
echo $CODEQL_PYTHON
# only run if $CODEQL_PYTHON is set
if [ ! -z $CODEQL_PYTHON ]; then
$GITHUB_WORKSPACE/python-setup/tests/from_python_exe.py $CODEQL_PYTHON;
fi
- name: Verify packages installed
run: |
${{ matrix.test_script }}
test-setup-python-scripts-windows:
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
include:
- test_dir: python-setup/tests/pipenv/requests-2
python_version: 2
- test_dir: python-setup/tests/pipenv/requests-3
python_version: 3
- test_dir: python-setup/tests/poetry/requests-2
python_version: 2
- test_dir: python-setup/tests/poetry/requests-3
python_version: 3
- test_dir: python-setup/tests/requirements/requests-2
python_version: 2
- test_dir: python-setup/tests/requirements/requests-3
python_version: 3
- test_dir: python-setup/tests/setup_py/requests-2
python_version: 2
- test_dir: python-setup/tests/setup_py/requests-3
python_version: 3
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
- name: Initialize CodeQL
uses: ./init
with:
tools: latest
languages: python
setup-python-dependencies: false
- name: Test Auto Package Installation
run: |
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\install_tools.ps1"
powershell -File $cmd
cd $Env:GITHUB_WORKSPACE\\${{ matrix.test_dir }}
$DefaultsPath = Join-Path (Join-Path $Env:GITHUB_WORKSPACE "src") "defaults.json"
$CodeQLBundleName = (Get-Content -Raw -Path $DefaultsPath | ConvertFrom-Json).bundleVersion
$CodeQLVersion = "0.0.0-" + $CodeQLBundleName.split("-")[-1]
py -3 $Env:GITHUB_WORKSPACE\\python-setup\\auto_install_packages.py C:\\hostedtoolcache\\windows\\CodeQL\\$CodeQLVersion\\x64\\codeql
- name: Setup for extractor
run: |
echo $Env:CODEQL_PYTHON
py -3 $Env:GITHUB_WORKSPACE\\python-setup\\tests\\from_python_exe.py $Env:CODEQL_PYTHON
- name: Verify packages installed
run: |
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\tests\\check_requests_123.ps1"
powershell -File $cmd ${{ matrix.python_version }}

54
.github/workflows/release-runner.yml vendored Normal file

@@ -0,0 +1,54 @@
name: Release runner
on:
workflow_dispatch:
inputs:
bundle-tag:
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
required: false
jobs:
release-runner:
runs-on: ubuntu-latest
env:
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
strategy:
matrix:
extension: ["linux", "macos", "win.exe"]
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- uses: actions/upload-artifact@v2
with:
name: codeql-runner-${{matrix.extension}}
path: runner/dist/codeql-runner-${{matrix.extension}}
- name: Resolve Upload URL for the release
if: ${{ github.event.inputs.bundle-tag != null }}
id: save_url
run: |
UPLOAD_URL=$(curl -sS \
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
-H "Accept: application/json" \
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
echo ${UPLOAD_URL}
echo "::set-output name=upload_url::${UPLOAD_URL}"
- name: Upload Platform Package
if: ${{ github.event.inputs.bundle-tag != null }}
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.save_url.outputs.upload_url }}
asset_path: runner/dist/codeql-runner-${{matrix.extension}}
asset_name: codeql-runner-${{matrix.extension}}
asset_content_type: application/octet-stream

73
.github/workflows/split.yml vendored Normal file

@@ -0,0 +1,73 @@
#
# Split the CodeQL Bundle into platform bundles
#
# Instructions:
# 1. Upload the new codeql-bundle (codeql-bundle.tar.gz) as an asset of the
# release (codeql-bundle-20200826)
# 2. Take note of the CLI Release used by the bundle (e.g., v2.2.5)
# 3. Manually launch this workflow file (via the Actions UI) specifying
# - The CLI Release (e.g., v2.2.5)
# - The release tag (e.g., codeql-bundle-20200826)
# 4. If everything succeeds you should see 3 new assets.
#
name: Split Bundle
on:
workflow_dispatch:
inputs:
cli-release:
description: 'CodeQL CLI Release (e.g., "v2.2.5")'
required: true
bundle-tag:
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
required: true
jobs:
build:
runs-on: ubuntu-latest
env:
CLI_RELEASE: "${{ github.event.inputs.cli-release }}"
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
strategy:
fail-fast: false
matrix:
platform: ["linux64", "osx64", "win64"]
steps:
- name: Resolve Upload URL for the release
id: save_url
run: |
UPLOAD_URL=$(curl -sS \
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
-H "Accept: application/json" \
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
echo ${UPLOAD_URL}
echo "::set-output name=upload_url::${UPLOAD_URL}"
- name: Download CodeQL CLI and Bundle
run: |
wget --no-verbose "https://github.com/${GITHUB_REPOSITORY}/releases/download/${RELEASE_TAG}/codeql-bundle.tar.gz"
wget --no-verbose "https://github.com/github/codeql-cli-binaries/releases/download/${CLI_RELEASE}/codeql-${{matrix.platform}}.zip"
- name: Create Platform Package
# Replace the codeql-binaries with the platform specific ones
run: |
gunzip codeql-bundle.tar.gz
tar -f codeql-bundle.tar --delete codeql
unzip -q codeql-${{matrix.platform}}.zip
tar -f codeql-bundle.tar --append codeql
gzip codeql-bundle.tar
mv codeql-bundle.tar.gz codeql-bundle-${{matrix.platform}}.tar.gz
du -sh codeql-bundle-${{matrix.platform}}.tar.gz
- name: Upload Platform Package
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.save_url.outputs.upload_url }}
asset_path: ./codeql-bundle-${{matrix.platform}}.tar.gz
asset_name: codeql-bundle-${{matrix.platform}}.tar.gz
asset_content_type: application/tar+gzip


@@ -0,0 +1,43 @@
name: Update Supported Enterprise Server Versions
on:
schedule:
- cron: "0 0 * * *"
jobs:
update-supported-enterprise-server-versions:
runs-on: ubuntu-latest
steps:
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: "3.7"
- name: Checkout CodeQL Action
uses: actions/checkout@v2
- name: Checkout Enterprise Releases
uses: actions/checkout@v2
with:
repository: github/enterprise-releases
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}
path: ${{ github.workspace }}/enterprise-releases/
- name: Update Supported Enterprise Server Versions
run: |
cd ./.github/workflows/update-supported-enterprise-server-versions/
python3 -m pip install pipenv
pipenv install
pipenv run ./update.py
rm --recursive "$ENTERPRISE_RELEASES_PATH"
npm run build
env:
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
- name: Commit Changes
uses: peter-evans/create-pull-request@c7f493a8000b8aeb17a1332e326ba76b57cb83eb # v3.4.1
with:
commit-message: Update supported GitHub Enterprise Server versions.
title: Update supported GitHub Enterprise Server versions.
body: ""
author: GitHub <noreply@github.com>
branch: update-supported-enterprise-server-versions
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -0,0 +1,9 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
[packages]
semver = "*"


@@ -0,0 +1,27 @@
{
"_meta": {
"hash": {
"sha256": "e3ba923dcb4888e05de5448c18a732bf40197e80fabfa051a61c01b22c504879"
},
"pipfile-spec": 6,
"requires": {},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"semver": {
"hashes": [
"sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4",
"sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"
],
"index": "pypi",
"version": "==2.13.0"
}
},
"develop": {}
}


@@ -0,0 +1,43 @@
#!/usr/bin/env python3
import datetime
import json
import os
import pathlib
import semver
_API_COMPATIBILITY_PATH = pathlib.Path(__file__).absolute().parents[3] / "src" / "api-compatibility.json"
_ENTERPRISE_RELEASES_PATH = pathlib.Path(os.environ["ENTERPRISE_RELEASES_PATH"])
_RELEASE_FILE_PATH = _ENTERPRISE_RELEASES_PATH / "releases.json"
_FIRST_SUPPORTED_RELEASE = semver.VersionInfo.parse("2.22.0") # Versions older than this did not include Code Scanning.
def main():
api_compatibility_data = json.loads(_API_COMPATIBILITY_PATH.read_text())
releases = json.loads(_RELEASE_FILE_PATH.read_text())
oldest_supported_release = None
newest_supported_release = semver.VersionInfo.parse(api_compatibility_data["maximumVersion"] + ".0")
for release_version_string, release_data in releases.items():
release_version = semver.VersionInfo.parse(release_version_string + ".0")
if release_version < _FIRST_SUPPORTED_RELEASE:
continue
if release_version > newest_supported_release:
feature_freeze_date = datetime.date.fromisoformat(release_data["feature_freeze"])
if feature_freeze_date < datetime.date.today() + datetime.timedelta(weeks=2):
newest_supported_release = release_version
if oldest_supported_release is None or release_version < oldest_supported_release:
end_of_life_date = datetime.date.fromisoformat(release_data["end"])
if end_of_life_date > datetime.date.today():
oldest_supported_release = release_version
api_compatibility_data = {
"minimumVersion": f"{oldest_supported_release.major}.{oldest_supported_release.minor}",
"maximumVersion": f"{newest_supported_release.major}.{newest_supported_release.minor}",
}
_API_COMPATIBILITY_PATH.write_text(json.dumps(api_compatibility_data, sort_keys=True) + "\n")
if __name__ == "__main__":
main()

4
.gitignore vendored

@@ -1,2 +1,2 @@
/cli/
/runner/dist/
/runner/node_modules/


@@ -36,6 +36,7 @@ It is possible to run this action locally via [act](https://github.com/nektos/ac
```bash
CODEQL_LOCAL_RUN=true
GITHUB_SERVER_URL=https://github.com
# Optional, for better logging
GITHUB_JOB=<ANY_JOB_NAME>
@@ -49,6 +50,10 @@ Running locally will generate the CodeQL database and run all the queries, but i
As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you're making, you may want to add a test to this file or extend an existing one.
### Building the CodeQL runner
Navigate to the `runner` directory and run `npm install` to install dependencies needed only for compiling the CodeQL runner. Run `npm run build-runner` to output files to the `runner/dist` directory.
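A minimal sketch of these steps, assuming a POSIX shell (the job name below is an illustrative placeholder):
```bash
# Build the runner binaries (outputs to runner/dist, as described above)
cd runner
npm install
npm run build-runner
cd ..

# Environment expected for a local run of the action
export CODEQL_LOCAL_RUN=true
export GITHUB_SERVER_URL=https://github.com
export GITHUB_JOB=local-test   # optional, for better logging; any job name works
```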
## Submitting a pull request
1. [Fork][fork] and clone the repository


@@ -22,7 +22,16 @@ on:
push:
pull_request:
schedule:
- cron: '0 0 * * 0'
# ┌───────────── minute (0 - 59)
# │ ┌───────────── hour (0 - 23)
# │ │ ┌───────────── day of the month (1 - 31)
# │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
# │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
# │ │ │ │ │
# │ │ │ │ │
# │ │ │ │ │
# * * * * *
- cron: '30 1 * * 0'
jobs:
CodeQL-Build:
@@ -32,17 +41,6 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
# Must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head of the pull request.
# Only include this option if you are running this workflow on pull requests.
fetch-depth: 2
# If this run was triggered by a pull request event then checkout
# the head of the pull request instead of the merge commit.
# Only include this step if you are running this workflow on pull requests.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
@@ -98,7 +96,23 @@ Use the `config-file` parameter of the `init` action to enable the configuration
config-file: ./.github/codeql/codeql-config.yml
```
The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
```yaml
- uses: github/codeql-action/init@v1
with:
queries: <local-or-remote-query>,<another-query>
```
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:
```yaml
- uses: github/codeql-action/init@v1
with:
queries: +<local-or-remote-query>,<another-query>
```
## Troubleshooting


@@ -16,11 +16,15 @@ inputs:
ram:
description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
required: false
add-snippets:
description: Specify whether or not to add code snippets to the output sarif file.
required: false
default: "false"
threads:
description: The number of threads to be used by CodeQL.
required: false
checkout_path:
description: "The path at which the analyzed repository was checked out. Used to relativeize any absolute paths in the uploaded SARIF file."
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
required: false
default: ${{ github.workspace }}
token:
@@ -29,4 +33,4 @@ inputs:
default: ${{ toJson(matrix) }}
runs:
using: 'node12'
main: '../lib/finalize-db.js'
main: '../lib/analyze-action.js'


@@ -8,4 +8,4 @@ inputs:
default: ${{ toJson(matrix) }}
runs:
using: 'node12'
main: '../lib/autobuild.js'
main: '../lib/autobuild-action.js'


@@ -1,5 +1,5 @@
name: 'CodeQL: Init'
description: 'Setup the CodeQL tracer'
description: 'Set up CodeQL'
author: 'GitHub'
inputs:
tools:
@@ -16,6 +16,16 @@ inputs:
config-file:
description: Path of the config file to use
required: false
queries:
description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
required: false
setup-python-dependencies:
description: Try to auto-install your python dependencies
required: true
default: 'true'
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis
runs:
using: 'node12'
main: '../lib/setup-tracer.js'
main: '../lib/init-action.js'

461
lib/actions-util.js generated Normal file

@@ -0,0 +1,461 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const yaml = __importStar(require("js-yaml"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
const util_1 = require("./util");
/**
* Wrapper around core.getInput for inputs that always have a value.
* Also see getOptionalInput.
*
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
function getRequiredInput(name) {
return core.getInput(name, { required: true });
}
exports.getRequiredInput = getRequiredInput;
/**
* Wrapper around core.getInput that converts empty inputs to undefined.
* Also see getRequiredInput.
*
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
function getOptionalInput(name) {
const value = core.getInput(name);
return value.length > 0 ? value : undefined;
}
exports.getOptionalInput = getOptionalInput;
/**
* Get an environment parameter, but throw an error if it is not set.
*/
function getRequiredEnvParam(paramName) {
const value = process.env[paramName];
if (value === undefined || value.length === 0) {
throw new Error(`${paramName} environment variable must be set`);
}
core.debug(`${paramName}=${value}`);
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
/**
* Ensures all required environment variables are set in the context of a local run.
*/
function prepareLocalRunEnvironment() {
if (!util_1.isLocalRun()) {
return;
}
core.debug("Action is running locally.");
if (!process.env.GITHUB_JOB) {
core.exportVariable("GITHUB_JOB", "UNKNOWN-JOB");
}
if (!process.env.CODEQL_ACTION_ANALYSIS_KEY) {
core.exportVariable("CODEQL_ACTION_ANALYSIS_KEY", `LOCAL-RUN:${process.env.GITHUB_JOB}`);
}
}
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
/**
* Gets the SHA of the commit that is currently checked out.
*/
exports.getCommitOid = async function () {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
// the workflow has changed the current commit to the head commit instead of
// the merge commit, which must mean that git is available.
// Even if this does go wrong, it's not a huge problem for the alerts to
// be reported on the merge commit.
try {
let commitOid = "";
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["rev-parse", "HEAD"], {
silent: true,
listeners: {
stdout: (data) => {
commitOid += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
return commitOid.trim();
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
return getRequiredEnvParam("GITHUB_SHA");
}
};
function isObject(o) {
return o !== null && typeof o === "object";
}
var MissingTriggers;
(function (MissingTriggers) {
MissingTriggers[MissingTriggers["None"] = 0] = "None";
MissingTriggers[MissingTriggers["Push"] = 1] = "Push";
MissingTriggers[MissingTriggers["PullRequest"] = 2] = "PullRequest";
})(MissingTriggers || (MissingTriggers = {}));
function toCodedErrors(errors) {
return Object.entries(errors).reduce((acc, [key, value]) => {
acc[key] = { message: value, code: key };
return acc;
}, {});
}
exports.WorkflowErrors = toCodedErrors({
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
MissingHooks: `Please specify on.push and on.pull_request hooks so that Code Scanning can compare pull requests against the state of the base branch.`,
MissingPullRequestHook: `Please specify an on.pull_request hook so that Code Scanning is run against pull requests.`,
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
});
function validateWorkflow(doc) {
var _a, _b, _c, _d, _e;
const errors = [];
// .jobs[key].steps[].run
for (const job of Object.values(((_a = doc) === null || _a === void 0 ? void 0 : _a.jobs) || {})) {
for (const step of ((_b = job) === null || _b === void 0 ? void 0 : _b.steps) || []) {
// this was advice that we used to give in the README
// we actually want to run the analysis on the merge commit
// to produce results that are more in line with expectations
// (i.e. this is what will happen if you merge this PR)
// and avoid some race conditions
if (((_c = step) === null || _c === void 0 ? void 0 : _c.run) === "git checkout HEAD^2") {
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
}
}
}
let missing = MissingTriggers.None;
if (doc.on === undefined) {
missing = MissingTriggers.Push | MissingTriggers.PullRequest;
}
else if (typeof doc.on === "string") {
switch (doc.on) {
case "push":
missing = MissingTriggers.PullRequest;
break;
case "pull_request":
missing = MissingTriggers.Push;
break;
default:
missing = MissingTriggers.Push | MissingTriggers.PullRequest;
break;
}
}
else if (Array.isArray(doc.on)) {
if (!doc.on.includes("push")) {
missing = missing | MissingTriggers.Push;
}
if (!doc.on.includes("pull_request")) {
missing = missing | MissingTriggers.PullRequest;
}
}
else if (isObject(doc.on)) {
if (!Object.prototype.hasOwnProperty.call(doc.on, "pull_request")) {
missing = missing | MissingTriggers.PullRequest;
}
if (!Object.prototype.hasOwnProperty.call(doc.on, "push")) {
missing = missing | MissingTriggers.Push;
}
else {
const paths = (_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.paths;
// if you specify paths or paths-ignore you can end up with commits that have no baseline
// if they didn't change any files
// currently we cannot go back through the history and find the most recent baseline
if (Array.isArray(paths) && paths.length > 0) {
errors.push(exports.WorkflowErrors.PathsSpecified);
}
const pathsIgnore = (_e = doc.on.push) === null || _e === void 0 ? void 0 : _e["paths-ignore"];
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
}
}
if (doc.on.push) {
const push = doc.on.push.branches || [];
if (doc.on.pull_request) {
const pull_request = doc.on.pull_request.branches || [];
const difference = pull_request.filter((value) => !push.includes(value));
if (difference.length > 0) {
// there are branches in pull_request that may not have a baseline
// because we are not building them on push
errors.push(exports.WorkflowErrors.MismatchedBranches);
}
}
else if (push.length > 0) {
// push is set up to run on a subset of branches
// and you could open a PR against a branch with no baseline
errors.push(exports.WorkflowErrors.MismatchedBranches);
}
}
}
switch (missing) {
case MissingTriggers.PullRequest | MissingTriggers.Push:
errors.push(exports.WorkflowErrors.MissingHooks);
break;
case MissingTriggers.PullRequest:
errors.push(exports.WorkflowErrors.MissingPullRequestHook);
break;
case MissingTriggers.Push:
errors.push(exports.WorkflowErrors.MissingPushHook);
break;
}
return errors;
}
exports.validateWorkflow = validateWorkflow;
async function getWorkflowErrors() {
const workflow = await getWorkflow();
if (workflow === undefined) {
return [];
}
return validateWorkflow(workflow);
}
exports.getWorkflowErrors = getWorkflowErrors;
function formatWorkflowErrors(errors) {
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
const errorsList = errors.map((e) => e.message).join(" ");
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
}
exports.formatWorkflowErrors = formatWorkflowErrors;
function formatWorkflowCause(errors) {
if (errors.length === 0) {
return undefined;
}
return errors.map((e) => e.code).join(",");
}
exports.formatWorkflowCause = formatWorkflowCause;
async function getWorkflow() {
const relativePath = await getWorkflowPath();
const absolutePath = path.join(getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
try {
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
}
catch (e) {
core.warning(`Could not read workflow: ${e.toString()}`);
return undefined;
}
}
exports.getWorkflow = getWorkflow;
/**
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath() {
if (util_1.isLocalRun()) {
return getRequiredEnvParam("WORKFLOW_PATH");
}
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
owner,
repo,
run_id,
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
return workflowResponse.data.path;
}
/**
* Get the workflow run ID.
*/
function getWorkflowRunID() {
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
if (Number.isNaN(workflowRunID)) {
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
}
return workflowRunID;
}
exports.getWorkflowRunID = getWorkflowRunID;
/**
* Get the analysis key parameter for the current job.
*
* This will combine the workflow path and current job name.
* Computing this the first time requires making requests to
* the github API, but after that the result will be cached.
*/
async function getAnalysisKey() {
const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
let analysisKey = process.env[analysisKeyEnvVar];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam("GITHUB_JOB");
analysisKey = `${workflowPath}:${jobName}`;
core.exportVariable(analysisKeyEnvVar, analysisKey);
return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
/**
* Get the ref currently being analyzed.
*/
async function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam("GITHUB_REF");
// For pull request refs we want to detect whether the workflow
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
// than the 'merge' ref. If so, we want to convert the ref that
// we report back.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
const checkoutSha = await exports.getCommitOid();
if (pull_ref_regex.test(ref) &&
checkoutSha !== getRequiredEnvParam("GITHUB_SHA")) {
return ref.replace(pull_ref_regex, "refs/pull/$1/head");
}
else {
return ref;
}
}
exports.getRef = getRef;
/**
* Compose a StatusReport.
*
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
* @param status The status. Must be 'success', 'failure', or 'starting'
* @param startedAt The time this action started executing.
* @param cause Cause of failure (only supply if status is 'failure')
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = process.env["GITHUB_SHA"] || "";
const ref = await getRef();
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
let workflowRunID = -1;
if (workflowRunIDStr) {
workflowRunID = parseInt(workflowRunIDStr, 10);
}
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
const jobName = process.env["GITHUB_JOB"] || "";
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
// If running locally then the GITHUB_ACTION_REF cannot be trusted as it may be for the previous action
// See https://github.com/actions/runner/issues/803
const actionRef = isRunningLocalAction()
? undefined
: process.env["GITHUB_ACTION_REF"];
const statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
job_name: jobName,
analysis_key,
commit_oid: commitOid,
ref,
action_name: actionName,
action_ref: actionRef,
action_oid: "unknown",
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status,
};
// Add optional parameters
if (cause) {
statusReport.cause = cause;
}
if (exception) {
statusReport.exception = exception;
}
if (status === "success" || status === "failure" || status === "aborted") {
statusReport.completed_at = new Date().toISOString();
}
const matrix = getRequiredInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
function isHTTPError(arg) {
var _a;
return ((_a = arg) === null || _a === void 0 ? void 0 : _a.status) !== undefined && Number.isInteger(arg.status);
}
/**
* Send a status report to the code_scanning/analysis/status endpoint.
*
* Optionally checks the response from the API endpoint and sets the action
* as failed if the status report failed. This is only expected to be used
* when sending a 'starting' report.
*
* Returns whether sending the status report was successful or not.
*/
async function sendStatusReport(statusReport) {
if (util_1.isLocalRun()) {
core.debug("Not sending status report because this is a local run");
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
try {
await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
owner,
repo,
data: statusReportJSON,
});
return true;
}
catch (e) {
if (isHTTPError(e)) {
switch (e.status) {
case 403:
core.setFailed("The repo on which this action is running is not opted-in to CodeQL code scanning.");
return false;
case 404:
core.setFailed("Not authorized to used the CodeQL code scanning feature on this repo.");
return false;
case 422:
// schema incompatibility when reporting status
// this means that this action version is no longer compatible with the API
// we still want to continue as it is likely the analysis endpoint will work
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
core.warning("CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.");
}
else {
core.warning("CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.");
}
return true;
}
}
// something else has gone wrong and the request/response will be logged by octokit
// it's possible this is a transient error and we should continue scanning
core.error("An unexpected error occured when sending code scanning status report.");
return true;
}
}
exports.sendStatusReport = sendStatusReport;
// Is the current action executing a local copy (i.e. we're running a workflow on the codeql-action repo itself)
// as opposed to running a remote action (i.e. when another repo references us)
function isRunningLocalAction() {
const relativeScriptPath = getRelativeScriptPath();
return (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath));
}
exports.isRunningLocalAction = isRunningLocalAction;
// Get the location where the action is running from.
// This can be used to get the action's name or tell if we're running a local action.
function getRelativeScriptPath() {
const runnerTemp = getRequiredEnvParam("RUNNER_TEMP");
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
return path.relative(actionsDirectory, __filename);
}
exports.getRelativeScriptPath = getRelativeScriptPath;
//# sourceMappingURL=actions-util.js.map

1
lib/actions-util.js.map Normal file

File diff suppressed because one or more lines are too long

186
lib/actions-util.test.js generated Normal file

@@ -0,0 +1,186 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon_1 = __importDefault(require("sinon"));
const actionsutil = __importStar(require("./actions-util"));
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("getRef() throws on the empty string", async (t) => {
process.env["GITHUB_REF"] = "";
await t.throwsAsync(actionsutil.getRef);
});
ava_1.default("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = currentSha;
sinon_1.default.stub(actionsutil, "getCommitOid").resolves(currentSha);
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, expectedRef);
});
ava_1.default("getRef() returns head PR ref if GITHUB_SHA not currently checked out", async (t) => {
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
sinon_1.default.stub(actionsutil, "getCommitOid").resolves("b".repeat(40));
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, "refs/pull/1/head");
});
ava_1.default("getAnalysisKey() when a local run", async (t) => {
process.env.CODEQL_LOCAL_RUN = "true";
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
process.env.GITHUB_JOB = "";
actionsutil.prepareLocalRunEnvironment();
const actualAnalysisKey = await actionsutil.getAnalysisKey();
t.deepEqual(actualAnalysisKey, "LOCAL-RUN:UNKNOWN-JOB");
});
ava_1.default("prepareEnvironment() when a local run", (t) => {
process.env.CODEQL_LOCAL_RUN = "false";
process.env.GITHUB_JOB = "YYY";
process.env.CODEQL_ACTION_ANALYSIS_KEY = "TEST";
actionsutil.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, "YYY");
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "TEST");
process.env.CODEQL_LOCAL_RUN = "true";
actionsutil.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, "YYY");
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "TEST");
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
actionsutil.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, "YYY");
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:YYY");
process.env.GITHUB_JOB = "";
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
actionsutil.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
});
ava_1.default("validateWorkflow() when on is missing", (t) => {
const errors = actionsutil.validateWorkflow({});
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
});
ava_1.default("validateWorkflow() when on.push is missing", (t) => {
const errors = actionsutil.validateWorkflow({ on: {} });
console.log(errors);
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
});
ava_1.default("validateWorkflow() when on.push is an array missing pull_request", (t) => {
const errors = actionsutil.validateWorkflow({ on: ["push"] });
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPullRequestHook]);
});
ava_1.default("validateWorkflow() when on.push is an array missing push", (t) => {
const errors = actionsutil.validateWorkflow({ on: ["pull_request"] });
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPushHook]);
});
ava_1.default("validateWorkflow() when on.push is valid", (t) => {
const errors = actionsutil.validateWorkflow({
on: ["push", "pull_request"],
});
t.deepEqual(errors.length, 0);
});
ava_1.default("validateWorkflow() when on.push is a valid superset", (t) => {
const errors = actionsutil.validateWorkflow({
on: ["push", "pull_request", "schedule"],
});
t.deepEqual(errors.length, 0);
});
ava_1.default("validateWorkflow() when on.push should not have a path", (t) => {
const errors = actionsutil.validateWorkflow({
on: {
push: { branches: ["main"], paths: ["test/*"] },
pull_request: { branches: ["main"] },
},
});
t.deepEqual(errors, [actionsutil.WorkflowErrors.PathsSpecified]);
});
ava_1.default("validateWorkflow() when on.push is a correct object", (t) => {
const errors = actionsutil.validateWorkflow({
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
});
t.deepEqual(errors.length, 0);
});
ava_1.default("validateWorkflow() when on.push is correct with empty objects", (t) => {
const errors = actionsutil.validateWorkflow({
on: { push: undefined, pull_request: undefined },
});
console.log(errors);
t.deepEqual(errors.length, 0);
});
ava_1.default("validateWorkflow() when on.push is mismatched", (t) => {
const errors = actionsutil.validateWorkflow({
on: {
push: { branches: ["main"] },
pull_request: { branches: ["feature"] },
},
});
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
});
ava_1.default("validateWorkflow() when on.push is not mismatched", (t) => {
const errors = actionsutil.validateWorkflow({
on: {
push: { branches: ["main", "feature"] },
pull_request: { branches: ["main"] },
},
});
t.deepEqual(errors.length, 0);
});
ava_1.default("validateWorkflow() when on.push is mismatched for pull_request", (t) => {
const errors = actionsutil.validateWorkflow({
on: {
push: { branches: ["main"] },
pull_request: { branches: ["main", "feature"] },
},
});
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
});
ava_1.default("validateWorkflow() when on.pull_request for every branch but push specifies branches", (t) => {
const errors = actionsutil.validateWorkflow({
on: {
push: { branches: ["main"] },
pull_request: null,
},
});
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
});
ava_1.default("validateWorkflow() when HEAD^2 is checked out", (t) => {
const errors = actionsutil.validateWorkflow({
on: ["push", "pull_request"],
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
});
t.deepEqual(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]);
});
ava_1.default("formatWorkflowErrors() when there is one error", (t) => {
const message = actionsutil.formatWorkflowErrors([
actionsutil.WorkflowErrors.CheckoutWrongHead,
]);
t.true(message.startsWith("1 issue was detected with this workflow:"));
});
ava_1.default("formatWorkflowErrors() when there are multiple errors", (t) => {
const message = actionsutil.formatWorkflowErrors([
actionsutil.WorkflowErrors.CheckoutWrongHead,
actionsutil.WorkflowErrors.PathsSpecified,
]);
t.true(message.startsWith("2 issues were detected with this workflow:"));
});
ava_1.default("formatWorkflowCause()", (t) => {
const message = actionsutil.formatWorkflowCause([
actionsutil.WorkflowErrors.CheckoutWrongHead,
actionsutil.WorkflowErrors.PathsSpecified,
]);
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
});
//# sourceMappingURL=actions-util.test.js.map

File diff suppressed because one or more lines are too long

53
lib/analysis-paths.js generated

@@ -7,22 +7,31 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const path = __importStar(require("path"));
function isInterpretedLanguage(language) {
return language === 'javascript' || language === 'python';
return language === "javascript" || language === "python";
}
// Matches a string containing only characters that are legal to include in paths on windows.
exports.legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
exports.legalWindowsPathCharactersRegex = /^[^<>:"|?]*$/;
// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
function buildIncludeExcludeEnvVar(paths) {
// Ignore anything containing a *
paths = paths.filter(p => p.indexOf('*') === -1);
paths = paths.filter((p) => p.indexOf("*") === -1);
// Some characters are illegal in path names in windows
if (process.platform === 'win32') {
paths = paths.filter(p => p.match(exports.legalWindowsPathCharactersRegex));
if (process.platform === "win32") {
paths = paths.filter((p) => p.match(exports.legalWindowsPathCharactersRegex));
}
return paths.join('\n');
return paths.join("\n");
}
function printPathFiltersWarning(config, logger) {
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
}
}
exports.printPathFiltersWarning = printPathFiltersWarning;
function includeAndExcludeAnalysisPaths(config) {
// The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
// control which files/directories are traversed when scanning.
@@ -32,27 +41,29 @@ function includeAndExcludeAnalysisPaths(config) {
// traverse the entire file tree to determine which files are matched.
// Any paths containing "*" are not included in these.
if (config.paths.length !== 0) {
core.exportVariable('LGTM_INDEX_INCLUDE', buildIncludeExcludeEnvVar(config.paths));
process.env["LGTM_INDEX_INCLUDE"] = buildIncludeExcludeEnvVar(config.paths);
}
if (config.pathsIgnore.length !== 0) {
core.exportVariable('LGTM_INDEX_EXCLUDE', buildIncludeExcludeEnvVar(config.pathsIgnore));
// If the temporary or tools directory is in the working directory ignore that too.
const tempRelativeToWorking = path.relative(process.cwd(), config.tempDir);
const toolsRelativeToWorking = path.relative(process.cwd(), config.toolCacheDir);
let pathsIgnore = config.pathsIgnore;
if (!tempRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(tempRelativeToWorking);
}
if (!toolsRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(toolsRelativeToWorking);
}
if (pathsIgnore.length !== 0) {
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(pathsIgnore);
}
// The 'LGTM_INDEX_FILTERS' environment variable controls which files are
// extracted or ignored. It does not control which directories are traversed.
// This does understand the glob and double-glob syntax.
const filters = [];
filters.push(...config.paths.map(p => 'include:' + p));
filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
filters.push(...config.paths.map((p) => `include:${p}`));
filters.push(...config.pathsIgnore.map((p) => `exclude:${p}`));
if (filters.length !== 0) {
core.exportVariable('LGTM_INDEX_FILTERS', filters.join('\n'));
}
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 ||
config.pathsIgnore.length !== 0 ||
filters.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
process.env["LGTM_INDEX_FILTERS"] = filters.join("\n");
}
}
exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
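The rewritten includeAndExcludeAnalysisPaths sets the LGTM_INDEX_* variables directly on process.env rather than via core.exportVariable, and folds the temporary and tool-cache directories into the exclude list when they sit inside the working directory. A minimal sketch of the resulting environment, with illustrative paths and a temp directory assumed to be outside the working directory:

const analysisPaths = require("./analysis-paths");

const config = {
  languages: ["javascript"],
  queries: {},
  paths: ["src", "lib/**"],      // "lib/**" contains "*", so it is dropped from LGTM_INDEX_INCLUDE
  pathsIgnore: ["vendor"],
  originalUserInput: {},
  tempDir: "/tmp/codeql-temp",   // assumed to be outside process.cwd(), so not appended to the excludes
  toolCacheDir: "/tmp/codeql-tools",
  codeQLCmd: "",
};

analysisPaths.includeAndExcludeAnalysisPaths(config);
// process.env.LGTM_INDEX_INCLUDE === "src"
// process.env.LGTM_INDEX_EXCLUDE === "vendor"
// process.env.LGTM_INDEX_FILTERS === "include:src\ninclude:lib/**\nexclude:vendor"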

lib/analysis-paths.js.map

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAClD,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}

lib/analysis-paths.test.js generated

@@ -1,7 +1,4 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -9,35 +6,69 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const analysisPaths = __importStar(require("./analysis-paths"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("emptyPaths", async (t) => {
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
return await util.withTmpDir(async (tmpDir) => {
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
});
ava_1.default("nonEmptyPaths", async (t) => {
const config = {
languages: [],
queries: {},
paths: ['path1', 'path2', '**/path3'],
pathsIgnore: ['path4', 'path5', 'path6/**'],
originalUserInput: {},
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
return await util.withTmpDir(async (tmpDir) => {
const config = {
languages: [],
queries: {},
paths: ["path1", "path2", "**/path3"],
pathsIgnore: ["path4", "path5", "path6/**"],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
t.is(process.env["LGTM_INDEX_EXCLUDE"], "path4\npath5");
t.is(process.env["LGTM_INDEX_FILTERS"], "include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**");
});
});
ava_1.default("exclude temp dir", async (t) => {
return await util.withTmpDir(async (toolCacheDir) => {
const tempDir = path.join(process.cwd(), "codeql-runner-temp");
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: "",
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
});
//# sourceMappingURL=analysis-paths.test.js.map

lib/analysis-paths.test.js.map

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QAC3C,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/analyze-action.js generated Normal file

@@ -0,0 +1,69 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, stats, error) {
var _a, _b, _c;
const status = ((_a = stats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const statusReport = {
...statusReportBase,
...(stats || {}),
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
let stats = undefined;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
return;
}
const logger = logging_1.getActionsLogger();
const config = await config_utils_1.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
const apiDetails = {
auth: actionsUtil.getRequiredInput("token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
stats = await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), apiDetails, actionsUtil.getRequiredInput("upload") === "true", "actions", actionsUtil.getRequiredInput("output"), util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
if (error instanceof analyze_1.CodeQLAnalysisError) {
stats = { ...error.queriesStatusReport };
}
await sendStatusReport(startedAt, stats, error);
return;
}
await sendStatusReport(startedAt, stats);
}
async function runWrapper() {
try {
await run();
}
catch (error) {
core.setFailed(`analyze action failed: ${error}`);
console.log(error);
}
}
void runWrapper();
//# sourceMappingURL=analyze-action.js.map

lib/analyze-action.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAImB;AACnB,iDAA2C;AAC3C,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAM/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,MAAM,GAAG,MAAM,wBAAS,CAC5B,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QACF,KAAK,GAAG,MAAM,oBAAU,CACtB,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,MAAM,WAAW,CAAC,YAAY,EAAE,EAChC,MAAM,WAAW,CAAC,MAAM,EAAE,EAC1B,MAAM,WAAW,CAAC,cAAc,EAAE,EAClC,WAAW,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAClD,WAAW,CAAC,gBAAgB,EAAE,EAC9B,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,UAAU,EACV,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EACjD,SAAS,EACT,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACpE,MAAM,EACN,MAAM,CACP,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;SAC1C;QAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

lib/analyze.js generated Normal file

@@ -0,0 +1,136 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
class CodeQLAnalysisError extends Error {
constructor(queriesStatusReport, message) {
super(message);
this.name = "CodeQLAnalysisError";
this.queriesStatusReport = queriesStatusReport;
}
}
exports.CodeQLAnalysisError = CodeQLAnalysisError;
async function setupPythonExtractor(logger) {
const codeqlPython = process.env["CODEQL_PYTHON"];
if (codeqlPython === undefined || codeqlPython.length === 0) {
// If CODEQL_PYTHON is not set, no dependencies were installed, so we don't need to do anything
return;
}
let output = "";
const options = {
listeners: {
stdout: (data) => {
output += data.toString();
},
},
};
await new toolrunner.ToolRunner(codeqlPython, [
"-c",
"import os; import pip; print(os.path.dirname(os.path.dirname(pip.__file__)))",
], options).exec();
logger.info(`Setting LGTM_INDEX_IMPORT_PATH=${output}`);
process.env["LGTM_INDEX_IMPORT_PATH"] = output;
output = "";
await new toolrunner.ToolRunner(codeqlPython, ["-c", "import sys; print(sys.version_info[0])"], options).exec();
logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`);
process.env["LGTM_PYTHON_SETUP_VERSION"] = output;
}
async function createdDBForScannedLanguages(config, logger) {
// Insert the LGTM_INDEX_X env vars at this point so they are set when
// we extract any scanned languages.
analysisPaths.includeAndExcludeAnalysisPaths(config);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
if (languages_1.isScannedLanguage(language)) {
logger.startGroup(`Extracting ${language}`);
if (language === languages_1.Language.python) {
await setupPythonExtractor(logger);
}
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
logger.endGroup();
}
}
}
async function finalizeDatabaseCreation(config, threadsFlag, logger) {
await createdDBForScannedLanguages(config, logger);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
logger.startGroup(`Finalizing ${language}`);
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language), threadsFlag);
logger.endGroup();
}
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
const statusReport = {};
for (const language of config.languages) {
logger.startGroup(`Analyzing ${language}`);
const queries = config.queries[language];
if (queries.builtin.length === 0 && queries.custom.length === 0) {
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
try {
for (const type of ["builtin", "custom"]) {
if (queries[type].length > 0) {
const startTime = new Date().getTime();
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuitePath = `${databasePath}-queries-${type}.qls`;
const querySuiteContents = queries[type]
.map((q) => `- query: ${q}`)
.join("\n");
fs.writeFileSync(querySuitePath, querySuiteContents);
logger.debug(`Query suite file for ${language}...\n${querySuiteContents}`);
const sarifFile = path.join(sarifFolder, `${language}-${type}.sarif`);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
await codeql.databaseAnalyze(databasePath, sarifFile, querySuitePath, memoryFlag, addSnippetsFlag, threadsFlag);
logger.debug(`SARIF results for database ${language} created at "${sarifFile}"`);
logger.endGroup();
// Record the performance
const endTime = new Date().getTime();
statusReport[`analyze_${type}_queries_${language}_duration_ms`] =
endTime - startTime;
}
}
}
catch (e) {
logger.info(e);
statusReport.analyze_failure_language = language;
throw new CodeQLAnalysisError(statusReport, `Error running analysis for ${language}: ${e}`);
}
}
return statusReport;
}
exports.runQueries = runQueries;
async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, apiDetails, doUpload, mode, outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
fs.mkdirSync(outputDir, { recursive: true });
logger.info("Finalizing database creation");
await finalizeDatabaseCreation(config, threadsFlag, logger);
logger.info("Analyzing database");
const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
if (!doUpload) {
logger.info("Not uploading results");
return { ...queriesStats };
}
const uploadStats = await upload_lib.upload(outputDir, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, apiDetails, mode, logger);
return { ...queriesStats, ...uploadStats };
}
exports.runAnalyze = runAnalyze;
//# sourceMappingURL=analyze.js.map
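As the comment in runQueries notes, the selected queries are written to a .qls file next to the database rather than being passed on the command line, which sidesteps command-line length limits, particularly on Windows. A small sketch of how that suite file is built for one language's builtin queries; the query paths are illustrative:

// Mirrors the suite-file construction in runQueries above.
const databasePath = "/tmp/codeql_databases/javascript"; // illustrative database location
const builtinQueries = [
  "javascript-security-and-quality.qls", // illustrative query references
  "my-org/custom-queries/foo.ql",
];

const querySuitePath = `${databasePath}-queries-builtin.qls`;
const querySuiteContents = builtinQueries
  .map((q) => `- query: ${q}`)
  .join("\n");

console.log(`${querySuitePath}:\n${querySuiteContents}`);
// - query: javascript-security-and-quality.qls
// - query: my-org/custom-queries/foo.ql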

lib/analyze.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAE3D,gEAAkD;AAElD,qCAAqC;AAErC,2CAA0D;AAG1D,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAE/B,MAAa,mBAAoB,SAAQ,KAAK;IAG5C,YAAY,mBAAwC,EAAE,OAAe;QACnE,KAAK,CAAC,OAAO,CAAC,CAAC;QAEf,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC;QAClC,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;CACF;AATD,kDASC;AAmCD,KAAK,UAAU,oBAAoB,CAAC,MAAc;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAClD,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3D,+FAA+F;QAC/F,OAAO;KACR;IAED,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,OAAO,GAAG;QACd,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC;IAEF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ;QACE,IAAI;QACJ,8EAA8E;KAC/E,EACD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,kCAAkC,MAAM,EAAE,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAE/C,MAAM,GAAG,EAAE,CAAC;IACZ,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ,CAAC,IAAI,EAAE,wCAAwC,CAAC,EAChD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,qCAAqC,MAAM,EAAE,CAAC,CAAC;IAC3D,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,GAAG,MAAM,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;YAE5C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,MAAM,EAAE;gBAChC,MAAM,oBAAoB,CAAC,MAAM,CAAC,CAAC;aACpC;YAED,MAAM,MAAM,CAAC,sBAAsB,CACjC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,CACT,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,WAAmB,EACnB,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAC3B,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,WAAW,CACZ,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AACpD,KAAK,UAAU,UAAU,CAC9B,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,YAAY,GAAwB,EAAE,CAAC;IAE7C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,QAAQ,EAAE,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QACzC,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;YAC/D,MAAM,IAAI,KAAK,CACb,qBAAqB,QAAQ,gDAAgD,CAC9E,CAAC;SACH;QAED,IAAI;YACF,KAAK,MAAM,IAAI,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,EAAE;gBACxC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBAEvC,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAC7C,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;oBACF,uEAAuE;oBACvE,2EAA2E;oBAC3E,MAAM,cAAc,GAAG,GAAG,YAAY,YAAY,IAAI,MAAM,CAAC;oBAC7D,MAAM,kBAAkB,GAAG,OAAO,CAAC,IAAI,CAAC;yBACrC,GAAG,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC;yBACnC,IAAI,CAAC,IAAI,CAAC,CAAC;oBACd,EAAE,CAAC,aAAa,CAAC,cAAc,EAAE,kBAAkB,CAAC,CAAC;oBACrD,MAAM,CAAC,KAAK,CACV,wBAAwB,QAAQ,QAAQ,kBAAkB,EAAE,CAC7D,CAAC;oBAEF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,QAAQ,IAAI,IAAI,QAAQ,CAAC,CAAC;oBAEtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;oBAC3C,MAAM,MAAM,CAAC,eAAe,CAC1B,YAAY,EACZ,SAAS,EACT,cAAc,EACd,UAAU,EACV,eAAe,EACf,WAAW,CACZ,CAAC;oBAEF,MAAM,CAAC,KAAK,CACV,8BAA8B,QAAQ,gBAAgB,SAAS,GAAG,CACnE,CAAC;oBACF,MAAM,CAAC,QA
AQ,EAAE,CAAC;oBAElB,yBAAyB;oBACzB,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBACrC,YAAY,CAAC,WAAW,IAAI,YAAY,QAAQ,cAAc,CAAC;wBAC7D,OAAO,GAAG,SAAS,CAAC;iBACvB;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACf,YAAY,CAAC,wBAAwB,GAAG,QAAQ,CAAC;YACjD,MAAM,IAAI,mBAAmB,CAC3B,YAAY,EACZ,8BAA8B,QAAQ,KAAK,CAAC,EAAE,CAC/C,CAAC;SACH;KACF;IAED,OAAO,YAAY,CAAC;AACtB,CAAC;AA1ED,gCA0EC;AAEM,KAAK,UAAU,UAAU,CAC9B,aAA4B,EAC5B,SAAiB,EACjB,GAAW,EACX,WAA+B,EAC/B,YAAgC,EAChC,aAAiC,EACjC,YAAoB,EACpB,WAA+B,EAC/B,UAA4B,EAC5B,QAAiB,EACjB,IAAe,EACf,SAAiB,EACjB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;IAE5D,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CACnC,SAAS,EACT,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,MAAM,CACP,CAAC;IAEF,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACrC,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;KAC5B;IAED,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,SAAS,EACT,aAAa,EACb,SAAS,EACT,GAAG,EACH,WAAW,EACX,YAAY,EACZ,aAAa,EACb,YAAY,EACZ,WAAW,EACX,UAAU,EACV,IAAI,EACJ,MAAM,CACP,CAAC;IAEF,OAAO,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;AAC7C,CAAC;AA1DD,gCA0DC"}

lib/analyze.test.js generated Normal file

@@ -0,0 +1,63 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const ava_1 = __importDefault(require("ava"));
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
// Checks that the duration fields are populated for the correct language
// and correct case of builtin or custom.
ava_1.default("status report fields", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
codeql_1.setCodeQL({
databaseAnalyze: async () => undefined,
});
const memoryFlag = "";
const addSnippetsFlag = "";
const threadsFlag = "";
for (const language of Object.values(languages_1.Language)) {
const config = {
languages: [language],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
};
fs.mkdirSync(util.getCodeQLDatabasePath(config.tempDir, language), {
recursive: true,
});
config.queries[language] = {
builtin: ["foo.ql"],
custom: [],
};
const builtinStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logging_1.getRunnerLogger(true));
t.deepEqual(Object.keys(builtinStatusReport).length, 1);
t.true(`analyze_builtin_queries_${language}_duration_ms` in builtinStatusReport);
config.queries[language] = {
builtin: [],
custom: ["foo.ql"],
};
const customStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logging_1.getRunnerLogger(true));
t.deepEqual(Object.keys(customStatusReport).length, 1);
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
}
});
});
//# sourceMappingURL=analyze.test.js.map

lib/analyze.test.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"analyze.test.js","sourceRoot":"","sources":["../src/analyze.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AAEzB,8CAAuB;AAEvB,uCAAuC;AACvC,qCAAqC;AAErC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,yEAAyE;AACzE,yCAAyC;AACzC,aAAI,CAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,kBAAS,CAAC;YACR,eAAe,EAAE,KAAK,IAAI,EAAE,CAAC,SAAS;SACvC,CAAC,CAAC;QAEH,MAAM,UAAU,GAAG,EAAE,CAAC;QACtB,MAAM,eAAe,GAAG,EAAE,CAAC;QAC3B,MAAM,WAAW,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAQ,CAAC,EAAE;YAC9C,MAAM,MAAM,GAAW;gBACrB,SAAS,EAAE,CAAC,QAAQ,CAAC;gBACrB,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,EAAE;gBACf,KAAK,EAAE,EAAE;gBACT,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,MAAM;gBACf,YAAY,EAAE,MAAM;gBACpB,SAAS,EAAE,EAAE;aACd,CAAC;YACF,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE;gBACjE,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,CAAC,QAAQ,CAAC;gBACnB,MAAM,EAAE,EAAE;aACX,CAAC;YACF,MAAM,mBAAmB,GAAG,MAAM,oBAAU,CAC1C,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACxD,CAAC,CAAC,IAAI,CACJ,2BAA2B,QAAQ,cAAc,IAAI,mBAAmB,CACzE,CAAC;YAEF,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,EAAE;gBACX,MAAM,EAAE,CAAC,QAAQ,CAAC;aACnB,CAAC;YACF,MAAM,kBAAkB,GAAG,MAAM,oBAAU,CACzC,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACvD,CAAC,CAAC,IAAI,CACJ,0BAA0B,QAAQ,cAAc,IAAI,kBAAkB,CACvE,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/api-client.js generated

@@ -10,37 +10,90 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github"));
const path = __importStar(require("path"));
const core_1 = require("@actions/core");
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const retry = __importStar(require("@octokit/plugin-retry"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const semver = __importStar(require("semver"));
const actions_util_1 = require("./actions-util");
const apiCompatibility = __importStar(require("./api-compatibility.json"));
const logging_1 = require("./logging");
const util_1 = require("./util");
exports.getApiClient = function (githubAuth, githubApiUrl, allowLocalRun = false) {
var DisallowedAPIVersionReason;
(function (DisallowedAPIVersionReason) {
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
})(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
const CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR = "CODEQL_ACTION_WARNED_ABOUT_VERSION";
let hasBeenWarnedAboutVersion = false;
exports.getApiClient = function (apiDetails, mode, logger, allowLocalRun = false, possibleFailureExpected = false) {
if (util_1.isLocalRun() && !allowLocalRun) {
throw new Error('Invalid API call in local run');
throw new Error("Invalid API call in local run");
}
return new github.GitHub({
auth: parseAuth(githubAuth),
baseUrl: githubApiUrl,
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" })
const customOctokit = githubUtils.GitHub.plugin(retry.retry, (octokit, _) => {
octokit.hook.after("request", (response, __) => {
if (response.status < 400 && !possibleFailureExpected) {
if (hasBeenWarnedAboutVersion) {
return;
}
}
if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined ||
process.env[CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR] === undefined) {
return;
}
const installedVersion = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER];
const disallowedAPIVersionReason = apiVersionInRange(installedVersion, apiCompatibility.minimumVersion, apiCompatibility.maximumVersion);
const toolName = mode === "actions" ? "Action" : "Runner";
if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_OLD) {
logger.warning(`The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${installedVersion}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`);
}
if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_NEW) {
logger.warning(`GitHub Enterprise ${installedVersion} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`);
}
hasBeenWarnedAboutVersion = true;
if (mode === "actions") {
core_1.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true);
}
});
});
return new customOctokit(githubUtils.getOctokitOptions(apiDetails.auth, {
baseUrl: getApiUrl(apiDetails.url),
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" }),
}));
};
// Parses the user input as either a single token,
// or a username and password / PAT.
function parseAuth(auth) {
// Check if it's a username:password pair
const c = auth.indexOf(':');
if (c !== -1) {
return 'basic ' + Buffer.from(auth).toString('base64');
function getApiUrl(githubUrl) {
const url = new URL(githubUrl);
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://api.github.com";
}
// Otherwise use the token as it is
return auth;
// Add the /api/v3 API prefix
url.pathname = path.join(url.pathname, "api", "v3");
return url.toString();
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been coverted this function should be removed or made canonical
// Once all code has been converted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient(allowLocalRun = false) {
return exports.getApiClient(core.getInput('token'), util_1.getRequiredEnvParam('GITHUB_API_URL'), allowLocalRun);
const apiDetails = {
auth: actions_util_1.getRequiredInput("token"),
url: actions_util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
return exports.getApiClient(apiDetails, "actions", logging_1.getActionsLogger(), allowLocalRun);
}
exports.getActionsApiClient = getActionsApiClient;
function apiVersionInRange(version, minimumVersion, maximumVersion) {
if (!semver.satisfies(version, `>=${minimumVersion}`)) {
return DisallowedAPIVersionReason.ACTION_TOO_NEW;
}
if (!semver.satisfies(version, `<=${maximumVersion}`)) {
return DisallowedAPIVersionReason.ACTION_TOO_OLD;
}
return undefined;
}
exports.apiVersionInRange = apiVersionInRange;
//# sourceMappingURL=api-client.js.map
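getApiClient is now constructed from an apiDetails object ({ auth, url }) plus a mode and a logger, instead of the separate auth and API-URL arguments of the old signature. A minimal sketch of a call in "actions" mode; the token and URL are illustrative, whereas the action itself reads them from its inputs and GITHUB_SERVER_URL as getActionsApiClient above shows:

const { getApiClient } = require("./api-client");
const { getActionsLogger } = require("./logging");

const apiDetails = {
  auth: "ghp_exampletoken",            // illustrative token
  url: "https://github.example.com",   // illustrative GitHub Enterprise server URL
};

// getApiUrl() maps github.com / api.github.com to https://api.github.com and
// appends /api/v3 for other hosts, such as the Enterprise host above.
const client = getApiClient(apiDetails, "actions", getActionsLogger());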

lib/api-client.js.map

@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEhD,iCAAyD;AAE5C,QAAA,YAAY,GAAG,UAAS,UAAkB,EAAE,YAAoB,EAAE,aAAa,GAAG,KAAK;IAClG,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB;QACE,IAAI,EAAE,SAAS,CAAC,UAAU,CAAC;QAC3B,OAAO,EAAE,YAAY;QACrB,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC;AAEF,kDAAkD;AAClD,oCAAoC;AACpC,SAAS,SAAS,CAAC,IAAY;IAC7B,yCAAyC;IACzC,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5B,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;QACZ,OAAO,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;KACxD;IAED,mCAAmC;IACnC,OAAO,IAAI,CAAC;AACd,CAAC;AAED,uFAAuF;AACvF,oFAAoF;AACpF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,OAAO,oBAAY,CACjB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,0BAAmB,CAAC,gBAAgB,CAAC,EACrC,aAAa,CAAC,CAAC;AACnB,CAAC;AALD,kDAKC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,wCAA+C;AAC/C,uEAAyD;AACzD,6DAA+C;AAE/C,0EAAgD;AAChD,+CAAiC;AAEjC,iDAAuE;AACvE,2EAA6D;AAC7D,uCAAqD;AACrD,iCAA0C;AAE1C,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAOD,MAAM,gCAAgC,GAAG,6BAA6B,CAAC;AACvE,MAAM,0CAA0C,GAC9C,oCAAoC,CAAC;AACvC,IAAI,yBAAyB,GAAG,KAAK,CAAC;AAEzB,QAAA,YAAY,GAAG,UAC1B,UAA4B,EAC5B,IAAU,EACV,MAAc,EACd,aAAa,GAAG,KAAK,EACrB,uBAAuB,GAAG,KAAK;IAE/B,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,MAAM,aAAa,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,CAAC,EAAE,EAAE;QAC1E,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,QAA8B,EAAE,EAAE,EAAE,EAAE;YACnE,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,IAAI,CAAC,uBAAuB,EAAE;gBACrD,IAAI,yBAAyB,EAAE;oBAC7B,OAAO;iBACR;aACF;YACD,IACE,QAAQ,CAAC,OAAO,CAAC,gCAAgC,CAAC,KAAK,SAAS;gBAChE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,SAAS,EACrE;gBACA,OAAO;aACR;YACD,MAAM,gBAAgB,GAAG,QAAQ,CAAC,OAAO,CACvC,gCAAgC,CACvB,CAAC;YACZ,MAAM,0BAA0B,GAAG,iBAAiB,CAClD,gBAAgB,EAChB,gBAAgB,CAAC,cAAc,EAC/B,gBAAgB,CAAC,cAAc,CAChC,CAAC;YAEF,MAAM,QAAQ,GAAG,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;YAE1D,IACE,0BAA0B,KAAK,0BAA0B,CAAC,cAAc,EACxE;gBACA,MAAM,CAAC,OAAO,CACZ,cAAc,QAAQ,6EAA6E,gBAAgB,qFAAqF,QAAQ,GAAG,CACpN,CAAC;aACH;YACD,IACE,0BAA0B,KAAK,0BAA0B,CAAC,cAAc,EACxE;gBACA,MAAM,CAAC,OAAO,CACZ,qBAAqB,gBAAgB,gEAAgE,QAAQ,kIAAkI,QAAQ,GAAG,CAC3P,CAAC;aACH;YACD,yBAAyB,GAAG,IAAI,CAAC;YACjC,IAAI,IAAI,KAAK,SAAS,EAAE;gBACtB,qBAAc,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;aAClE;QACH,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IACH,OAAO,IAAI,aAAa,CACtB,WAAW,CAAC,iBAAiB,CAAC,UAAU,CAAC,IAAI,EAAE;QAC7C,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,kDAAkD;IAClD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,+BAAgB,CAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,kCAAmB,CAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,oBAAY,CAAC,UAAU,EAAE,SAAS,EAAE,0BAAgB,EAAE,EAAE,aAAa,CAAC,CAAC;AAChF,CAAC;AAPD,kDAOC;AAED,SAAgB,iBAAiB,CAC/B,OAAe,EACf,cAAsB,EACtB,cAAsB;IAEtB,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,OAAO,EAAE,KAAK,cAAc,EAAE,CAAC,EAAE;QACrD,OAAO,0BAA0B,CAAC,cAAc,CAAC;KAClD;IACD,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,OAAO,EAAE,KAAK,cAAc,EAAE,CAAC,EAAE;QACrD,OAAO,0BAA0B,CAAC,cAAc,CAAC;KAClD;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAZD,8CAYC"}

lib/api-client.test.js generated Normal file

@@ -0,0 +1,17 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const api_client_1 = require("./api-client");
ava_1.default("allowed API versions", async (t) => {
t.is(api_client_1.apiVersionInRange("1.33.0", "1.33", "2.0"), undefined);
t.is(api_client_1.apiVersionInRange("1.33.1", "1.33", "2.0"), undefined);
t.is(api_client_1.apiVersionInRange("1.34.0", "1.33", "2.0"), undefined);
t.is(api_client_1.apiVersionInRange("2.0.0", "1.33", "2.0"), undefined);
t.is(api_client_1.apiVersionInRange("2.0.1", "1.33", "2.0"), undefined);
t.is(api_client_1.apiVersionInRange("1.32.0", "1.33", "2.0"), api_client_1.DisallowedAPIVersionReason.ACTION_TOO_NEW);
t.is(api_client_1.apiVersionInRange("2.1.0", "1.33", "2.0"), api_client_1.DisallowedAPIVersionReason.ACTION_TOO_OLD);
});
//# sourceMappingURL=api-client.test.js.map

lib/api-client.test.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,6CAA6E;AAE7E,aAAI,CAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,CAAC,CAAC,EAAE,CAAC,8BAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC5D,CAAC,CAAC,EAAE,CAAC,8BAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC5D,CAAC,CAAC,EAAE,CAAC,8BAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC5D,CAAC,CAAC,EAAE,CAAC,8BAAiB,CAAC,OAAO,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3D,CAAC,CAAC,EAAE,CAAC,8BAAiB,CAAC,OAAO,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3D,CAAC,CAAC,EAAE,CACF,8BAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,CAAC,EAC1C,uCAA0B,CAAC,cAAc,CAC1C,CAAC;IACF,CAAC,CAAC,EAAE,CACF,8BAAiB,CAAC,OAAO,EAAE,MAAM,EAAE,KAAK,CAAC,EACzC,uCAA0B,CAAC,cAAc,CAC1C,CAAC;AACJ,CAAC,CAAC,CAAC"}

lib/api-compatibility.json Normal file

@@ -0,0 +1 @@
{ "maximumVersion": "3.0", "minimumVersion": "2.22" }

lib/autobuild-action.js generated Normal file

@@ -0,0 +1,64 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const autobuild_1 = require("./autobuild");
const config_utils = __importStar(require("./config-utils"));
const logging_1 = require("./logging");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
var _a, _b;
const status = failingLanguage !== undefined || cause !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("autobuild", status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
const statusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const logger = logging_1.getActionsLogger();
const startedAt = new Date();
let language = undefined;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("autobuild", "starting", startedAt)))) {
return;
}
const config = await config_utils.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
language = autobuild_1.determineAutobuildLanguage(config, logger);
if (language !== undefined) {
await autobuild_1.runAutobuild(language, config, logger);
}
}
catch (error) {
core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error.message}`);
console.log(error);
await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error);
return;
}
await sendCompletedStatusReport(startedAt, language ? [language] : []);
}
async function runWrapper() {
try {
await run();
}
catch (error) {
core.setFailed(`autobuild action failed. ${error}`);
console.log(error);
}
}
void runWrapper();
//# sourceMappingURL=autobuild-action.js.map

lib/autobuild-action.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAS7C,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,WAAW,EACX,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

lib/autobuild.js generated

@@ -1,65 +1,32 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const codeql_1 = require("./codeql");
const config_utils = __importStar(require("./config-utils"));
const util = __importStar(require("./util"));
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
var _a, _b;
const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
const statusReportBase = await util.createStatusReportBase('autobuild', status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
const statusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(','),
autobuild_failure: failingLanguage,
};
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
let language;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
return;
}
const config = await config_utils.getConfig();
// Attempt to find a language to autobuild
// We want pick the dominant language in the repo from the ones we're able to build
// The languages are sorted in order specified by user or by lines of code if we got
// them from the GitHub API, so try to build the first language on the list.
const autobuildLanguages = config.languages.filter(codeql_1.isTracedLanguage);
language = autobuildLanguages[0];
if (!language) {
core.info("None of the languages in this project require extra build steps");
return;
}
core.debug(`Detected dominant traced language: ${language}`);
if (autobuildLanguages.length > 1) {
core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
}
core.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = codeql_1.getCodeQL();
await codeQL.runAutobuild(language);
core.endGroup();
const languages_1 = require("./languages");
function determineAutobuildLanguage(config, logger) {
// Attempt to find a language to autobuild
// We want pick the dominant language in the repo from the ones we're able to build
// The languages are sorted in order specified by user or by lines of code if we got
// them from the GitHub API, so try to build the first language on the list.
const autobuildLanguages = config.languages.filter(languages_1.isTracedLanguage);
const language = autobuildLanguages[0];
if (!language) {
logger.info("None of the languages in this project require extra build steps");
return undefined;
}
catch (error) {
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
console.log(error);
await sendCompletedStatusReport(startedAt, [language], language, error);
return;
logger.debug(`Detected dominant traced language: ${language}`);
if (autobuildLanguages.length > 1) {
logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages
.slice(1)
.join(" and ")}, you must replace this call with custom build steps.`);
}
await sendCompletedStatusReport(startedAt, [language]);
return language;
}
run().catch(e => {
core.setFailed("autobuild action failed. " + e);
console.log(e);
});
exports.determineAutobuildLanguage = determineAutobuildLanguage;
async function runAutobuild(language, config, logger) {
logger.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = codeql_1.getCodeQL(config.codeQLCmd);
await codeQL.runAutobuild(language);
logger.endGroup();
}
exports.runAutobuild = runAutobuild;
//# sourceMappingURL=autobuild.js.map
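determineAutobuildLanguage and runAutobuild are now separate exports, so the entry point (autobuild-action.js above) can decide whether an autobuild is needed before running one. A minimal sketch of that two-step use; the config is a deliberately partial, illustrative object showing only the fields these helpers read:

const { determineAutobuildLanguage, runAutobuild } = require("./autobuild");
const { getRunnerLogger } = require("./logging");

async function autobuildIfNeeded() {
  const logger = getRunnerLogger(true);
  const config = {
    languages: ["csharp", "javascript"],   // illustrative; a real Config has more fields
    codeQLCmd: "/usr/local/bin/codeql",    // illustrative CodeQL CLI path
  };

  // "csharp" is the first traced language in the list, so it is chosen;
  // "javascript" is a scanned language and needs no build step.
  const language = determineAutobuildLanguage(config, logger);
  if (language !== undefined) {
    await runAutobuild(language, config, logger);
  }
}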

lib/autobuild.js.map

@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAuD;AACvD,6DAA+C;AAC/C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GAAG,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CAAC,CAAC;IAChB,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,CAAC;IACb,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC7G,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,EAAE,CAAC;QAE9C,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,yBAAgB,CAAC,CAAC;QACrE,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEjC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}

57
lib/cli.js generated

@@ -1,57 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = require("commander");
const path = __importStar(require("path"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const program = new commander_1.Command();
program.version('0.0.1');
function parseGithubApiUrl(inputUrl) {
try {
const url = new URL(inputUrl);
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
return 'https://api.github.com';
}
// Add the API path if it's not already present.
if (url.pathname.indexOf('/api/v3') === -1) {
url.pathname = path.join(url.pathname, 'api', 'v3');
}
return url.toString();
}
catch (e) {
throw new Error(`"${inputUrl}" is not a valid URL`);
}
}
const logger = logging_1.getCLILogger();
program
.command('upload')
.description('Uploads a SARIF file, or all SARIF files from a directory, to code scanning')
.requiredOption('--sarif-file <file>', 'SARIF file to upload; can also be a directory for uploading multiple')
.requiredOption('--repository <repository>', 'Repository name')
.requiredOption('--commit <commit>', 'SHA of commit that was analyzed')
.requiredOption('--ref <ref>', 'Name of ref that was analyzed')
.requiredOption('--github-url <url>', 'URL of GitHub instance')
.requiredOption('--github-auth <auth>', 'GitHub Apps token, or of the form "username:token" if using a personal access token')
.option('--checkout-path <path>', 'Checkout path (default: current working directory)')
.action(async (cmd) => {
try {
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, cmd.ref, undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubApiUrl(cmd.githubUrl), 'cli', logger);
}
catch (e) {
logger.error('Upload failed');
logger.error(e);
process.exitCode = 1;
}
});
program.parse(process.argv);
//# sourceMappingURL=cli.js.map

File diff suppressed because one or more lines are too long

504
lib/codeql.js generated

@@ -6,148 +6,188 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const http = __importStar(require("@actions/http-client"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
const stream = __importStar(require("stream"));
const globalutil = __importStar(require("util"));
const v4_1 = __importDefault(require("uuid/v4"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const http = __importStar(require("@actions/http-client"));
const toolcache = __importStar(require("@actions/tool-cache"));
const semver = __importStar(require("semver"));
const uuid_1 = require("uuid");
const actions_util_1 = require("./actions-util");
const api = __importStar(require("./api-client"));
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
const error_matcher_1 = require("./error-matcher");
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
const util = __importStar(require("./util"));
/**
* Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
* Can be overridden in tests using `setCodeQL`.
*/
let cachedCodeQL = undefined;
/**
* Environment variable used to store the location of the CodeQL CLI executable.
* Value is set by setupCodeQL and read by getCodeQL.
*/
const CODEQL_ACTION_CMD = "CODEQL_ACTION_CMD";
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLActionRepository() {
// Actions do not know their own repository name,
// so we currently use this hack to find the name based on where our files are.
// This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
const relativeScriptPath = path.relative(actionsDirectory, __filename);
// This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout.
if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
function getCodeQLBundleName() {
let platform;
if (process.platform === "win32") {
platform = "win64";
}
else if (process.platform === "linux") {
platform = "linux64";
}
else if (process.platform === "darwin") {
platform = "osx64";
}
else {
return "codeql-bundle.tar.gz";
}
return `codeql-bundle-${platform}.tar.gz`;
}
function getCodeQLActionRepository(mode, logger) {
if (mode !== "actions") {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
const relativeScriptPathParts = relativeScriptPath.split(path.sep);
return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
if (process.env["GITHUB_ACTION_REPOSITORY"] !== undefined) {
return process.env["GITHUB_ACTION_REPOSITORY"];
}
// The Actions Runner used with GitHub Enterprise Server 2.22 did not set the GITHUB_ACTION_REPOSITORY variable.
// This fallback logic can be removed after the end-of-support for 2.22 on 2021-09-23.
if (actions_util_1.isRunningLocalAction()) {
// This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout.
logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository.");
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
logger.info("GITHUB_ACTION_REPOSITORY environment variable was not set. Falling back to legacy method of finding the GitHub Action.");
const relativeScriptPathParts = actions_util_1.getRelativeScriptPath().split(path.sep);
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
}
async function getCodeQLBundleDownloadURL() {
const codeQLActionRepository = getCodeQLActionRepository();
async function getCodeQLBundleDownloadURL(apiDetails, mode, logger) {
const codeQLActionRepository = getCodeQLActionRepository(mode, logger);
const potentialDownloadSources = [
// This GitHub instance, and this Action.
[util.getInstanceAPIURL(), codeQLActionRepository],
[apiDetails.url, codeQLActionRepository],
// This GitHub instance, and the canonical Action.
[util.getInstanceAPIURL(), CODEQL_DEFAULT_ACTION_REPOSITORY],
[apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY],
// GitHub.com, and the canonical Action.
[util.GITHUB_DOTCOM_API_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
[util.GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
];
// We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
for (let downloadSource of uniqueDownloadSources) {
let [apiURL, repository] = downloadSource;
const codeQLBundleName = getCodeQLBundleName();
for (const downloadSource of uniqueDownloadSources) {
const [apiURL, repository] = downloadSource;
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
if (apiURL === util.GITHUB_DOTCOM_API_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
if (apiURL === util.GITHUB_DOTCOM_URL &&
repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
break;
}
let [repositoryOwner, repositoryName] = repository.split("/");
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release = await api.getActionsApiClient().repos.getReleaseByTag({
const release = await api
.getApiClient(apiDetails, mode, logger, false, true)
.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: CODEQL_BUNDLE_VERSION
tag: CODEQL_BUNDLE_VERSION,
});
for (let asset of release.data.assets) {
if (asset.name === CODEQL_BUNDLE_NAME) {
core.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
for (const asset of release.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
return asset.url;
}
}
}
catch (e) {
core.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
}
}
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
}
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url, headers) {
const client = new http.HttpClient('CodeQL Action');
const dest = path.join(util.getRequiredEnvParam('RUNNER_TEMP'), v4_1.default());
async function toolcacheDownloadTool(url, headers, tempDir, logger) {
const client = new http.HttpClient("CodeQL Action");
const dest = path.join(tempDir, uuid_1.v4());
const response = await client.get(url, headers);
if (response.message.statusCode !== 200) {
const err = new toolcache.HTTPError(response.message.statusCode);
core.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
throw err;
logger.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
throw new Error(`Unexpected HTTP response: ${response.message.statusCode}`);
}
const pipeline = globalutil.promisify(stream.pipeline);
fs.mkdirSync(path.dirname(dest), { recursive: true });
await pipeline(response.message, fs.createWriteStream(dest));
return dest;
}
async function setupCodeQL() {
async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logger) {
// Setting these two env vars makes the toolcache code safe to use outside,
// of actions but this is obviously not a great thing we're doing and it would
// be better to write our own implementation to use outside of actions.
process.env["RUNNER_TEMP"] = tempDir;
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
try {
let codeqlURL = core.getInput('tools');
// We use the special value of 'latest' to prioritize the version in the
// defaults over any pinned cached version.
const forceLatest = codeqlURL === "latest";
if (forceLatest) {
codeqlURL = undefined;
}
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
// If we find the specified version, we always use that.
let codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer);
// If we don't find the requested version, in some cases we may allow a
// different version to save download time if the version hasn't been
// specified explicitly (in which case we always honor it).
if (!codeqlFolder && !codeqlURL && !forceLatest) {
const codeqlVersions = toolcache.findAllVersions("CodeQL");
if (codeqlVersions.length === 1) {
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]);
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
codeqlFolder = tmpCodeqlFolder;
}
}
}
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
}
else {
if (!codeqlURL) {
codeqlURL = await getCodeQLBundleDownloadURL();
codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, mode, logger);
}
const headers = { accept: 'application/octet-stream' };
const headers = { accept: "application/octet-stream" };
// We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom.
if (codeqlURL.startsWith(util.getInstanceAPIURL() + "/")) {
core.debug('Downloading CodeQL bundle with token.');
let token = core.getInput('token', { required: true });
headers.authorization = `token ${token}`;
if (codeqlURL.startsWith(`${apiDetails.url}/`)) {
logger.debug("Downloading CodeQL bundle with token.");
headers.authorization = `token ${apiDetails.auth}`;
}
else {
core.debug('Downloading CodeQL bundle without token.');
logger.debug("Downloading CodeQL bundle without token.");
}
let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers);
core.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
const codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger);
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer);
}
let codeqlCmd = path.join(codeqlFolder, 'codeql', 'codeql');
if (process.platform === 'win32') {
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
if (process.platform === "win32") {
codeqlCmd += ".exe";
}
else if (process.platform !== 'linux' && process.platform !== 'darwin') {
throw new Error("Unsupported platform: " + process.platform);
else if (process.platform !== "linux" && process.platform !== "darwin") {
throw new Error(`Unsupported platform: ${process.platform}`);
}
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
core.exportVariable(CODEQL_ACTION_CMD, codeqlCmd);
return cachedCodeQL;
return { codeql: cachedCodeQL, toolsVersion: codeqlURLVersion };
}
catch (e) {
core.error(e);
logger.error(e);
throw new Error("Unable to download and extract CodeQL CLI");
}
}
@@ -157,30 +197,38 @@ function getCodeQLURLVersion(url) {
if (match === null || match.length < 2) {
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
}
let version = match[1];
return match[1];
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
function convertToSemVer(version, logger) {
if (!semver.valid(version)) {
core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = '0.0.0-' + version;
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = `0.0.0-${version}`;
}
const s = semver.clean(version);
if (!s) {
throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
throw new Error(`Bundle version ${version} is not in SemVer format.`);
}
return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
function getCodeQL() {
exports.convertToSemVer = convertToSemVer;
/**
* Use the CodeQL executable located at the given path.
*/
function getCodeQL(cmd) {
if (cachedCodeQL === undefined) {
const codeqlCmd = util.getRequiredEnvParam(CODEQL_ACTION_CMD);
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
cachedCodeQL = getCodeQLForCmd(cmd);
}
return cachedCodeQL;
}
exports.getCodeQL = getCodeQL;
function resolveFunction(partialCodeql, methodName) {
if (typeof partialCodeql[methodName] !== 'function') {
function resolveFunction(partialCodeql, methodName, defaultImplementation) {
if (typeof partialCodeql[methodName] !== "function") {
if (defaultImplementation !== undefined) {
return defaultImplementation;
}
const dummyMethod = () => {
throw new Error('CodeQL ' + methodName + ' method not correctly defined');
throw new Error(`CodeQL ${methodName} method not correctly defined`);
};
return dummyMethod;
}
@@ -194,189 +242,219 @@ function resolveFunction(partialCodeql, methodName) {
*/
function setCodeQL(partialCodeql) {
cachedCodeQL = {
getDir: resolveFunction(partialCodeql, 'getDir'),
printVersion: resolveFunction(partialCodeql, 'printVersion'),
getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
runAutobuild: resolveFunction(partialCodeql, 'runAutobuild'),
extractScannedLanguage: resolveFunction(partialCodeql, 'extractScannedLanguage'),
finalizeDatabase: resolveFunction(partialCodeql, 'finalizeDatabase'),
resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"),
printVersion: resolveFunction(partialCodeql, "printVersion"),
getTracerEnv: resolveFunction(partialCodeql, "getTracerEnv"),
databaseInit: resolveFunction(partialCodeql, "databaseInit"),
runAutobuild: resolveFunction(partialCodeql, "runAutobuild"),
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
databaseAnalyze: resolveFunction(partialCodeql, "databaseAnalyze"),
};
return cachedCodeQL;
}
exports.setCodeQL = setCodeQL;
/**
* Get the cached CodeQL object. Should only be used from tests.
*
* TODO: Work out a good way for tests to get this from the test context
* instead of having to have this method.
*/
function getCachedCodeQL() {
if (cachedCodeQL === undefined) {
// Should never happen as setCodeQL is called by testing-utils.setupTests
throw new Error("cachedCodeQL undefined");
}
return cachedCodeQL;
}
exports.getCachedCodeQL = getCachedCodeQL;
function getCodeQLForCmd(cmd) {
return {
getDir: function () {
return path.dirname(cmd);
getPath() {
return cmd;
},
printVersion: async function () {
await exec.exec(cmd, [
'version',
'--format=json'
]);
async printVersion() {
await new toolrunner.ToolRunner(cmd, ["version", "--format=json"]).exec();
},
getTracerEnv: async function (databasePath, compilerSpec) {
let envFile = path.resolve(databasePath, 'working', 'env.tmp');
const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
await exec.exec(cmd, [
'database',
'trace-command',
async getTracerEnv(databasePath) {
// Write tracer-env.js to a temp location.
const tracerEnvJs = path.resolve(databasePath, "working", "tracer-env.js");
fs.mkdirSync(path.dirname(tracerEnvJs), { recursive: true });
fs.writeFileSync(tracerEnvJs, `
const fs = require('fs');
const env = {};
for (let entry of Object.entries(process.env)) {
const key = entry[0];
const value = entry[1];
if (typeof value !== 'undefined' && key !== '_' && !key.startsWith('JAVA_MAIN_CLASS_')) {
env[key] = value;
}
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');`);
const envFile = path.resolve(databasePath, "working", "env.tmp");
await new toolrunner.ToolRunner(cmd, [
"database",
"trace-command",
databasePath,
...compilerSpecArg,
...getExtraOptionsFromEnv(['database', 'trace-command']),
...getExtraOptionsFromEnv(["database", "trace-command"]),
process.execPath,
path.resolve(__dirname, 'tracer-env.js'),
envFile
]);
return JSON.parse(fs.readFileSync(envFile, 'utf-8'));
tracerEnvJs,
envFile,
]).exec();
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
},
databaseInit: async function (databasePath, language, sourceRoot) {
await exec.exec(cmd, [
'database',
'init',
async databaseInit(databasePath, language, sourceRoot) {
await new toolrunner.ToolRunner(cmd, [
"database",
"init",
databasePath,
'--language=' + language,
'--source-root=' + sourceRoot,
...getExtraOptionsFromEnv(['database', 'init']),
]);
`--language=${language}`,
`--source-root=${sourceRoot}`,
...getExtraOptionsFromEnv(["database", "init"]),
]).exec();
},
runAutobuild: async function (language) {
const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
const autobuildCmd = path.join(path.dirname(cmd), language, 'tools', cmdName);
async runAutobuild(language) {
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
const autobuildCmd = path.join(path.dirname(cmd), language, "tools", cmdName);
// Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
// This is because of an issue with Azure pipelines timing out connections after 4 minutes
// and Maven not properly handling closed connections
// Otherwise long build processes will timeout when pulling down Java packages
// https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
await exec.exec(autobuildCmd);
const javaToolOptions = process.env["JAVA_TOOL_OPTIONS"] || "";
process.env["JAVA_TOOL_OPTIONS"] = [
...javaToolOptions.split(/\s+/),
"-Dhttp.keepAlive=false",
"-Dmaven.wagon.http.pool=false",
].join(" ");
await new toolrunner.ToolRunner(autobuildCmd).exec();
},
extractScannedLanguage: async function (databasePath, language) {
async extractScannedLanguage(databasePath, language) {
// Get extractor location
let extractorPath = '';
await exec.exec(cmd, [
'resolve',
'extractor',
'--format=json',
'--language=' + language,
...getExtraOptionsFromEnv(['resolve', 'extractor']),
let extractorPath = "";
await new toolrunner.ToolRunner(cmd, [
"resolve",
"extractor",
"--format=json",
`--language=${language}`,
...getExtraOptionsFromEnv(["resolve", "extractor"]),
], {
silent: true,
listeners: {
stdout: (data) => { extractorPath += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
});
stdout: (data) => {
extractorPath += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
// Set trace command
const ext = process.platform === 'win32' ? '.cmd' : '.sh';
const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
const ext = process.platform === "win32" ? ".cmd" : ".sh";
const traceCommand = path.resolve(JSON.parse(extractorPath), "tools", `autobuild${ext}`);
// Run trace command
await exec.exec(cmd, [
'database',
'trace-command',
...getExtraOptionsFromEnv(['database', 'trace-command']),
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [
"database",
"trace-command",
...getExtraOptionsFromEnv(["database", "trace-command"]),
databasePath,
'--',
traceCommand
]);
"--",
traceCommand,
], error_matcher_1.errorMatchers);
},
finalizeDatabase: async function (databasePath) {
await exec.exec(cmd, [
'database',
'finalize',
...getExtraOptionsFromEnv(['database', 'finalize']),
databasePath
]);
async finalizeDatabase(databasePath, threadsFlag) {
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [
"database",
"finalize",
threadsFlag,
...getExtraOptionsFromEnv(["database", "finalize"]),
databasePath,
], error_matcher_1.errorMatchers);
},
resolveQueries: async function (queries, extraSearchPath) {
async resolveQueries(queries, extraSearchPath) {
const codeqlArgs = [
'resolve',
'queries',
"resolve",
"queries",
...queries,
'--format=bylanguage',
...getExtraOptionsFromEnv(['resolve', 'queries'])
"--format=bylanguage",
...getExtraOptionsFromEnv(["resolve", "queries"]),
];
if (extraSearchPath !== undefined) {
codeqlArgs.push('--search-path', extraSearchPath);
codeqlArgs.push("--search-path", extraSearchPath);
}
let output = '';
await exec.exec(cmd, codeqlArgs, {
let output = "";
await new toolrunner.ToolRunner(cmd, codeqlArgs, {
listeners: {
stdout: (data) => {
output += data.toString();
}
}
});
},
},
}).exec();
return JSON.parse(output);
},
databaseAnalyze: async function (databasePath, sarifFile, querySuite) {
await exec.exec(cmd, [
'database',
'analyze',
util.getMemoryFlag(),
util.getThreadsFlag(),
async databaseAnalyze(databasePath, sarifFile, querySuite, memoryFlag, addSnippetsFlag, threadsFlag) {
await new toolrunner.ToolRunner(cmd, [
"database",
"analyze",
memoryFlag,
threadsFlag,
databasePath,
'--format=sarif-latest',
'--output=' + sarifFile,
'--no-sarif-add-snippets',
...getExtraOptionsFromEnv(['database', 'analyze']),
querySuite
]);
}
"--min-disk-free=1024",
"--format=sarif-latest",
"--sarif-multicause-markdown",
`--output=${sarifFile}`,
addSnippetsFlag,
...getExtraOptionsFromEnv(["database", "analyze"]),
querySuite,
]).exec();
},
};
}
function isTracedLanguage(language) {
return ['cpp', 'java', 'csharp'].includes(language);
}
exports.isTracedLanguage = isTracedLanguage;
function isScannedLanguage(language) {
return !isTracedLanguage(language);
}
exports.isScannedLanguage = isScannedLanguage;
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*/
function getExtraOptionsFromEnv(path) {
let options = util.getExtraOptionsEnvParam();
return getExtraOptions(options, path, []);
function getExtraOptionsFromEnv(paths) {
const options = util.getExtraOptionsEnvParam();
return getExtraOptions(options, paths, []);
}
/**
* Gets `options` as an array of extra option strings.
*
* - throws an exception mentioning `pathInfo` if this conversion is impossible.
*/
function asExtraOptions(options, pathInfo) {
if (options === undefined) {
return [];
}
if (!Array.isArray(options)) {
const msg = `The extra options for '${pathInfo.join(".")}' ('${JSON.stringify(options)}') are not in an array.`;
throw new Error(msg);
}
return options.map((o) => {
const t = typeof o;
if (t !== "string" && t !== "number" && t !== "boolean") {
const msg = `The extra option for '${pathInfo.join(".")}' ('${JSON.stringify(o)}') is not a primitive value.`;
throw new Error(msg);
}
return `${o}`;
});
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*
* - the special terminal step name '*' in `options` matches all path steps
* - throws an exception if this conversion is impossible.
*
* Exported for testing.
*/
function getExtraOptions(options, path, pathInfo) {
function getExtraOptions(options, paths, pathInfo) {
var _a, _b, _c;
/**
* Gets `options` as an array of extra option strings.
*
* - throws an exception mentioning `pathInfo` if this conversion is impossible.
*/
function asExtraOptions(options, pathInfo) {
if (options === undefined) {
return [];
}
if (!Array.isArray(options)) {
const msg = `The extra options for '${pathInfo.join('.')}' ('${JSON.stringify(options)}') are not in an array.`;
throw new Error(msg);
}
return options.map(o => {
const t = typeof o;
if (t !== 'string' && t !== 'number' && t !== 'boolean') {
const msg = `The extra option for '${pathInfo.join('.')}' ('${JSON.stringify(o)}') is not a primitive value.`;
throw new Error(msg);
}
return o + '';
});
}
let all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a['*'], pathInfo.concat('*'));
let specific = path.length === 0 ?
asExtraOptions(options, pathInfo) :
getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[path[0]], (_c = path) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(path[0]));
const all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a["*"], pathInfo.concat("*"));
const specific = paths.length === 0
? asExtraOptions(options, pathInfo)
: getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[paths[0]], (_c = paths) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(paths[0]));
return all.concat(specific);
}
exports.getExtraOptions = getExtraOptions;

File diff suppressed because one or more lines are too long

146
lib/codeql.test.js generated

@@ -10,46 +10,116 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const codeql = __importStar(require("./codeql"));
const defaults = __importStar(require("./defaults.json"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
const sampleApiDetails = {
auth: "token",
url: "https://github.com",
};
ava_1.default("download codeql bundle cache", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env['GITHUB_WORKSPACE'] = tmpDir;
process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
const versions = ['20200601', '20200610'];
const versions = ["20200601", "20200610"];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
nock_1.default('https://example.com')
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
await codeql.setupCodeQL();
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
}
const cachedVersions = toolcache.findAllVersions('CodeQL');
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
ava_1.default('parse codeql bundle url version', t => {
ava_1.default("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
await util.withTmpDir(async (tmpDir) => {
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
});
});
ava_1.default("don't download codeql bundle cache with pinned different version cached", async (t) => {
await util.withTmpDir(async (tmpDir) => {
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
});
});
ava_1.default("download codeql bundle cache with different version cached (not pinned)", async (t) => {
await util.withTmpDir(async (tmpDir) => {
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: "osx64";
nock_1.default("https://github.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
ava_1.default('download codeql bundle cache with pinned different version cached if "latests" tools specified', async (t) => {
await util.withTmpDir(async (tmpDir) => {
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: "osx64";
nock_1.default("https://github.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
ava_1.default("parse codeql bundle url version", (t) => {
t.deepEqual(codeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
});
ava_1.default("convert to semver", (t) => {
const tests = {
'20200601': '0.0.0-20200601',
'20200601.0': '0.0.0-20200601.0',
'20200601.0.0': '20200601.0.0',
'1.2.3': '1.2.3',
'1.2.3-alpha': '1.2.3-alpha',
'1.2.3-beta.1': '1.2.3-beta.1',
"20200601": "0.0.0-20200601",
"20200601.0": "0.0.0-20200601.0",
"20200601.0.0": "20200601.0.0",
"1.2.3": "1.2.3",
"1.2.3-alpha": "1.2.3-alpha",
"1.2.3-beta.1": "1.2.3-beta.1",
};
for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`;
try {
const parsedVersion = codeql.getCodeQLURLVersion(url);
const parsedVersion = codeql.convertToSemVer(version, logging_1.getRunnerLogger(true));
t.deepEqual(parsedVersion, expectedVersion);
}
catch (e) {
@@ -57,26 +127,26 @@ ava_1.default('parse codeql bundle url version', t => {
}
}
});
ava_1.default('getExtraOptions works for explicit paths', t => {
t.deepEqual(codeql.getExtraOptions({}, ['foo'], []), []);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ['foo'], []), ['42']);
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ['foo', 'bar'], []), ['42']);
ava_1.default("getExtraOptions works for explicit paths", (t) => {
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ["foo", "bar"], []), ["42"]);
});
ava_1.default('getExtraOptions works for wildcards', t => {
t.deepEqual(codeql.getExtraOptions({ '*': [42] }, ['foo'], []), ['42']);
ava_1.default("getExtraOptions works for wildcards", (t) => {
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
});
ava_1.default('getExtraOptions works for wildcards and explicit paths', t => {
let o1 = { '*': [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ['foo'], []), ['42', '87']);
let o2 = { '*': [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o2, ['foo', 'bar'], []), ['42']);
let o3 = { '*': [42], foo: { '*': [87], bar: [99] } };
let p = ['foo', 'bar'];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ['42', '87', '99']);
ava_1.default("getExtraOptions works for wildcards and explicit paths", (t) => {
const o1 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
const o2 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o2, ["foo", "bar"], []), ["42"]);
const o3 = { "*": [42], foo: { "*": [87], bar: [99] } };
const p = ["foo", "bar"];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
});
ava_1.default('getExtraOptions throws for bad content', t => {
t.throws(() => codeql.getExtraOptions({ '*': 42 }, ['foo'], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ['foo'], []));
t.throws(() => codeql.getExtraOptions({ '*': [42], foo: { '*': 87, bar: [99] } }, ['foo', 'bar'], []));
ava_1.default("getExtraOptions throws for bad content", (t) => {
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ "*": [42], foo: { "*": 87, bar: [99] } }, ["foo", "bar"], []));
});
//# sourceMappingURL=codeql.test.js.map

File diff suppressed because one or more lines are too long

464
lib/config-utils.js generated

@@ -7,28 +7,19 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const fs = __importStar(require("fs"));
const yaml = __importStar(require("js-yaml"));
const path = __importStar(require("path"));
const yaml = __importStar(require("js-yaml"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
const util = __importStar(require("./util"));
const languages_1 = require("./languages");
// Property names from the user-supplied config file.
const NAME_PROPERTY = 'name';
const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
const QUERIES_PROPERTY = 'queries';
const QUERIES_USES_PROPERTY = 'uses';
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
const PATHS_PROPERTY = 'paths';
// All the languages supported by CodeQL
const ALL_LANGUAGES = ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'];
// Some alternate names for languages
const LANGUAGE_ALIASES = {
'c': 'cpp',
'typescript': 'javascript',
};
const NAME_PROPERTY = "name";
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
const QUERIES_PROPERTY = "queries";
const QUERIES_USES_PROPERTY = "uses";
const PATHS_IGNORE_PROPERTY = "paths-ignore";
const PATHS_PROPERTY = "paths";
/**
* A list of queries from https://github.com/github/codeql that
* we don't want to run. Disabling them here is a quicker alternative to
@@ -39,14 +30,13 @@ const LANGUAGE_ALIASES = {
* Format is a map from language to an array of path suffixes of .ql files.
*/
const DISABLED_BUILTIN_QUERIES = {
'csharp': [
'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
]
csharp: [
"ql/src/Security Features/CWE-937/VulnerablePackage.ql",
"ql/src/Security Features/CWE-451/MissingXFrameOptions.ql",
],
};
function queryIsDisabled(language, query) {
return (DISABLED_BUILTIN_QUERIES[language] || [])
.some(disabledQuery => query.endsWith(disabledQuery));
return (DISABLED_BUILTIN_QUERIES[language] || []).some((disabledQuery) => query.endsWith(disabledQuery));
}
/**
* Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are
@@ -56,63 +46,73 @@ function validateQueries(resolvedQueries) {
const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
if (noDeclaredLanguageQueries.length !== 0) {
throw new Error('The following queries do not declare a language. ' +
'Their qlpack.yml files are either missing or is invalid.\n' +
noDeclaredLanguageQueries.join('\n'));
throw new Error(`${"The following queries do not declare a language. " +
"Their qlpack.yml files are either missing or is invalid.\n"}${noDeclaredLanguageQueries.join("\n")}`);
}
const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
if (multipleDeclaredLanguagesQueries.length !== 0) {
throw new Error('The following queries declare multiple languages. ' +
'Their qlpack.yml files are either missing or is invalid.\n' +
multipleDeclaredLanguagesQueries.join('\n'));
throw new Error(`${"The following queries declare multiple languages. " +
"Their qlpack.yml files are either missing or is invalid.\n"}${multipleDeclaredLanguagesQueries.join("\n")}`);
}
}
/**
* Run 'codeql resolve queries' and add the results to resultMap
*
* If a checkout path is given then the queries are assumed to be custom queries
* and an error will be thrown if there is anything invalid about the queries.
* If a checkout path is not given then the queries are assumed to be builtin
* queries, and error checking will be suppressed.
*/
async function runResolveQueries(resultMap, toResolve, extraSearchPath, errorOnInvalidQueries) {
const codeQl = codeql_1.getCodeQL();
const resolvedQueries = await codeQl.resolveQueries(toResolve, extraSearchPath);
for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
if (resultMap[language] === undefined) {
resultMap[language] = [];
}
resultMap[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
}
if (errorOnInvalidQueries) {
async function runResolveQueries(codeQL, resultMap, toResolve, extraSearchPath) {
const resolvedQueries = await codeQL.resolveQueries(toResolve, extraSearchPath);
if (extraSearchPath !== undefined) {
validateQueries(resolvedQueries);
}
for (const [language, queryPaths] of Object.entries(resolvedQueries.byLanguage)) {
if (resultMap[language] === undefined) {
resultMap[language] = {
builtin: [],
custom: [],
};
}
const queries = Object.keys(queryPaths).filter((q) => !queryIsDisabled(language, q));
if (extraSearchPath !== undefined) {
resultMap[language].custom.push(...queries);
}
else {
resultMap[language].builtin.push(...queries);
}
}
}
/**
* Get the set of queries included by default.
*/
async function addDefaultQueries(languages, resultMap) {
const suites = languages.map(l => l + '-code-scanning.qls');
await runResolveQueries(resultMap, suites, undefined, false);
async function addDefaultQueries(codeQL, languages, resultMap) {
const suites = languages.map((l) => `${l}-code-scanning.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
}
// The set of acceptable values for built-in suites from the codeql bundle
const builtinSuites = ['security-extended', 'security-and-quality'];
const builtinSuites = ["security-extended", "security-and-quality"];
/**
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite.
*/
async function addBuiltinSuiteQueries(configFile, languages, resultMap, suiteName) {
const suite = builtinSuites.find((suite) => suite === suiteName);
if (!suite) {
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
const found = builtinSuites.find((suite) => suite === suiteName);
if (!found) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
const suites = languages.map(l => l + '-' + suiteName + '.qls');
await runResolveQueries(resultMap, suites, undefined, false);
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
}
/**
* Retrieve the set of queries at localQueryPath and add them to resultMap.
*/
async function addLocalQueries(configFile, resultMap, localQueryPath) {
async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath, configFile) {
// Resolve the local path against the workspace so that when this is
// passed to codeql it resolves to exactly the path we expect it to resolve to.
const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
let absoluteQueryPath = path.join(checkoutPath, localQueryPath);
// Check the file exists
if (!fs.existsSync(absoluteQueryPath)) {
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
@@ -120,23 +120,21 @@ async function addLocalQueries(configFile, resultMap, localQueryPath) {
// Call this after checking file exists, because it'll fail if file doesn't exist
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
// Check the local path doesn't jump outside the repo using '..' or symlinks
if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(checkoutPath) + path.sep)) {
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
}
// Get the root of the current repo to use when resolving query dependencies
const rootOfRepo = util.getRequiredEnvParam('GITHUB_WORKSPACE');
await runResolveQueries(resultMap, [absoluteQueryPath], rootOfRepo, true);
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath);
}
/**
* Retrieve the set of queries at the referenced remote repo and add them to resultMap.
*/
async function addRemoteQueries(configFile, resultMap, queryUses) {
let tok = queryUses.split('@');
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile) {
let tok = queryUses.split("@");
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const ref = tok[1];
tok = tok[0].split('/');
tok = tok[0].split("/");
// The first token is the owner
// The second token is the repo
// The rest is a path, if there is more than one token combine them to form the full path
@@ -144,16 +142,16 @@ async function addRemoteQueries(configFile, resultMap, queryUses) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
// Check none of the parts of the repository name are empty
if (tok[0].trim() === '' || tok[1].trim() === '') {
if (tok[0].trim() === "" || tok[1].trim() === "") {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const nwo = tok[0] + '/' + tok[1];
const nwo = `${tok[0]}/${tok[1]}`;
// Checkout the external repository
const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref);
const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, githubUrl, tempDir, logger);
const queryPath = tok.length > 2
? path.join(rootOfRepo, tok.slice(2).join('/'))
: rootOfRepo;
await runResolveQueries(resultMap, [queryPath], rootOfRepo, true);
? path.join(checkoutPath, tok.slice(2).join("/"))
: checkoutPath;
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath);
}
/**
* Parse a query 'uses' field to a discrete set of query files and update resultMap.
@@ -163,23 +161,23 @@ async function addRemoteQueries(configFile, resultMap, queryUses) {
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*/
async function parseQueryUses(configFile, languages, resultMap, queryUses) {
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, githubUrl, logger, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
}
// Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) {
await addLocalQueries(configFile, resultMap, queryUses.slice(2));
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), checkoutPath, configFile);
return;
}
// Check for one of the builtin suites
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
await addBuiltinSuiteQueries(configFile, languages, resultMap, queryUses);
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
return;
}
// Otherwise, must be a reference to another repo
await addRemoteQueries(configFile, resultMap, queryUses);
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile);
}
// Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately
@@ -187,155 +185,143 @@ async function parseQueryUses(configFile, languages, resultMap, queryUses) {
const pathStarsRegex = /.*(?:\*\*[^/].*|\*\*$|[^/]\*\*.*)/;
// Characters that are supported by filters in workflows, but not by us.
// See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/;
const filterPatternCharactersRegex = /.*[?+[\]!].*/;
// Checks that a paths of paths-ignore entry is valid, possibly modifying it
// to make it valid, or if not possible then throws an error.
function validateAndSanitisePath(originalPath, propertyName, configFile) {
function validateAndSanitisePath(originalPath, propertyName, configFile, logger) {
// Take a copy so we don't modify the original path, so we can still construct error messages
let path = originalPath;
let newPath = originalPath;
// All paths are relative to the src root, so strip off leading slashes.
while (path.charAt(0) === '/') {
path = path.substring(1);
while (newPath.charAt(0) === "/") {
newPath = newPath.substring(1);
}
// Trailing ** are redundant, so strip them off
if (path.endsWith('/**')) {
path = path.substring(0, path.length - 2);
if (newPath.endsWith("/**")) {
newPath = newPath.substring(0, newPath.length - 2);
}
// An empty path is not allowed as it's meaningless
if (path === '') {
throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" is not an invalid path. ' +
'It is not necessary to include it, and it is not allowed to exclude it.'));
if (newPath === "") {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" is not an invalid path. ` +
`It is not necessary to include it, and it is not allowed to exclude it.`));
}
// Check for illegal uses of **
if (path.match(pathStarsRegex)) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an invalid "**" wildcard. ' +
'They must be immediately preceeded and followed by a slash as in "/**/", or come at the start or end.'));
if (newPath.match(pathStarsRegex)) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an invalid "**" wildcard. ` +
`They must be immediately preceded and followed by a slash as in "/**/", or come at the start or end.`));
}
// Check for other regex characters that we don't support.
// Output a warning so the user knows, but otherwise continue normally.
if (path.match(filterPatternCharactersRegex)) {
core.warning(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an unsupported character. ' +
'The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.'));
if (newPath.match(filterPatternCharactersRegex)) {
logger.warning(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an unsupported character. ` +
`The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.`));
}
// Ban any uses of backslash for now.
// This may not play nicely with project layouts.
// This restriction can be lifted later if we determine they are ok.
if (path.indexOf('\\') !== -1) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an "\\" character. These are not allowed in filters. ' +
'If running on windows we recommend using "/" instead for path filters.'));
if (newPath.indexOf("\\") !== -1) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an "\\" character. These are not allowed in filters. ` +
`If running on windows we recommend using "/" instead for path filters.`));
}
return path;
return newPath;
}
exports.validateAndSanitisePath = validateAndSanitisePath;
// An undefined configFile in some of these functions indicates that
// the property was in a workflow file, not a config file
function getNameInvalid(configFile) {
return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
return getConfigFilePropertyError(configFile, NAME_PROPERTY, "must be a non-empty string");
}
exports.getNameInvalid = getNameInvalid;
function getDisableDefaultQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, "must be a boolean");
}
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
function getQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array");
}
exports.getQueriesInvalid = getQueriesInvalid;
function getQueryUsesInvalid(configFile, queryUses) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
'), a relative path, or be of the form "owner/repo[/path]@ref"' +
(queryUses !== undefined ? '\n Found: ' + queryUses : ''));
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `must be a built-in suite (${builtinSuites.join(" or ")}), a relative path, or be of the form "owner/repo[/path]@ref"${queryUses !== undefined ? `\n Found: ${queryUses}` : ""}`);
}
exports.getQueryUsesInvalid = getQueryUsesInvalid;
function getPathsIgnoreInvalid(configFile) {
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, "must be an array of non-empty strings");
}
exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
function getPathsInvalid(configFile) {
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, "must be an array of non-empty strings");
}
exports.getPathsInvalid = getPathsInvalid;
function getLocalPathOutsideOfRepository(configFile, localPath) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" is outside of the repository`);
}
exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
function getLocalPathDoesNotExist(configFile, localPath) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" does not exist in the repository`);
}
exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
return 'The configuration file "' + configFile + '" is outside of the workspace';
return `The configuration file "${configFile}" is outside of the workspace`;
}
exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
function getConfigFileDoesNotExistErrorMessage(configFile) {
return 'The configuration file "' + configFile + '" does not exist';
return `The configuration file "${configFile}" does not exist`;
}
exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
function getConfigFileRepoFormatInvalidMessage(configFile) {
let error = 'The configuration file "' + configFile + '" is not a supported remote file reference.';
error += ' Expected format <owner>/<repository>/<file-path>@<ref>';
let error = `The configuration file "${configFile}" is not a supported remote file reference.`;
error += " Expected format <owner>/<repository>/<file-path>@<ref>";
return error;
}
exports.getConfigFileRepoFormatInvalidMessage = getConfigFileRepoFormatInvalidMessage;
function getConfigFileFormatInvalidMessage(configFile) {
return 'The configuration file "' + configFile + '" could not be read';
return `The configuration file "${configFile}" could not be read`;
}
exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage;
function getConfigFileDirectoryGivenMessage(configFile) {
return 'The configuration file "' + configFile + '" looks like a directory, not a file';
return `The configuration file "${configFile}" looks like a directory, not a file`;
}
exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
function getConfigFilePropertyError(configFile, property, error) {
return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
if (configFile === undefined) {
return `The workflow property "${property}" is invalid: ${error}`;
}
else {
return `The configuration file "${configFile}" is invalid: property "${property}" ${error}`;
}
}
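A minimal sketch of the two message shapes the updated helper produces, depending on whether the property came from the workflow (configFile undefined) or from a config file; the function name and sample values here are illustrative:
function describePropertyError(configFile, property, error) {
    if (configFile === undefined) {
        return `The workflow property "${property}" is invalid: ${error}`;
    }
    return `The configuration file "${configFile}" is invalid: property "${property}" ${error}`;
}
console.log(describePropertyError(undefined, "queries", "must be an array"));
// The workflow property "queries" is invalid: must be an array
console.log(describePropertyError("./.github/codeql/config.yml", "queries", "must be an array"));
// The configuration file "./.github/codeql/config.yml" is invalid: property "queries" must be an array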
function getNoLanguagesError() {
return "Did not detect any languages to analyze. " +
"Please update input in workflow or check that GitHub detects the correct languages in your repository.";
return ("Did not detect any languages to analyze. " +
"Please update input in workflow or check that GitHub detects the correct languages in your repository.");
}
exports.getNoLanguagesError = getNoLanguagesError;
function getUnknownLanguagesError(languages) {
return "Did not recognise the following languages: " + languages.join(', ');
return `Did not recognise the following languages: ${languages.join(", ")}`;
}
exports.getUnknownLanguagesError = getUnknownLanguagesError;
/**
* Gets the set of languages in the current repository
*/
async function getLanguagesInRepo() {
var _a;
// Translate between GitHub's API names for languages and ours
const codeqlLanguages = {
'C': 'cpp',
'C++': 'cpp',
'C#': 'csharp',
'Go': 'go',
'Java': 'java',
'JavaScript': 'javascript',
'TypeScript': 'javascript',
'Python': 'python',
};
let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
if (repo_nwo) {
let owner = repo_nwo[0];
let repo = repo_nwo[1];
core.debug(`GitHub repo ${owner} ${repo}`);
const response = await api.getActionsApiClient(true).repos.listLanguages({
owner,
repo
});
core.debug("Languages API response: " + JSON.stringify(response));
// The GitHub API is going to return languages in order of popularity,
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
let languages = new Set();
for (let lang in response.data) {
if (lang in codeqlLanguages) {
languages.add(codeqlLanguages[lang]);
}
async function getLanguagesInRepo(repository, apiDetails, mode, logger) {
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
const response = await api
.getApiClient(apiDetails, mode, logger, true)
.repos.listLanguages({
owner: repository.owner,
repo: repository.repo,
});
logger.debug(`Languages API response: ${JSON.stringify(response)}`);
// The GitHub API is going to return languages in order of popularity,
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
const languages = new Set();
for (const lang of Object.keys(response.data)) {
const parsedLang = languages_1.parseLanguage(lang);
if (parsedLang !== undefined) {
languages.add(parsedLang);
}
return [...languages];
}
else {
return [];
}
return [...languages];
}
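A standalone sketch of the ordering trick described in the comments: the Languages API lists languages most-popular first, and a Set keeps insertion order while de-duplicating aliases, so spreading it back into an array preserves popularity order. The alias map is the one from the removed code, the API payload is hypothetical, and the new code delegates the mapping to parseLanguage instead:
const codeqlLanguages = {
    "C": "cpp", "C++": "cpp", "C#": "csharp", "Go": "go",
    "Java": "java", "JavaScript": "javascript", "TypeScript": "javascript", "Python": "python",
};
const apiResponse = { TypeScript: 90000, JavaScript: 45000, Python: 1200 }; // hypothetical payload, most popular first
const ordered = new Set();
for (const lang of Object.keys(apiResponse)) {
    const mapped = codeqlLanguages[lang];
    if (mapped !== undefined) {
        ordered.add(mapped);
    }
}
console.log([...ordered]); // ["javascript", "python"] - de-duplicated, still in popularity order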
/**
* Get the languages to analyse.
@@ -347,17 +333,17 @@ async function getLanguagesInRepo() {
* If no languages could be detected from either the workflow or the repository
* then throw an error.
*/
async function getLanguages() {
async function getLanguages(languagesInput, repository, apiDetails, mode, logger) {
// Obtain from action input 'languages' if set
let languages = core.getInput('languages', { required: false })
.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
core.info("Languages from configuration: " + JSON.stringify(languages));
let languages = (languagesInput || "")
.split(",")
.map((x) => x.trim())
.filter((x) => x.length > 0);
logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo();
core.info("Automatically detected languages: " + JSON.stringify(languages));
languages = await getLanguagesInRepo(repository, apiDetails, mode, logger);
logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
}
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
@@ -365,57 +351,74 @@ async function getLanguages() {
throw new Error(getNoLanguagesError());
}
// Make sure they are supported
const checkedLanguages = [];
const parsedLanguages = [];
const unknownLanguages = [];
for (let language of languages) {
// Normalise to lower case
language = language.toLowerCase();
// Resolve any known aliases
if (language in LANGUAGE_ALIASES) {
language = LANGUAGE_ALIASES[language];
}
const checkedLanguage = ALL_LANGUAGES.find(l => l === language);
if (checkedLanguage === undefined) {
for (const language of languages) {
const parsedLanguage = languages_1.parseLanguage(language);
if (parsedLanguage === undefined) {
unknownLanguages.push(language);
}
else if (checkedLanguages.indexOf(checkedLanguage) === -1) {
checkedLanguages.push(checkedLanguage);
else if (parsedLanguages.indexOf(parsedLanguage) === -1) {
parsedLanguages.push(parsedLanguage);
}
}
if (unknownLanguages.length > 0) {
throw new Error(getUnknownLanguagesError(unknownLanguages));
}
return checkedLanguages;
return parsedLanguages;
}
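The normalisation of the languages input before any repository lookup, restated as a tiny sketch (function name is illustrative): split on commas, trim, and drop empty entries; an empty result triggers automatic detection.
function splitLanguagesInput(languagesInput) {
    return (languagesInput || "")
        .split(",")
        .map((x) => x.trim())
        .filter((x) => x.length > 0);
}
console.log(splitLanguagesInput(" javascript, python ,")); // ["javascript", "python"]
console.log(splitLanguagesInput(undefined));               // [] - falls back to getLanguagesInRepo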
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, githubUrl, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, githubUrl, logger);
}
}
// Returns true if either no queries were provided in the workflow.
// or if the queries in the workflow were provided in "additive" mode,
// indicating that they shouldn't override the config queries but
// should instead be added in addition
function shouldAddConfigFileQueries(queriesInput) {
if (queriesInput) {
return queriesInput.trimStart().substr(0, 1) === "+";
}
return true;
}
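The "+" convention for the queries input, restated as a sketch with sample values (the helper mirrors shouldAddConfigFileQueries above; it is not the exported function):
function addsToConfigQueries(queriesInput) {
    if (queriesInput) {
        return queriesInput.trimStart().substr(0, 1) === "+";
    }
    return true; // no workflow queries given, so the config file queries apply unchanged
}
console.log(addsToConfigQueries(undefined));            // true  - config file queries used as-is
console.log(addsToConfigQueries("./override"));         // false - workflow queries replace config queries
console.log(addsToConfigQueries("+./extra1,./extra2")); // true  - workflow queries added on top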
/**
* Get the default config for when the user has not supplied one.
*/
async function getDefaultConfig() {
const languages = await getLanguages();
async function getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger) {
const languages = await getLanguages(languagesInput, repository, apiDetails, mode, logger);
const queries = {};
await addDefaultQueries(languages, queries);
await addDefaultQueries(codeQL, languages, queries);
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails.url, logger);
}
return {
languages: languages,
queries: queries,
languages,
queries,
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
};
}
exports.getDefaultConfig = getDefaultConfig;
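For orientation, the shape of the object this now returns, with illustrative values only (the queries entry uses the builtin/custom split introduced elsewhere in this diff; real paths depend on the runner):
const exampleDefaultConfig = {
    languages: ["javascript"],
    queries: { javascript: { builtin: ["javascript-code-scanning.qls"], custom: [] } }, // illustrative
    pathsIgnore: [],
    paths: [],
    originalUserInput: {},
    tempDir: "/tmp/codeql-action",                       // hypothetical
    toolCacheDir: "/tmp/codeql-action/tools",            // hypothetical
    codeQLCmd: "/tmp/codeql-action/tools/codeql/codeql", // hypothetical
};
console.log(Object.keys(exampleDefaultConfig).join(", "));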
/**
* Load the config from the given file.
*/
async function loadConfig(configFile) {
async function loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger) {
let parsedYAML;
if (isLocal(configFile)) {
// Treat the config file as relative to the workspace
const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
configFile = path.resolve(workspacePath, configFile);
parsedYAML = getLocalConfig(configFile, workspacePath);
configFile = path.resolve(checkoutPath, configFile);
parsedYAML = getLocalConfig(configFile, checkoutPath);
}
else {
parsedYAML = await getRemoteConfig(configFile);
parsedYAML = await getRemoteConfig(configFile, apiDetails, mode, logger);
}
// Validate that the 'name' property is syntactically correct,
// even though we don't use the value yet.
@@ -427,7 +430,7 @@ async function loadConfig(configFile) {
throw new Error(getNameInvalid(configFile));
}
}
const languages = await getLanguages();
const languages = await getLanguages(languagesInput, repository, apiDetails, mode, logger);
const queries = {};
const pathsIgnore = [];
const paths = [];
@@ -439,45 +442,56 @@ async function loadConfig(configFile) {
disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
}
if (!disableDefaultQueries) {
await addDefaultQueries(languages, queries);
await addDefaultQueries(codeQL, languages, queries);
}
if (QUERIES_PROPERTY in parsedYAML) {
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails.url, logger);
}
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
throw new Error(getQueriesInvalid(configFile));
}
for (const query of parsedYAML[QUERIES_PROPERTY]) {
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
if (!(QUERIES_USES_PROPERTY in query) ||
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(configFile, languages, queries, query[QUERIES_USES_PROPERTY]);
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, apiDetails.url, logger, configFile);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
throw new Error(getPathsIgnoreInvalid(configFile));
}
parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
if (typeof path !== "string" || path === '') {
for (const ignorePath of parsedYAML[PATHS_IGNORE_PROPERTY]) {
if (typeof ignorePath !== "string" || ignorePath === "") {
throw new Error(getPathsIgnoreInvalid(configFile));
}
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
});
pathsIgnore.push(validateAndSanitisePath(ignorePath, PATHS_IGNORE_PROPERTY, configFile, logger));
}
}
if (PATHS_PROPERTY in parsedYAML) {
if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
throw new Error(getPathsInvalid(configFile));
}
parsedYAML[PATHS_PROPERTY].forEach(path => {
if (typeof path !== "string" || path === '') {
for (const includePath of parsedYAML[PATHS_PROPERTY]) {
if (typeof includePath !== "string" || includePath === "") {
throw new Error(getPathsInvalid(configFile));
}
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
});
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
}
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
for (const language of languages) {
if (queries[language] === undefined || queries[language].length === 0) {
if (queries[language] === undefined ||
(queries[language].builtin.length === 0 &&
queries[language].custom.length === 0)) {
throw new Error(`Did not detect any queries to run for ${language}. ` +
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
}
@@ -487,7 +501,10 @@ async function loadConfig(configFile) {
queries,
pathsIgnore,
paths,
originalUserInput: parsedYAML
originalUserInput: parsedYAML,
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
};
}
/**
@@ -496,19 +513,18 @@ async function loadConfig(configFile) {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
async function initConfig() {
const configFile = core.getInput('config-file');
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger) {
let config;
// If no config file was provided create an empty one
if (configFile === '') {
core.debug('No configuration file was provided');
config = await getDefaultConfig();
if (!configFile) {
logger.debug("No configuration file was provided");
config = await getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger);
}
else {
config = await loadConfig(configFile);
config = await loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger);
}
// Save the config so we can easily access it again in the future
await saveConfig(config);
await saveConfig(config, logger);
return config;
}
exports.initConfig = initConfig;
@@ -517,28 +533,30 @@ function isLocal(configPath) {
if (configPath.indexOf("./") === 0) {
return true;
}
return (configPath.indexOf("@") === -1);
return configPath.indexOf("@") === -1;
}
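How the config-file input is classified as local or remote, using the same rule as isLocal above (sample references are hypothetical):
function looksLocal(configPath) {
    if (configPath.indexOf("./") === 0) {
        return true;
    }
    return configPath.indexOf("@") === -1;
}
console.log(looksLocal("./.github/codeql/config.yml"));             // true  - resolved against the checkout
console.log(looksLocal(".github/codeql/config.yml"));               // true  - no "@", still treated as local
console.log(looksLocal("octo-org/codeql-config/config.yaml@main")); // false - fetched through the API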
function getLocalConfig(configFile, workspacePath) {
function getLocalConfig(configFile, checkoutPath) {
// Error if the config file is now outside of the workspace
if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
if (!(configFile + path.sep).startsWith(checkoutPath + path.sep)) {
throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
}
// Error if the file does not exist
if (!fs.existsSync(configFile)) {
throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
}
return yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
return yaml.safeLoad(fs.readFileSync(configFile, "utf8"));
}
async function getRemoteConfig(configFile) {
async function getRemoteConfig(configFile, apiDetails, mode, logger) {
// retrieve the various parts of the config location, and ensure they're present
const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)");
const pieces = format.exec(configFile);
// 5 = 4 groups + the whole expression
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
}
const response = await api.getActionsApiClient(true).repos.getContents({
const response = await api
.getApiClient(apiDetails, mode, logger, true)
.repos.getContent({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
path: pieces.groups.path,
@@ -554,42 +572,38 @@ async function getRemoteConfig(configFile) {
else {
throw new Error(getConfigFileFormatInvalidMessage(configFile));
}
return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
return yaml.safeLoad(Buffer.from(fileContents, "base64").toString("binary"));
}
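How a remote reference is decomposed by the regex above (the sample reference matches the format the tests use; a reference without a file path fails to match and is reported as an invalid format):
const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)");
const pieces = format.exec("octo-org/codeql-config/config.yaml@main");
if (pieces !== null && pieces.groups !== undefined) {
    console.log(pieces.groups); // owner: octo-org, repo: codeql-config, path: config.yaml, ref: main
}
console.log(format.exec("octo-org/codeql-config@main")); // null - no file path, so the reference is rejected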
/**
* Get the file path where the parsed config will be stored.
*/
function getPathToParsedConfigFile() {
return path.join(util.getRequiredEnvParam('RUNNER_TEMP'), 'config');
function getPathToParsedConfigFile(tempDir) {
return path.join(tempDir, "config");
}
exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
/**
* Store the given config to the path returned from getPathToParsedConfigFile.
*/
async function saveConfig(config) {
async function saveConfig(config, logger) {
const configString = JSON.stringify(config);
const configFile = getPathToParsedConfigFile();
const configFile = getPathToParsedConfigFile(config.tempDir);
fs.mkdirSync(path.dirname(configFile), { recursive: true });
fs.writeFileSync(configFile, configString, 'utf8');
core.debug('Saved config:');
core.debug(configString);
fs.writeFileSync(configFile, configString, "utf8");
logger.debug("Saved config:");
logger.debug(configString);
}
/**
* Get the config.
*
* If this is the first time in a workflow that this is being called then
* this will parse the config from the user input. The parsed config is then
* stored to a known location. On the second and further calls, this will
* return the contents of the parsed config from the known location.
* Get the config that has been saved to the given temp dir.
* If the config could not be found then returns undefined.
*/
async function getConfig() {
const configFile = getPathToParsedConfigFile();
async function getConfig(tempDir, logger) {
const configFile = getPathToParsedConfigFile(tempDir);
if (!fs.existsSync(configFile)) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
return undefined;
}
const configString = fs.readFileSync(configFile, 'utf8');
core.debug('Loaded config:');
core.debug(configString);
const configString = fs.readFileSync(configFile, "utf8");
logger.debug("Loaded config:");
logger.debug(configString);
return JSON.parse(configString);
}
exports.getConfig = getConfig;
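A round-trip sketch of the persistence scheme (temp directory and saved object are illustrative): the parsed config is written as JSON to <tempDir>/config; before anything has been saved the file does not exist, which is why getConfig now returns undefined.
const fs = require("fs");
const os = require("os");
const path = require("path");

const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "codeql-config-"));
const configFile = path.join(tempDir, "config"); // same layout as getPathToParsedConfigFile
console.log(fs.existsSync(configFile));          // false - getConfig would return undefined here

fs.writeFileSync(configFile, JSON.stringify({ languages: ["javascript"] }), "utf8");
console.log(JSON.parse(fs.readFileSync(configFile, "utf8"))); // { languages: [ 'javascript' ] }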

File diff suppressed because one or more lines are too long

lib/config-utils.test.js generated

@@ -10,41 +10,44 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const sinon_1 = __importDefault(require("sinon"));
const api = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
function setInput(name, value) {
// Transformation copied from
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
if (value !== undefined) {
process.env[envVar] = value;
}
else {
delete process.env[envVar];
}
const sampleApiDetails = {
auth: "token",
url: "https://github.example.com",
};
// Returns the filepath of the newly-created file
function createConfigFile(inputFileContents, tmpDir) {
const configFilePath = path.join(tmpDir, "input");
fs.writeFileSync(configFilePath, inputFileContents, "utf8");
return configFilePath;
}
function mockGetContents(content) {
// Passing an auth token is required, so we just use a dummy value
let client = new github.GitHub('123');
const client = github.getOctokit("123");
const response = {
data: content
data: content,
};
const spyGetContents = sinon_1.default.stub(client.repos, "getContents").resolves(response);
const spyGetContents = sinon_1.default
.stub(client.repos, "getContent")
.resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client);
return spyGetContents;
}
function mockListLanguages(languages) {
// Passing an auth token is required, so we just use a dummy value
let client = new github.GitHub('123');
const client = github.getOctokit("123");
const response = {
data: {},
};
@@ -56,12 +59,10 @@ function mockListLanguages(languages) {
}
ava_1.default("load empty config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
setInput('languages', 'javascript,python');
CodeQL.setCodeQL({
resolveQueries: async function () {
const logger = logging_1.getRunnerLogger(true);
const languages = "javascript,python";
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
return {
byLanguage: {},
noDeclaredLanguage: {},
@@ -69,18 +70,15 @@ ava_1.default("load empty config", async (t) => {
};
},
});
const config = await configUtils.initConfig();
t.deepEqual(config, await configUtils.getDefaultConfig());
const config = await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logger));
});
});
ava_1.default("loading config saves config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
setInput('languages', 'javascript,python');
CodeQL.setCodeQL({
resolveQueries: async function () {
const logger = logging_1.getRunnerLogger(true);
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
return {
byLanguage: {},
noDeclaredLanguage: {},
@@ -89,73 +87,64 @@ ava_1.default("loading config saves config", async (t) => {
},
});
// Sanity check the saved config file does not already exist
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile()));
// Sanity check that getConfig throws before we have called initConfig
await t.throwsAsync(configUtils.getConfig);
const config1 = await configUtils.initConfig();
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig returns undefined before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logger);
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile()));
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig();
const config2 = await configUtils.getConfig(tmpDir, logger);
t.deepEqual(config1, config2);
});
});
ava_1.default("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', '../input');
try {
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
await configUtils.initConfig(undefined, undefined, "../input", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, "../input"))));
}
});
});
ava_1.default("load non-local input with invalid repo syntax", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// no filename given, just a repo
setInput('config-file', 'octo-org/codeql-config@main');
const configFile = "octo-org/codeql-config@main";
try {
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
await configUtils.initConfig(undefined, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage("octo-org/codeql-config@main")));
}
});
});
ava_1.default("load non-existent input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
t.false(fs.existsSync(path.join(tmpDir, 'input')));
setInput('config-file', 'input');
setInput('languages', 'javascript');
const languages = "javascript";
const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile)));
try {
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, "input"))));
}
});
});
ava_1.default("load non-empty input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
resolveQueries: async function () {
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
return {
byLanguage: {
'javascript': {
'/foo/a.ql': {},
'/bar/b.ql': {},
javascript: {
"/foo/a.ql": {},
"/bar/b.ql": {},
},
},
noDeclaredLanguage: {},
@@ -174,46 +163,51 @@ ava_1.default("load non-empty input", async (t) => {
- b
paths:
- c/d`;
fs.mkdirSync(path.join(tmpDir, 'foo'));
fs.mkdirSync(path.join(tmpDir, "foo"));
// And the config we expect it to parse to
const expectedConfig = {
languages: ['javascript'],
queries: { 'javascript': ['/foo/a.ql', '/bar/b.ql'] },
pathsIgnore: ['a', 'b'],
paths: ['c/d'],
originalUserInput: {
name: 'my config',
'disable-default-queries': true,
queries: [{ uses: './foo' }],
'paths-ignore': ['a', 'b'],
paths: ['c/d'],
languages: [languages_1.Language.javascript],
queries: {
javascript: {
builtin: [],
custom: ["/foo/a.ql", "/bar/b.ql"],
},
},
pathsIgnore: ["a", "b"],
paths: ["c/d"],
originalUserInput: {
name: "my config",
"disable-default-queries": true,
queries: [{ uses: "./foo" }],
"paths-ignore": ["a", "b"],
paths: ["c/d"],
},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: codeQL.getPath(),
};
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
const actualConfig = await configUtils.initConfig();
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
ava_1.default("default queries are used", async (t) => {
ava_1.default("Default queries are used", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Check that the default behaviour is to add the default queries.
// In this case if a config file is specified but does not include
// the disable-default-queries field.
// We determine this by whether CodeQL.resolveQueries is called
// with the correct arguments.
const resolveQueriesArgs = [];
CodeQL.setCodeQL({
resolveQueries: async function (queries, extraSearchPath) {
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return {
byLanguage: {
'javascript': {
'foo.ql': {},
javascript: {
"foo.ql": {},
},
},
noDeclaredLanguage: {},
@@ -223,31 +217,241 @@ ava_1.default("default queries are used", async (t) => {
});
// The important point of this config is that it doesn't specify
// the disable-default-queries field.
// Any other details are hopefully irrelevant for this tetst.
// Any other details are hopefully irrelevant for this test.
const inputFileContents = `
paths:
- foo`;
fs.mkdirSync(path.join(tmpDir, 'foo'));
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
await configUtils.initConfig();
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, ['javascript-code-scanning.qls']);
t.deepEqual(resolveQueriesArgs[0].queries, [
"javascript-code-scanning.qls",
]);
t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined);
});
});
/**
* Returns the provided queries, just in the right format for a resolved query
* This way we can test by seeing which returned items are in the final
* configuration.
*/
function queriesToResolvedQueryForm(queries) {
const dummyResolvedQueries = {};
for (const q of queries) {
dummyResolvedQueries[q] = {};
}
return {
byLanguage: {
javascript: dummyResolvedQueries,
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
}
ava_1.default("Queries can be specified in config file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
fs.mkdirSync(path.join(tmpDir, "foo"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
t.deepEqual(resolveQueriesArgs.length, 2);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/foo$/);
// Now check that the end result contains the default queries and the query from config
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/foo$/);
});
});
ava_1.default("Queries from config file can be overridden in workflow file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
// This config item should take precedence over the config file but shouldn't affect the default queries.
const testQueries = "./override";
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, "override"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file.
t.deepEqual(resolveQueriesArgs.length, 2);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override$/);
// Now check that the end result contains only the default queries and the override query
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/override$/);
});
});
ava_1.default("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
name: my config
disable-default-queries: true`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const testQueries = "./workflow-query";
fs.mkdirSync(path.join(tmpDir, "workflow-query"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries.length, 1);
t.regex(resolveQueriesArgs[0].queries[0], /.*\/workflow-query$/);
// Now check that the end result contains only the workflow query, and not the default one
t.deepEqual(config.queries["javascript"].builtin.length, 0);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].custom[0], /.*\/workflow-query$/);
});
});
ava_1.default("Multiple queries can be specified in workflow file, no config file required", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
fs.mkdirSync(path.join(tmpDir, "override1"));
fs.mkdirSync(path.join(tmpDir, "override2"));
const testQueries = "./override1,./override2";
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
t.deepEqual(resolveQueriesArgs.length, 3);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override1$/);
t.regex(resolveQueriesArgs[2].queries[0], /.*\/override2$/);
// Now check that the end result contains both the queries from the workflow, as well as the defaults
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 2);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/override1$/);
t.regex(config.queries["javascript"].custom[1], /.*\/override2$/);
});
});
ava_1.default("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
// These queries shouldn't override anything, because the value is prefixed with "+"
const testQueries = "+./additional1,./additional2";
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, "additional1"));
fs.mkdirSync(path.join(tmpDir, "additional2"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries,
// once for each of additional1 and additional2,
// and once for './foo' from the config file
t.deepEqual(resolveQueriesArgs.length, 4);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/additional1$/);
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
t.regex(resolveQueriesArgs[2].queries[0], /.*\/additional2$/);
t.deepEqual(resolveQueriesArgs[3].queries.length, 1);
t.regex(resolveQueriesArgs[3].queries[0], /.*\/foo$/);
// Now check that the end result contains all the queries
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 3);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/additional1$/);
t.regex(config.queries["javascript"].custom[1], /.*\/additional2$/);
t.regex(config.queries["javascript"].custom[2], /.*\/foo$/);
});
});
ava_1.default("Invalid queries in workflow file handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const queries = "foo/bar@v1@v3";
const languages = "javascript";
// This function just needs to be type-correct; it doesn't need to do anything,
// since we're deliberately passing in invalid data
const codeQL = codeql_1.setCodeQL({
async resolveQueries(_queries, _extraSearchPath) {
return {
byLanguage: {
javascript: {},
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
try {
await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
t.fail("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getQueryUsesInvalid(undefined, "foo/bar@v1@v3")));
}
});
});
ava_1.default("API client used when reading remote config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
resolveQueries: async function () {
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
return {
byLanguage: {
'javascript': {
'foo.ql': {},
javascript: {
"foo.ql": {},
},
},
noDeclaredLanguage: {},
@@ -272,24 +476,21 @@ ava_1.default("API client used when reading remote config", async (t) => {
};
const spyGetContents = mockGetContents(dummyResponse);
// Create checkout directory for remote queries repository
fs.mkdirSync(path.join(tmpDir, 'foo/bar'), { recursive: true });
setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
setInput('languages', 'javascript');
await configUtils.initConfig();
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript";
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
t.assert(spyGetContents.called);
});
});
ava_1.default("Remote config handles the case where a directory is provided", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const dummyResponse = []; // directories are returned as arrays
mockGetContents(dummyResponse);
const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
@@ -298,17 +499,14 @@ ava_1.default("Remote config handles the case where a directory is provided", as
});
ava_1.default("Invalid format of remote config handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const dummyResponse = {
// note no "content" property here
};
mockGetContents(dummyResponse);
const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
@@ -317,12 +515,10 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
});
ava_1.default("No detected languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
mockListLanguages([]);
try {
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
await configUtils.initConfig(undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
@@ -331,25 +527,21 @@ ava_1.default("No detected languages", async (t) => {
});
ava_1.default("Unknown languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('languages', 'ruby,english');
const languages = "ruby,english";
try {
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(['ruby', 'english'])));
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(["ruby", "english"])));
}
});
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
ava_1.default("load invalid input - " + testName, async (t) => {
ava_1.default(`load invalid input - ${testName}`, async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
resolveQueries: async function () {
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
return {
byLanguage: {},
noDeclaredLanguage: {},
@@ -357,13 +549,13 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
};
},
});
const inputFile = path.join(tmpDir, 'input');
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
const languages = "javascript";
const configFile = "input";
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, "utf8");
try {
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
@@ -371,14 +563,14 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
});
});
}
doInvalidInputTest('name invalid type', `
doInvalidInputTest("name invalid type", `
name:
- foo: bar`, configUtils.getNameInvalid);
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest('queries uses invalid type', `
doInvalidInputTest("disable-default-queries invalid type", `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest("queries invalid type", `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest("paths-ignore invalid type", `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest("paths invalid type", `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest("queries uses invalid type", `
queries:
- uses:
- hello: world`, configUtils.getQueryUsesInvalid);
@@ -389,52 +581,47 @@ function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
name: my config
queries:
- name: foo
uses: ` + input;
doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
uses: ${input}`;
doInvalidInputTest(`queries uses "${input}"`, inputFileContents, expectedErrorMessageGenerator);
}
// Various "uses" fields, and the errors they should produce
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
doInvalidQueryUsesTest("''", (c) => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", (c) => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", (c) => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", (c) => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", (c) => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", (c) => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", (c) => configUtils.getLocalPathOutsideOfRepository(c, ".."));
const validPaths = [
'foo',
'foo/',
'foo/**',
'foo/**/',
'foo/**/**',
'foo/**/bar/**/baz',
'**/',
'**/foo',
'/foo',
"foo",
"foo/",
"foo/**",
"foo/**/",
"foo/**/**",
"foo/**/bar/**/baz",
"**/",
"**/foo",
"/foo",
];
const invalidPaths = [
'a/***/b',
'a/**b',
'a/b**',
'**',
];
ava_1.default('path validations', t => {
const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
ava_1.default("path validations", (t) => {
// Dummy values to pass to validateAndSanitisePath
const propertyName = 'paths';
const configFile = './.github/codeql/config.yml';
for (const path of validPaths) {
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile));
const propertyName = "paths";
const configFile = "./.github/codeql/config.yml";
for (const validPath of validPaths) {
t.truthy(configUtils.validateAndSanitisePath(validPath, propertyName, configFile, logging_1.getRunnerLogger(true)));
}
for (const path of invalidPaths) {
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile));
for (const invalidPath of invalidPaths) {
t.throws(() => configUtils.validateAndSanitisePath(invalidPath, propertyName, configFile, logging_1.getRunnerLogger(true)));
}
});
ava_1.default('path sanitisation', t => {
ava_1.default("path sanitisation", (t) => {
// Dummy values to pass to validateAndSanitisePath
const propertyName = 'paths';
const configFile = './.github/codeql/config.yml';
const propertyName = "paths";
const configFile = "./.github/codeql/config.yml";
// Valid paths are not modified
t.deepEqual(configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile), 'foo/bar');
t.deepEqual(configUtils.validateAndSanitisePath("foo/bar", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/bar");
// Trailing stars are stripped
t.deepEqual(configUtils.validateAndSanitisePath('foo/**', propertyName, configFile), 'foo/');
t.deepEqual(configUtils.validateAndSanitisePath("foo/**", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/");
});
//# sourceMappingURL=config-utils.test.js.map

File diff suppressed because one or more lines are too long


@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20200630"
"bundleVersion": "codeql-bundle-20201106"
}

lib/error-matcher.js generated Normal file

@@ -0,0 +1,17 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// exported only for testing purposes
exports.namedMatchersForTesting = {
/*
In due course it may be possible to remove the regex, if/when javascript also exits with code 32.
*/
noSourceCodeFound: {
exitCode: 32,
outputRegex: new RegExp("No JavaScript or TypeScript code found\\."),
message: "No code found during the build. Please see:\n" +
"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
},
};
// we collapse the matches into an array for use in execErrorCatcher
exports.errorMatchers = Object.values(exports.namedMatchersForTesting);
//# sourceMappingURL=error-matcher.js.map
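A hedged sketch of how a matcher of this shape could be applied to a finished build: the exit-code-or-regex rule is an assumption based on the comment above, the actual consumer of errorMatchers is not part of this diff, and the message is abbreviated here.
const noSourceCodeFound = {
    exitCode: 32,
    outputRegex: new RegExp("No JavaScript or TypeScript code found\\."),
    message: "No code found during the build.", // abbreviated
};
function matcherFires(matcher, exitCode, output) {
    return exitCode === matcher.exitCode ||
        (matcher.outputRegex !== undefined && matcher.outputRegex.test(output));
}
console.log(matcherFires(noSourceCodeFound, 32, ""));                                         // true - via the exit code
console.log(matcherFires(noSourceCodeFound, 255, "No JavaScript or TypeScript code found.")); // true - via the regex
console.log(matcherFires(noSourceCodeFound, 0, "Done extracting"));                           // false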

lib/error-matcher.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}

lib/error-matcher.test.js generated Normal file

@@ -0,0 +1,29 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const error_matcher_1 = require("./error-matcher");
/*
NB We test the regexes for all the matchers against example log output snippets.
*/
ava_1.default("noSourceCodeFound matches against example javascript output", async (t) => {
t.assert(testErrorMatcher("noSourceCodeFound", `
2020-09-07T17:39:53.9050522Z [2020-09-07 17:39:53] [build] Done extracting /opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/data/externs/web/ie_vml.js (3 ms)
2020-09-07T17:39:53.9051849Z [2020-09-07 17:39:53] [build-err] No JavaScript or TypeScript code found.
2020-09-07T17:39:53.9052444Z [2020-09-07 17:39:53] [build-err] No JavaScript or TypeScript code found.
2020-09-07T17:39:53.9251124Z [2020-09-07 17:39:53] [ERROR] Spawned process exited abnormally (code 255; tried to run: [/opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/autobuild.sh])
`));
});
function testErrorMatcher(matcherName, logSample) {
if (!(matcherName in error_matcher_1.namedMatchersForTesting)) {
throw new Error(`Unknown matcher ${matcherName}`);
}
const regex = error_matcher_1.namedMatchersForTesting[matcherName].outputRegex;
if (regex === undefined) {
throw new Error(`Cannot test matcher ${matcherName} with null regex`);
}
return regex.test(logSample);
}
//# sourceMappingURL=error-matcher.test.js.map


@@ -0,0 +1 @@
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,aAAI,CAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}


@@ -7,26 +7,33 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const util = __importStar(require("./util"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
/**
* Check out repository at the given ref, and return the directory of the checkout.
*/
async function checkoutExternalRepository(repository, ref) {
const folder = util.getRequiredEnvParam('RUNNER_TEMP');
core.info('Checking out ' + repository);
const checkoutLocation = path.join(folder, repository);
async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, logger) {
logger.info(`Checking out ${repository}`);
const checkoutLocation = path.join(tempDir, repository, ref);
if (!checkoutLocation.startsWith(tempDir)) {
// this still permits locations that mess with sibling repositories in `tempDir`, but that is acceptable
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
}
if (!fs.existsSync(checkoutLocation)) {
const repoURL = 'https://github.com/' + repository + '.git';
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
await exec.exec('git', [
'--work-tree=' + checkoutLocation,
'--git-dir=' + checkoutLocation + '/.git',
'checkout', ref,
]);
const repoURL = `${githubUrl}/${repository}`;
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
"clone",
repoURL,
checkoutLocation,
]).exec();
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
`--work-tree=${checkoutLocation}`,
`--git-dir=${checkoutLocation}/.git`,
"checkout",
ref,
]).exec();
}
return checkoutLocation;
}
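The reworked checkoutExternalRepository now takes the GitHub URL, temp directory and logger explicitly, and checks the ref out into a per-ref subdirectory guarded against path escapes. A hedged sketch of a call with the new signature follows; the repository name, commit SHA and logger wiring are placeholder assumptions, not values taken from this diff.

// Sketch only: calling the new signature with placeholder values.
const checkoutLocation = await checkoutExternalRepository(
  "octo-org/custom-queries",                    // hypothetical repository
  "0123456789abcdef0123456789abcdef01234567",   // hypothetical commit SHA
  "https://github.com",                         // githubUrl
  process.env["RUNNER_TEMP"] || "/tmp",         // tempDir
  logger                                        // e.g. getRunnerLogger(true)
);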


@@ -1 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA+B;AAE/B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAAC,UAAkB,EAAE,GAAW;IAC9E,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,UAAU,CAAC,CAAC;IAExC,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;IACvD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,UAAU,GAAG,MAAM,CAAC;QAC5D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;QAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACrB,cAAc,GAAG,gBAAgB;YACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;YACzC,UAAU,EAAE,GAAG;SAChB,CAAC,CAAC;KACJ;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAjBD,gEAiBC"}
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAIpD;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,SAAiB,EACjB,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,GAAG,SAAS,IAAI,UAAU,EAAE,CAAC;QAC7C,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,OAAO;YACP,OAAO;YACP,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAlCD,gEAkCC"}


@@ -1,7 +1,4 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -9,20 +6,92 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const ava_1 = __importDefault(require("ava"));
const externalQueries = __importStar(require("./external-queries"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("checkoutExternalQueries", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
await externalQueries.checkoutExternalRepository("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b");
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch
t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
// Create a test repo in a subdir of the temp dir.
// It should have a default branch with two commits after the initial commit, where
// - the first commit contains files 'a' and 'b'
// - the second commit contains only 'a'
// Place the repo in a subdir because we're going to checkout a copy in tmpDir
const testRepoBaseDir = path.join(tmpDir, "test-repo-dir");
const repoName = "some/repo";
const repoPath = path.join(testRepoBaseDir, repoName);
const repoGitDir = path.join(repoPath, ".git");
// Run the given git command, and return the output.
// Passes --git-dir and --work-tree.
// Any stderr output is suppressed until the command fails.
const runGit = async function (command) {
let stdout = "";
let stderr = "";
command = [
`--git-dir=${repoGitDir}`,
`--work-tree=${repoPath}`,
...command,
];
console.log(`Running: git ${command.join(" ")}`);
try {
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), command, {
silent: true,
listeners: {
stdout: (data) => {
stdout += data.toString();
},
stderr: (data) => {
stderr += data.toString();
},
},
}).exec();
}
catch (e) {
console.log(`Command failed: git ${command.join(" ")}`);
process.stderr.write(stderr);
throw e;
}
return stdout.trim();
};
fs.mkdirSync(repoPath, { recursive: true });
await runGit(["init", repoPath]);
await runGit(["config", "user.email", "test@github.com"]);
await runGit(["config", "user.name", "Test Test"]);
await runGit(["config", "commit.gpgsign", "false"]);
fs.writeFileSync(path.join(repoPath, "a"), "a content");
await runGit(["add", "a"]);
await runGit(["commit", "-m", "commit1"]);
fs.writeFileSync(path.join(repoPath, "b"), "b content");
await runGit(["add", "b"]);
await runGit(["commit", "-m", "commit1"]);
const commit1Sha = await runGit(["rev-parse", "HEAD"]);
fs.unlinkSync(path.join(repoPath, "b"));
await runGit(["add", "b"]);
await runGit(["commit", "-m", "commit2"]);
const commit2Sha = await runGit(["rev-parse", "HEAD"]);
// Checkout the first commit, which should contain 'a' and 'b'
t.false(fs.existsSync(path.join(tmpDir, repoName)));
await externalQueries.checkoutExternalRepository(repoName, commit1Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
// Checkout the second commit as well, which should only contain 'a'
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
await externalQueries.checkoutExternalRepository(repoName, commit2Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
});
});
//# sourceMappingURL=external-queries.test.js.map


@@ -1 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,0BAA0B,CAAC,kBAAkB,EAAE,0CAA0C,CAAC,CAAC;QAEjH,mGAAmG;QACnG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EAC
N,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

115 lib/finalize-db.js generated

@@ -1,115 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, queriesStats, uploadStats, error) {
var _a, _b, _c;
const status = ((_a = queriesStats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined ? 'failure' : 'success';
const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const statusReport = {
...statusReportBase,
...(queriesStats || {}),
...(uploadStats || {}),
};
await util.sendStatusReport(statusReport);
}
async function createdDBForScannedLanguages(databaseFolder, config) {
const codeql = codeql_1.getCodeQL();
for (const language of config.languages) {
if (codeql_1.isScannedLanguage(language)) {
core.startGroup('Extracting ' + language);
await codeql.extractScannedLanguage(path.join(databaseFolder, language), language);
core.endGroup();
}
}
}
async function finalizeDatabaseCreation(databaseFolder, config) {
await createdDBForScannedLanguages(databaseFolder, config);
const codeql = codeql_1.getCodeQL();
for (const language of config.languages) {
core.startGroup('Finalizing ' + language);
await codeql.finalizeDatabase(path.join(databaseFolder, language));
core.endGroup();
}
}
// Runs queries and creates sarif files in the given folder
async function runQueries(databaseFolder, sarifFolder, config) {
const codeql = codeql_1.getCodeQL();
for (let language of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + language);
const queries = config.queries[language] || [];
if (queries.length === 0) {
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
}
try {
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = path.join(databaseFolder, language + '-queries.qls');
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync(querySuite, querySuiteContents);
core.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
const sarifFile = path.join(sarifFolder, language + '.sarif');
await codeql.databaseAnalyze(path.join(databaseFolder, language), sarifFile, querySuite);
core.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
core.endGroup();
}
catch (e) {
// For now the fields about query performance are not populated
return {
analyze_failure_language: language,
};
}
}
return {};
}
async function run() {
const startedAt = new Date();
let queriesStats = undefined;
let uploadStats = undefined;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
return;
}
const config = await configUtils.getConfig();
core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
const databaseFolder = util.getCodeQLDatabasesDir();
const sarifFolder = core.getInput('output');
fs.mkdirSync(sarifFolder, { recursive: true });
core.info('Finalizing database creation');
await finalizeDatabaseCreation(databaseFolder, config);
core.info('Analyzing database');
queriesStats = await runQueries(databaseFolder, sarifFolder, config);
if ('true' === core.getInput('upload')) {
uploadStats = await upload_lib.upload(sarifFolder, repository_1.parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam('GITHUB_WORKFLOW'), util.getWorkflowRunID(), core.getInput('checkout_path'), core.getInput('matrix'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_API_URL'), 'actions', logging_1.getActionsLogger());
}
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await sendStatusReport(startedAt, queriesStats, uploadStats, error);
return;
}
await sendStatusReport(startedAt, queriesStats, uploadStats);
}
run().catch(e => {
core.setFailed("analyze action failed: " + e);
console.log(e);
});
//# sourceMappingURL=finalize-db.js.map
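The runQueries implementation shown in this (now removed) finalize-db.js writes the selected queries to a per-language .qls query suite file instead of passing them on the command line, to avoid command-line length limits, particularly on Windows. A sketch of what the generated suite contents look like, using hypothetical query paths, is shown below.

// Sketch only: building the query suite contents for hypothetical query paths.
const queries = [
  "/path/to/codeql/javascript/ql/src/codeql-suites/javascript-code-scanning.qls",
  "/path/to/custom-queries/MyQuery.ql",
];
const querySuiteContents = queries.map((q) => `- query: ${q}`).join("\n");
// querySuiteContents is a small YAML list:
// - query: /path/to/codeql/javascript/ql/src/codeql-suites/javascript-code-scanning.qls
// - query: /path/to/custom-queries/MyQuery.ql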


@@ -1 +0,0 @@
{"version":3,"file":"finalize-db.js","sourceRoot":"","sources":["../src/finalize-db.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,qCAAwD;AACxD,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAiC/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,YAA6C,EAC7C,WAAsD,EACtD,KAAa;;IAEb,MAAM,MAAM,GAAG,OAAA,YAAY,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IACnH,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,QAAE,KAAK,0CAAE,OAAO,QAAE,KAAK,0CAAE,KAAK,CAAC,CAAC;IACtH,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,YAAY,IAAI,EAAE,CAAC;QACvB,GAAG,CAAC,WAAW,IAAI,EAAE,CAAC;KACvB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,4BAA4B,CAAC,cAAsB,EAAE,MAA0B;IAC5F,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,0BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;YAC1C,MAAM,MAAM,CAAC,sBAAsB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;YACnF,IAAI,CAAC,QAAQ,EAAE,CAAC;SACjB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CAAC,cAAsB,EAAE,MAA0B;IACxF,MAAM,4BAA4B,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;IAE3D,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;QAC1C,MAAM,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC,CAAC;QACnE,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CACvB,cAAsB,EACtB,WAAmB,EACnB,MAA0B;IAE1B,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,IAAI,QAAQ,IAAI,EAAE,CAAC,WAAW,CAAC,cAAc,CAAC,EAAE;QACnD,IAAI,CAAC,UAAU,CAAC,YAAY,GAAG,QAAQ,CAAC,CAAC;QAEzC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,GAAG,QAAQ,GAAG,gDAAgD,CAAC,CAAC;SACrG;QAED,IAAI;YACF,uEAAuE;YACvE,2EAA2E;YAC3E,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,GAAG,cAAc,CAAC,CAAC;YACxE,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxE,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;YACjD,IAAI,CAAC,KAAK,CAAC,uBAAuB,GAAG,QAAQ,GAAG,OAAO,GAAG,kBAAkB,CAAC,CAAC;YAE9E,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;YAE9D,MAAM,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,CAAC;YAEzF,IAAI,CAAC,KAAK,CAAC,6BAA6B,GAAG,QAAQ,GAAG,eAAe,GAAG,SAAS,GAAG,GAAG,CAAC,CAAC;YACzF,IAAI,CAAC,QAAQ,EAAE,CAAC;SAEjB;QAAC,OAAO,CAAC,EAAE;YACV,+DAA+D;YAC/D,OAAO;gBACL,wBAAwB,EAAE,QAAQ;aACnC,CAAC;SACH;KACF;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAoC,SAAS,CAAC;IAC9D,IAAI,WAAW,GAA8C,SAAS,CAAC;IACvE,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC1G,OAAO;SACR;QACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,CAAC;QAE7C,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,0BAA0B,EAAE,EAAE,CAAC,CAAC;QAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEzD,MAAM,cAAc,GAAG,IAAI,CAAC,qBAAqB,EAAE,CAAC;QAEpD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAC5C,EAAE,CAAC,SAAS,CAAC,WAAW,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAE/C,IAAI,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;QAC1C,MAAM,wBAAwB,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;QAEvD,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAChC,YAAY,GAAG,MAAM,UAAU,CAAC,cAAc,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;QAErE,IAAI,MAAM,KAAK,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACtC,WAAW,GAAG,MAAM,UAA
U,CAAC,MAAM,CACnC,WAAW,EACX,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,EAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAC,EAC1C,SAAS,EACT,0BAAgB,EAAE,CAAC,CAAC;SACvB;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,YAAY,EAAE,WAAW,EAAE,KAAK,CAAC,CAAC;QACpE,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,YAAY,EAAE,WAAW,CAAC,CAAC;AAC/D,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,yBAAyB,GAAG,CAAC,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}


@@ -1,2 +0,0 @@
"use strict";
//# sourceMappingURL=finalize-db.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"finalize-db.test.js","sourceRoot":"","sources":["../src/finalize-db.test.ts"],"names":[],"mappings":""}

84 lib/fingerprints.js generated

@@ -10,13 +10,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const fs = __importStar(require("fs"));
const long_1 = __importDefault(require("long"));
const tab = '\t'.charCodeAt(0);
const space = ' '.charCodeAt(0);
const lf = '\n'.charCodeAt(0);
const cr = '\r'.charCodeAt(0);
const tab = "\t".charCodeAt(0);
const space = " ".charCodeAt(0);
const lf = "\n".charCodeAt(0);
const cr = "\r".charCodeAt(0);
const BLOCK_SIZE = 100;
const MOD = long_1.default.fromInt(37); // L
// Compute the starting point for the hash mod
@@ -47,8 +46,8 @@ function hash(callback, input) {
// Indexes match up with those from the window variable.
const lineNumbers = Array(BLOCK_SIZE).fill(-1);
// The current hash value, updated as we read each character
let hash = long_1.default.ZERO;
let firstMod = computeFirstMod();
let hashRaw = long_1.default.ZERO;
const firstMod = computeFirstMod();
// The current index in the window, will wrap around to zero when we reach BLOCK_SIZE
let index = 0;
// The line number of the character we are currently processing from the input
@@ -62,19 +61,19 @@ function hash(callback, input) {
const hashCounts = {};
// Output the current hash and line number to the callback function
const outputHash = function () {
let hashValue = hash.toUnsigned().toString(16);
const hashValue = hashRaw.toUnsigned().toString(16);
if (!hashCounts[hashValue]) {
hashCounts[hashValue] = 0;
}
hashCounts[hashValue]++;
callback(lineNumbers[index], hashValue + ":" + hashCounts[hashValue]);
callback(lineNumbers[index], `${hashValue}:${hashCounts[hashValue]}`);
lineNumbers[index] = -1;
};
// Update the current hash value and increment the index in the window
const updateHash = function (current) {
const begin = window[index];
window[index] = current;
hash = MOD.multiply(hash)
hashRaw = MOD.multiply(hashRaw)
.add(long_1.default.fromInt(current))
.subtract(firstMod.multiply(long_1.default.fromInt(begin)));
index = (index + 1) % BLOCK_SIZE;
@@ -121,8 +120,8 @@ function hash(callback, input) {
}
exports.hash = hash;
// Generate a hash callback function that updates the given result in-place
// when it recieves a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(result, location) {
// when it receives a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(result, location, logger) {
var _a, _b;
let locationStartLine = (_b = (_a = location.physicalLocation) === null || _a === void 0 ? void 0 : _a.region) === null || _b === void 0 ? void 0 : _b.startLine;
if (locationStartLine === undefined) {
@@ -131,7 +130,7 @@ function locationUpdateCallback(result, location) {
// using the hash of the first line of the file.
locationStartLine = 1;
}
return function (lineNumber, hash) {
return function (lineNumber, hashValue) {
// Ignore hashes for lines that don't concern us
if (locationStartLine !== lineNumber) {
return;
@@ -143,13 +142,10 @@ function locationUpdateCallback(result, location) {
// If the hash doesn't match the existing fingerprint then
// output a warning and don't overwrite it.
if (!existingFingerprint) {
result.partialFingerprints.primaryLocationLineHash = hash;
result.partialFingerprints.primaryLocationLineHash = hashValue;
}
else if (existingFingerprint !== hash) {
core.warning('Calculated fingerprint of ' + hash +
' for file ' + location.physicalLocation.artifactLocation.uri +
' line ' + lineNumber +
', but found existing inconsistent fingerprint value ' + existingFingerprint);
else if (existingFingerprint !== hashValue) {
logger.warning(`Calculated fingerprint of ${hashValue} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}`);
}
};
}
@@ -157,48 +153,48 @@ function locationUpdateCallback(result, location) {
// the source file so we can hash it.
// If possible returns a absolute file path for the source file,
// or if not possible then returns undefined.
function resolveUriToFile(location, artifacts) {
function resolveUriToFile(location, artifacts, checkoutPath, logger) {
// This may be referencing an artifact
if (!location.uri && location.index !== undefined) {
if (typeof location.index !== 'number' ||
if (typeof location.index !== "number" ||
location.index < 0 ||
location.index >= artifacts.length ||
typeof artifacts[location.index].location !== 'object') {
core.debug(`Ignoring location as URI "${location.index}" is invalid`);
typeof artifacts[location.index].location !== "object") {
logger.debug(`Ignoring location as URI "${location.index}" is invalid`);
return undefined;
}
location = artifacts[location.index].location;
}
// Get the URI and decode
if (typeof location.uri !== 'string') {
core.debug(`Ignoring location as index "${location.uri}" is invalid`);
if (typeof location.uri !== "string") {
logger.debug(`Ignoring location as index "${location.uri}" is invalid`);
return undefined;
}
let uri = decodeURIComponent(location.uri);
// Remove a file scheme, and abort if the scheme is anything else
const fileUriPrefix = 'file://';
const fileUriPrefix = "file://";
if (uri.startsWith(fileUriPrefix)) {
uri = uri.substring(fileUriPrefix.length);
}
if (uri.indexOf('://') !== -1) {
core.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
if (uri.indexOf("://") !== -1) {
logger.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
return undefined;
}
// Discard any absolute paths that aren't in the src root
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
core.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
const srcRootPrefix = `${checkoutPath}/`;
if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) {
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
return undefined;
}
// Just assume a relative path is relative to the src root.
// This is not necessarily true but should be a good approximation
// and here we likely want to err on the side of handling more cases.
if (!uri.startsWith('/')) {
if (!uri.startsWith("/")) {
uri = srcRootPrefix + uri;
}
// Check the file exists
if (!fs.existsSync(uri)) {
core.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
return undefined;
}
return uri;
@@ -206,41 +202,43 @@ function resolveUriToFile(location, artifacts) {
exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
function addFingerprints(sarifContents) {
function addFingerprints(sarifContents, checkoutPath, logger) {
var _a, _b;
let sarif = JSON.parse(sarifContents);
const sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile = {};
for (const run of sarif.runs || []) {
// We may need the list of artifacts to resolve against
let artifacts = run.artifacts || [];
const artifacts = run.artifacts || [];
for (const result of run.results || []) {
// Check the primary location is defined correctly and is in the src root
const primaryLocation = (result.locations || [])[0];
if (!((_b = (_a = primaryLocation) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation)) {
core.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
logger.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
continue;
}
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, checkoutPath, logger);
if (!filepath) {
continue;
}
if (!callbacksByFile[filepath]) {
callbacksByFile[filepath] = [];
}
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation));
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation, logger));
}
}
// Now hash each file that was found
Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => {
for (const [filepath, callbacks] of Object.entries(callbacksByFile)) {
// A callback that forwards the hash to all other callbacks for that file
const teeCallback = function (lineNumber, hash) {
Object.values(callbacks).forEach(c => c(lineNumber, hash));
const teeCallback = function (lineNumber, hashValue) {
for (const c of Object.values(callbacks)) {
c(lineNumber, hashValue);
}
};
const fileContents = fs.readFileSync(filepath).toString();
hash(teeCallback, fileContents);
});
}
return JSON.stringify(sarif);
}
exports.addFingerprints = addFingerprints;
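The fingerprinting module computes a Rabin-Karp style rolling hash over a 100-character window (multiply by MOD, add the incoming character, subtract firstMod times the outgoing one) and reports one hash per line. A small sketch of driving the exported hash function directly follows; the sample input string is an assumption, but the callback shape (a 1-based line number plus a "hash:occurrence" value) matches the tests below.

// Sketch only: fingerprints.hash invokes the callback once per input line.
const fingerprints = require("./fingerprints");
fingerprints.hash((lineNumber, hashValue) => {
  // hashValue looks like "<hex hash>:<occurrence count>", e.g. "c129715d7a2bc9a3:1"
  console.log(`line ${lineNumber}: ${hashValue}`);
}, "x = 2\nprint(x)\n");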

File diff suppressed because one or more lines are too long


@@ -1,7 +1,4 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -9,16 +6,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const fingerprints = __importStar(require("./fingerprints"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
function testHash(t, input, expectedHashes) {
let index = 0;
let callback = function (lineNumber, hash) {
const callback = function (lineNumber, hash) {
t.is(lineNumber, index + 1);
t.is(hash, expectedHashes[index]);
index++;
@@ -26,7 +27,7 @@ function testHash(t, input, expectedHashes) {
fingerprints.hash(callback, input);
t.is(index, input.split(/\r\n|\r|\n/).length);
}
ava_1.default('hash', (t) => {
ava_1.default("hash", (t) => {
// Try empty file
testHash(t, "", ["c129715d7a2bc9a3:1"]);
// Try various combinations of newline characters
@@ -34,7 +35,7 @@ ava_1.default('hash', (t) => {
"271789c17abda88f:1",
"54703d4cd895b18:1",
"180aee12dab6264:1",
"a23a3dc5e078b07b:1"
"a23a3dc5e078b07b:1",
]);
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
"8b7cf3e952e7aeb2:1",
@@ -92,68 +93,87 @@ ava_1.default('hash', (t) => {
"a9cf91f7bbf1862b:1",
"55ec222b86bcae53:1",
"cc97dc7b1d7d8f7b:1",
"c129715d7a2bc9a3:1"
"c129715d7a2bc9a3:1",
]);
testHash(t, "x = 2\nx = 1\nprint(x)\nx = 3\nprint(x)\nx = 4\nprint(x)\n", [
"e54938cc54b302f1:1",
"bb609acbe9138d60:1",
"1131fd5871777f34:1",
"5c482a0f8b35ea28:1",
"54517377da7028d2:1",
"2c644846cb18d53e:1",
"f1b89f20de0d133:1",
"c129715d7a2bc9a3:1",
]);
});
function testResolveUriToFile(uri, index, artifactsURIs) {
const location = { "uri": uri, "index": index };
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
return fingerprints.resolveUriToFile(location, artifacts);
const location = { uri, index };
const artifacts = artifactsURIs.map((artifactURI) => ({
location: { uri: artifactURI },
}));
return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), logging_1.getRunnerLogger(true));
}
ava_1.default('resolveUriToFile', t => {
ava_1.default("resolveUriToFile", (t) => {
// The resolveUriToFile method checks that the file exists and is in the right directory
// so we need to give it real files to look at. We will use this file as an example.
// For this to work we require the current working directory to be a parent, but this
// should generally always be the case so this is fine.
const cwd = process.cwd();
const filepath = __filename;
t.true(filepath.startsWith(cwd + '/'));
const relativeFilepaht = filepath.substring(cwd.length + 1);
process.env['GITHUB_WORKSPACE'] = cwd;
t.true(filepath.startsWith(`${cwd}/`));
const relativeFilepath = filepath.substring(cwd.length + 1);
// Absolute paths are unmodified
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${filepath}`, undefined, []), filepath);
// Relative paths are made absolute
t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + relativeFilepaht, undefined, []), filepath);
t.is(testResolveUriToFile(relativeFilepath, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${relativeFilepath}`, undefined, []), filepath);
// Absolute paths outside the src root are discarded
t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
t.is(testResolveUriToFile("/src/foo/bar.js", undefined, []), undefined);
t.is(testResolveUriToFile("file:///src/foo/bar.js", undefined, []), undefined);
// Other schemes are discarded
t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
t.is(testResolveUriToFile(`https://${filepath}`, undefined, []), undefined);
t.is(testResolveUriToFile(`ftp://${filepath}`, undefined, []), undefined);
// Invalid URIs are discarded
t.is(testResolveUriToFile(1, undefined, []), undefined);
t.is(testResolveUriToFile(undefined, undefined, []), undefined);
// Non-existant files are discarded
t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
// Non-existent files are discarded
t.is(testResolveUriToFile(`${filepath}2`, undefined, []), undefined);
// Index is resolved
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ["foo", filepath]), filepath);
// Invalid indexes are discarded
t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
t.is(testResolveUriToFile(undefined, "0", [filepath]), undefined);
});
ava_1.default('addFingerprints', t => {
ava_1.default("addFingerprints", (t) => {
// Run an end-to-end test on a test file
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input), expected);
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected);
});
ava_1.default('missingRegions', t => {
ava_1.default("missingRegions", (t) => {
// Run an end-to-end test on a test file
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input), expected);
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected);
});
//# sourceMappingURL=fingerprints.test.js.map

File diff suppressed because one or more lines are too long

128 lib/init-action.js generated Normal file

@@ -0,0 +1,128 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
var _a;
const statusReportBase = await actionsUtil.createStatusReportBase("init", "success", startedAt);
const languages = config.languages.join(",");
const workflowLanguages = actionsUtil.getOptionalInput("languages");
const paths = (config.originalUserInput.paths || []).join(",");
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(",");
const disableDefaultQueries = config.originalUserInput["disable-default-queries"]
? languages
: "";
const queries = [];
let queriesInput = (_a = actionsUtil.getOptionalInput("queries")) === null || _a === void 0 ? void 0 : _a.trim();
if (queriesInput === undefined || queriesInput.startsWith("+")) {
queries.push(...(config.originalUserInput.queries || []).map((q) => q.uses));
}
if (queriesInput !== undefined) {
queriesInput = queriesInput.startsWith("+")
? queriesInput.substr(1)
: queriesInput;
queries.push(...queriesInput.split(","));
}
const statusReport = {
...statusReportBase,
languages,
workflow_languages: workflowLanguages || "",
paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries.join(","),
tools_input: actionsUtil.getOptionalInput("tools") || "",
tools_resolved_version: toolsVersion,
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
const logger = logging_1.getActionsLogger();
let config;
let codeql;
let toolsVersion;
try {
actionsUtil.prepareLocalRunEnvironment();
const workflowErrors = await actionsUtil.getWorkflowErrors();
if (workflowErrors.length > 0) {
core.warning(actionsUtil.formatWorkflowErrors(workflowErrors));
}
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, actionsUtil.formatWorkflowCause(workflowErrors))))) {
return;
}
const apiDetails = {
auth: actionsUtil.getRequiredInput("token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", logger);
codeql = initCodeQLResult.codeql;
toolsVersion = initCodeQLResult.toolsVersion;
config = await init_1.initConfig(actionsUtil.getOptionalInput("languages"), actionsUtil.getOptionalInput("queries"), actionsUtil.getOptionalInput("config-file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, actionsUtil.getRequiredEnvParam("GITHUB_WORKSPACE"), apiDetails, "actions", logger);
if (config.languages.includes(languages_1.Language.python) &&
actionsUtil.getRequiredInput("setup-python-dependencies") === "true") {
try {
await init_1.installPythonDeps(codeql, logger);
}
catch (err) {
logger.warning(`${err.message} You can call this action with 'setup-python-dependencies: false' to disable this process`);
}
}
}
catch (e) {
core.setFailed(e.message);
console.log(e);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "aborted", startedAt, e.message));
return;
}
try {
// Forward Go flags
const goFlags = process.env["GOFLAGS"];
if (goFlags) {
core.exportVariable("GOFLAGS", goFlags);
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
}
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env["CODEQL_RAM"] || "6500";
core.exportVariable("CODEQL_RAM", codeqlRam);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig !== undefined) {
for (const [key, value] of Object.entries(tracerConfig.env)) {
core.exportVariable(key, value);
}
if (process.platform === "win32") {
await init_1.injectWindowsTracer("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
}
}
core.setOutput("codeql-path", config.codeQLCmd);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "failure", startedAt, error.message, error.stack));
return;
}
await sendSuccessStatusReport(startedAt, config, toolsVersion);
}
async function runWrapper() {
try {
await run();
}
catch (error) {
core.setFailed(`init action failed: ${error}`);
console.log(error);
}
}
void runWrapper();
//# sourceMappingURL=init-action.js.map

1 lib/init-action.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"init-action.js","sourceRoot":"","sources":["../src/init-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAG9C,iCAMgB;AAChB,2CAAuC;AACvC,uCAA6C;AAC7C,6CAAkD;AAsBlD,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,MAA0B,EAC1B,YAAoB;;IAEpB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,MAAM,EACN,SAAS,EACT,SAAS,CACV,CAAC;IAEF,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC7C,MAAM,iBAAiB,GAAG,WAAW,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;IACpE,MAAM,KAAK,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CACvE,GAAG,CACJ,CAAC;IACF,MAAM,qBAAqB,GAAG,MAAM,CAAC,iBAAiB,CACpD,yBAAyB,CAC1B;QACC,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,EAAE,CAAC;IAEP,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,SAAG,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,0CAAE,IAAI,EAAE,CAAC;IACnE,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;QAC9D,OAAO,CAAC,IAAI,CACV,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,OAAO,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAC/D,CAAC;KACH;IACD,IAAI,YAAY,KAAK,SAAS,EAAE;QAC9B,YAAY,GAAG,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC;YACzC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC;YACxB,CAAC,CAAC,YAAY,CAAC;QACjB,OAAO,CAAC,IAAI,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;KAC1C;IAED,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,SAAS;QACT,kBAAkB,EAAE,iBAAiB,IAAI,EAAE;QAC3C,KAAK;QACL,YAAY,EAAE,WAAW;QACzB,uBAAuB,EAAE,qBAAqB;QAC9C,OAAO,EAAE,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC;QAC1B,WAAW,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,IAAI,EAAE;QACxD,sBAAsB,EAAE,YAAY;KACrC,CAAC;IAEF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,MAAc,CAAC;IACnB,IAAI,YAAoB,CAAC;IAEzB,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QAEzC,MAAM,cAAc,GAAG,MAAM,WAAW,CAAC,iBAAiB,EAAE,CAAC;QAE7D,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE;YAC7B,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,oBAAoB,CAAC,cAAc,CAAC,CAAC,CAAC;SAChE;QAED,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,UAAU,EACV,SAAS,EACT,WAAW,CAAC,mBAAmB,CAAC,cAAc,CAAC,CAChD,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QAEF,MAAM,gBAAgB,GAAG,MAAM,iBAAU,CACvC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,UAAU,EACV,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,SAAS,EACT,MAAM,CACP,CAAC;QACF,MAAM,GAAG,gBAAgB,CAAC,MAAM,CAAC;QACjC,YAAY,GAAG,gBAAgB,CAAC,YAAY,CAAC;QAE7C,MAAM,GAAG,MAAM,iBAAU,CACvB,WAAW,CAAC,gBAAgB,CAAC,WAAW,CAAC,EACzC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EACvC,WAAW,CAAC,gBAAgB,CAAC,aAAa,CAAC,EAC3C,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,MAAM,EACN,WAAW,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,EACnD,UAAU,EACV,SAAS,EACT,MAAM,CACP,CAAC;QAEF,IACE,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,oBAAQ,CAAC,MAAM,CAAC;YAC1C,WAAW,CAAC,gBAAgB,CAAC,2BAA2B,CAAC,KAAK,MAAM,EACpE;YACA,IAAI;gBACF,MAAM,wBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;aACzC;YAAC,OAAO,GAAG,EAAE;gBACZ,MAAM,CAAC,OAAO,CACZ,GAAG,GAAG,CAAC,OAAO,2FAA2F,CAC1G,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,SAAS,EACT,SAAS,EACT,CAAC,CAAC,OAAO,CACV,CACF,CAAC;QACF,OAAO;KACR;IAED,IAAI;QACF,mBAAmB;QACnB,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CA
AC,SAAS,CAAC,CAAC;QACvC,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YACxC,IAAI,CAAC,OAAO,CACV,6GAA6G,CAC9G,CAAC;SACH;QAED,mGAAmG;QACnG,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,MAAM,CAAC;QACtD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;QAE7C,MAAM,YAAY,GAAG,MAAM,cAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnD,IAAI,YAAY,KAAK,SAAS,EAAE;YAC9B,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,EAAE;gBAC3D,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;aACjC;YAED,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,0BAAmB,CACvB,mBAAmB,EACnB,SAAS,EACT,MAAM,EACN,MAAM,EACN,YAAY,CACb,CAAC;aACH;SACF;QAED,IAAI,CAAC,SAAS,CAAC,aAAa,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;KACjD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;AACjE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,uBAAuB,KAAK,EAAE,CAAC,CAAC;QAC/C,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

174 lib/init.js generated Normal file

@@ -0,0 +1,174 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
async function initCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logger) {
logger.startGroup("Setup CodeQL tools");
const { codeql, toolsVersion } = await codeql_1.setupCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logger);
await codeql.printVersion();
logger.endGroup();
return { codeql, toolsVersion };
}
exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger) {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;
}
exports.initConfig = initConfig;
async function runInit(codeql, config) {
const sourceRoot = path.resolve();
fs.mkdirSync(util.getCodeQLDatabasesDir(config.tempDir), { recursive: true });
// TODO: replace this code once CodeQL supports multi-language tracing
for (const language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config.tempDir, language), language, sourceRoot);
}
return await tracer_config_1.getCombinedTracerConfig(config, codeql);
}
exports.runInit = runInit;
// Runs a powershell script to inject the tracer into a parent process
// so it can tracer future processes, hopefully including the build process.
// If processName is given then injects into the nearest parent process with
// this name, otherwise uses the processLevel-th parent if defined, otherwise
// defaults to the 3rd parent as a rough guess.
async function injectWindowsTracer(processName, processLevel, config, codeql, tracerConfig) {
let script;
if (processName !== undefined) {
script = `
Param(
[Parameter(Position=0)]
[String]
$tracer
)
$id = $PID
while ($true) {
$p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
Write-Host "Found process: $p"
if ($p -eq $null) {
throw "Could not determine ${processName} process"
}
if ($p[0].Name -eq "${processName}") {
Break
} else {
$id = $p[0].ParentProcessId
}
}
Write-Host "Final process: $p"
Invoke-Expression "&$tracer --inject=$id"`;
}
else {
// If the level is not defined then guess at the 3rd parent process.
// This won't be correct in every setting but it should be enough in most settings,
// and overestimating is likely better in this situation so we definitely trace
// what we want, though this does run the risk of interfering with future CI jobs.
// Note that the default of 3 doesn't work on github actions, so we include a
// special case in the script that checks for Runner.Worker.exe so we can still work
// on actions if the runner is invoked there.
processLevel = processLevel || 3;
script = `
Param(
[Parameter(Position=0)]
[String]
$tracer
)
$id = $PID
for ($i = 0; $i -le ${processLevel}; $i++) {
$p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
Write-Host "Parent process \${i}: $p"
if ($p -eq $null) {
throw "Process tree ended before reaching required level"
}
# Special case just in case the runner is used on actions
if ($p[0].Name -eq "Runner.Worker.exe") {
Write-Host "Found Runner.Worker.exe process which means we are running on GitHub Actions"
Write-Host "Aborting search early and using process: $p"
Break
} elseif ($p[0].Name -eq "Agent.Worker.exe") {
Write-Host "Found Agent.Worker.exe process which means we are running on Azure Pipelines"
Write-Host "Aborting search early and using process: $p"
Break
} else {
$id = $p[0].ParentProcessId
}
}
Write-Host "Final process: $p"
Invoke-Expression "&$tracer --inject=$id"`;
}
const injectTracerPath = path.join(config.tempDir, "inject-tracer.ps1");
fs.writeFileSync(injectTracerPath, script);
await new toolrunner.ToolRunner(await safeWhich.safeWhich("powershell"), [
"-ExecutionPolicy",
"Bypass",
"-file",
injectTracerPath,
path.resolve(path.dirname(codeql.getPath()), "tools", "win64", "tracer.exe"),
], { env: { ODASA_TRACER_CONFIGURATION: tracerConfig.spec } }).exec();
}
exports.injectWindowsTracer = injectWindowsTracer;
async function installPythonDeps(codeql, logger) {
logger.startGroup("Setup Python dependencies");
const scriptsFolder = path.resolve(__dirname, "../python-setup");
// Setup tools on the Github hosted runners
if (process.env["ImageOS"] !== undefined) {
try {
if (process.platform === "win32") {
await new toolrunner.ToolRunner(await safeWhich.safeWhich("powershell"), [path.join(scriptsFolder, "install_tools.ps1")]).exec();
}
else {
await new toolrunner.ToolRunner(path.join(scriptsFolder, "install_tools.sh")).exec();
}
}
catch (e) {
// This script tries to install some needed tools in the runner. It should not fail, but if it does
// we just abort the process without failing the action
logger.endGroup();
logger.warning("Unable to download and extract the tools needed for installing the python dependencies. You can call this action with 'setup-python-dependencies: false' to disable this process.");
return;
}
}
// Install dependencies
try {
const script = "auto_install_packages.py";
if (process.platform === "win32") {
await new toolrunner.ToolRunner(await safeWhich.safeWhich("py"), [
"-3",
path.join(scriptsFolder, script),
path.dirname(codeql.getPath()),
]).exec();
}
else {
await new toolrunner.ToolRunner(path.join(scriptsFolder, script), [
path.dirname(codeql.getPath()),
]).exec();
}
}
catch (e) {
logger.endGroup();
logger.warning("We were unable to install your python dependencies. You can call this action with 'setup-python-dependencies: false' to disable this process.");
return;
}
logger.endGroup();
}
exports.installPythonDeps = installPythonDeps;
//# sourceMappingURL=init.js.map
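
The two helpers above are consumed by the runner commands later in this diff. As a rough orientation only, here is a minimal sketch of how injectWindowsTracer might be invoked; the config, codeql and tracerConfig values are assumed to have the shapes used in this file (tempDir, getPath(), spec), and the call itself is illustrative rather than part of the change.

// Illustrative only: attach the Windows tracer once runInit has produced a
// tracer configuration. Mirrors the function defined above.
const { injectWindowsTracer } = require("./init");

async function attachWindowsTracer(config, codeql, tracerConfig) {
    if (process.platform !== "win32") {
        return; // injection is only needed for Windows build tracing
    }
    // With an explicit process name the generated script walks up the parent
    // chain until it finds that process; with undefined it falls back to the
    // 3rd-parent guess (plus the Runner.Worker.exe special case for Actions).
    await injectWindowsTracer("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
}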

1
lib/init.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,QAAgB,EAChB,IAAe,EACf,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,oBAAW,CAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,MAAM,CACP,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AApBD,gCAoBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,UAA4B,EAC5B,IAAe,EACf,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,UAAU,EACV,IAAI,EACJ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9BD,gCA8BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,EACR,UAAU,CACX,CAAC;KACH;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnBD,0BAmBC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,2CAA2C;IAC3C,IAAI,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,SAAS,EAAE;QACxC,IAAI;YACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC,CAAC,CAChD,CAAC,IAAI,EAAE,CAAC;aACV;iBAAM;gBACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;aACV;SACF;QAAC,OAAO,CAAC,EAAE;YACV,mGAAmG;YACnG,uDAAuD;YACvD,MAAM,CAAC,QAAQ,EAAE,CAAC;YAClB,MAAM,CAAC,OAAO,CACZ,mLAAmL,CACpL,CAAC;YACF,OAAO;SACR;KACF;IAED,uBAAuB;IACvB,IAAI;QACF,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC
,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,+IAA+I,CAChJ,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAnDD,8CAmDC"}

45
lib/languages.js generated Normal file
View File

@@ -0,0 +1,45 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// All the languages supported by CodeQL
var Language;
(function (Language) {
Language["csharp"] = "csharp";
Language["cpp"] = "cpp";
Language["go"] = "go";
Language["java"] = "java";
Language["javascript"] = "javascript";
Language["python"] = "python";
})(Language = exports.Language || (exports.Language = {}));
// Additional names for languages
const LANGUAGE_ALIASES = {
c: Language.cpp,
"c++": Language.cpp,
"c#": Language.csharp,
typescript: Language.javascript,
};
// Translate from user input or GitHub's API names for languages to CodeQL's names for languages
function parseLanguage(language) {
// Normalise to lower case
language = language.toLowerCase();
// See if it's an exact match
if (language in Language) {
return language;
}
// Check language aliases
if (language in LANGUAGE_ALIASES) {
return LANGUAGE_ALIASES[language];
}
return undefined;
}
exports.parseLanguage = parseLanguage;
function isTracedLanguage(language) {
return (["cpp", "java", "csharp"].includes(language) ||
(process.env["CODEQL_EXTRACTOR_GO_BUILD_TRACING"] === "on" &&
language === Language.go));
}
exports.isTracedLanguage = isTracedLanguage;
function isScannedLanguage(language) {
return !isTracedLanguage(language);
}
exports.isScannedLanguage = isScannedLanguage;
//# sourceMappingURL=languages.js.map
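
A short illustrative snippet, not part of the diff, showing the alias handling and the Go tracing switch implemented above; the require path assumes the compiled module layout used by the tests that follow.

const { parseLanguage, isTracedLanguage, Language } = require("./languages");

// User input and GitHub API names normalise to CodeQL's own language names.
console.log(parseLanguage("C++"));        // "cpp"
console.log(parseLanguage("TypeScript")); // "javascript"

// Go only counts as a traced language when build tracing is explicitly enabled.
console.log(isTracedLanguage(Language.go)); // false
process.env["CODEQL_EXTRACTOR_GO_BUILD_TRACING"] = "on";
console.log(isTracedLanguage(Language.go)); // true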

1
lib/languages.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;AAAA,wCAAwC;AACxC,IAAY,QAOX;AAPD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;AACnB,CAAC,EAPW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAOnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CACL,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;QAC5C,CAAC,OAAO,CAAC,GAAG,CAAC,mCAAmC,CAAC,KAAK,IAAI;YACxD,QAAQ,KAAK,QAAQ,CAAC,EAAE,CAAC,CAC5B,CAAC;AACJ,CAAC;AAND,4CAMC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}

44
lib/languages.test.js generated Normal file
View File

@@ -0,0 +1,44 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("parseLanguage", async (t) => {
// Exact matches
t.deepEqual(languages_1.parseLanguage("csharp"), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage("cpp"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("go"), languages_1.Language.go);
t.deepEqual(languages_1.parseLanguage("java"), languages_1.Language.java);
t.deepEqual(languages_1.parseLanguage("javascript"), languages_1.Language.javascript);
t.deepEqual(languages_1.parseLanguage("python"), languages_1.Language.python);
// Aliases
t.deepEqual(languages_1.parseLanguage("c"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("c++"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("c#"), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage("typescript"), languages_1.Language.javascript);
// Not matches
t.deepEqual(languages_1.parseLanguage("foo"), undefined);
t.deepEqual(languages_1.parseLanguage(" "), undefined);
t.deepEqual(languages_1.parseLanguage(""), undefined);
});
ava_1.default("isTracedLanguage", async (t) => {
t.true(languages_1.isTracedLanguage(languages_1.Language.cpp));
t.true(languages_1.isTracedLanguage(languages_1.Language.java));
t.true(languages_1.isTracedLanguage(languages_1.Language.csharp));
t.false(languages_1.isTracedLanguage(languages_1.Language.go));
t.false(languages_1.isTracedLanguage(languages_1.Language.javascript));
t.false(languages_1.isTracedLanguage(languages_1.Language.python));
});
ava_1.default("isScannedLanguage", async (t) => {
t.false(languages_1.isScannedLanguage(languages_1.Language.cpp));
t.false(languages_1.isScannedLanguage(languages_1.Language.java));
t.false(languages_1.isScannedLanguage(languages_1.Language.csharp));
t.true(languages_1.isScannedLanguage(languages_1.Language.go));
t.true(languages_1.isScannedLanguage(languages_1.Language.javascript));
t.true(languages_1.isScannedLanguage(languages_1.Language.python));
});
//# sourceMappingURL=languages.test.js.map

1
lib/languages.test.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"languages.test.js","sourceRoot":"","sources":["../src/languages.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,2CAKqB;AACrB,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,gBAAgB;IAChB,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IACtD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,MAAM,CAAC,EAAE,oBAAQ,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAC9D,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAEtD,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAE9D,cAAc;IACd,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC7C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;AAC5C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE1C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mBAAmB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACpC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACzC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE5C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC"}

6
lib/logging.js generated
View File

@@ -12,9 +12,9 @@ function getActionsLogger() {
return core;
}
exports.getActionsLogger = getActionsLogger;
function getCLILogger() {
function getRunnerLogger(debugMode) {
return {
debug: console.debug,
debug: debugMode ? console.debug : () => undefined,
info: console.info,
warning: console.warn,
error: console.error,
@@ -22,5 +22,5 @@ function getCLILogger() {
endGroup: () => undefined,
};
}
exports.getCLILogger = getCLILogger;
exports.getRunnerLogger = getRunnerLogger;
//# sourceMappingURL=logging.js.map

2
lib/logging.js.map
View File

@@ -1 +1 @@
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAYtC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAFD,4CAEC;AAED,SAAgB,YAAY;IAC1B,OAAO;QACL,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AATD,oCASC"}
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAYtC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAFD,4CAEC;AAED,SAAgB,eAAe,CAAC,SAAkB;IAChD,OAAO;QACL,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS;QAClD,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AATD,0CASC"}

2
lib/repository.js generated
View File

@@ -1,7 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function parseRepositoryNwo(input) {
const parts = input.split('/');
const parts = input.split("/");
if (parts.length !== 2) {
throw new Error(`"${input}" is not a valid repository name`);
}

265
lib/runner.js generated Normal file
View File

@@ -0,0 +1,265 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const commander_1 = require("commander");
const analyze_1 = require("./analyze");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
const program = new commander_1.Command();
program.version("0.0.1");
function getTempDir(userInput) {
const tempDir = path.join(userInput || process.cwd(), "codeql-runner");
if (!fs.existsSync(tempDir)) {
fs.mkdirSync(tempDir, { recursive: true });
}
return tempDir;
}
function getToolsDir(userInput) {
const toolsDir = userInput || path.join(os.homedir(), "codeql-runner-tools");
if (!fs.existsSync(toolsDir)) {
fs.mkdirSync(toolsDir, { recursive: true });
}
return toolsDir;
}
const codeqlEnvJsonFilename = "codeql-env.json";
// Imports the environment from codeqlEnvJsonFilename if not already present
function importTracerEnvironment(config) {
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
for (const key of Object.keys(env)) {
process.env[key] = env[key];
}
}
}
// Allow the user to specify refs in full refs/heads/branch format
// or just the short branch name and prepend "refs/heads/" to it.
function parseRef(userInput) {
if (userInput.startsWith("refs/")) {
return userInput;
}
else {
return `refs/heads/${userInput}`;
}
}
// Parses the --trace-process-name arg from process.argv, or returns undefined
function parseTraceProcessName() {
for (let i = 0; i < process.argv.length - 1; i++) {
if (process.argv[i] === "--trace-process-name") {
return process.argv[i + 1];
}
}
return undefined;
}
// Parses the --trace-process-level arg from process.argv, or returns undefined
function parseTraceProcessLevel() {
for (let i = 0; i < process.argv.length - 1; i++) {
if (process.argv[i] === "--trace-process-level") {
const v = parseInt(process.argv[i + 1], 10);
return isNaN(v) ? undefined : v;
}
}
return undefined;
}
program
.command("init")
.description("Initializes CodeQL")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.")
.option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.")
.option("--config-file <file>", "Path to config file.")
.option("--codeql-path <path>", "Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--tools-dir <dir>", "Directory to use for CodeQL tools and other files to store between runs. Default is a subdirectory of the home directory.")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--debug", "Print more verbose output", false)
// This prevents a message like: error: unknown option '--trace-process-level'
// Remove this if commander.js starts supporting hidden options.
.allowUnknownOption()
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const tempDir = getTempDir(cmd.tempDir);
const toolsDir = getToolsDir(cmd.toolsDir);
// Wipe the temp dir
logger.info(`Cleaning temp directory ${tempDir}`);
fs.rmdirSync(tempDir, { recursive: true });
fs.mkdirSync(tempDir, { recursive: true });
const apiDetails = {
auth: cmd.githubAuth,
url: util_1.parseGithubUrl(cmd.githubUrl),
};
let codeql;
if (cmd.codeqlPath !== undefined) {
codeql = codeql_1.getCodeQL(cmd.codeqlPath);
}
else {
codeql = (await init_1.initCodeQL(undefined, apiDetails, tempDir, toolsDir, "runner", logger)).codeql;
}
const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), apiDetails, "runner", logger);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig === undefined) {
return;
}
if (process.platform === "win32") {
await init_1.injectWindowsTracer(parseTraceProcessName(), parseTraceProcessLevel(), config, codeql, tracerConfig);
}
// Always output a json file of the env that can be consumed programmatically
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
fs.writeFileSync(jsonEnvFile, JSON.stringify(tracerConfig.env));
if (process.platform === "win32") {
const batEnvFile = path.join(config.tempDir, "codeql-env.bat");
const batEnvFileContents = Object.entries(tracerConfig.env)
.map(([key, value]) => `Set ${key}=${value}`)
.join("\n");
fs.writeFileSync(batEnvFile, batEnvFileContents);
const powershellEnvFile = path.join(config.tempDir, "codeql-env.sh");
const powershellEnvFileContents = Object.entries(tracerConfig.env)
.map(([key, value]) => `$env:${key}="${value}"`)
.join("\n");
fs.writeFileSync(powershellEnvFile, powershellEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}", "${batEnvFile}" and "${powershellEnvFile}". ` +
`Please export these variables to future processes so that CodeQL can monitor the build. ` +
`If using cmd/batch run "call ${batEnvFile}" ` +
`or if using PowerShell run "cat ${powershellEnvFile} | Invoke-Expression".`);
}
else {
// Assume that anything that's not windows is using a unix-style shell
const shEnvFile = path.join(config.tempDir, "codeql-env.sh");
const shEnvFileContents = Object.entries(tracerConfig.env)
// Some vars contain ${LIB} that we do not want to be expanded when executing this script
.map(([key, value]) => `export ${key}="${value.replace(/\$/g, "\\$")}"`)
.join("\n");
fs.writeFileSync(shEnvFile, shEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}" and "${shEnvFile}". ` +
`Please export these variables to future processes so that CodeQL can monitor the build, ` +
`for example by running ". ${shEnvFile}".`);
}
}
catch (e) {
logger.error("Init failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command("autobuild")
.description("Attempts to automatically build code")
.option("--language <language>", "The language to build. Otherwise will detect the dominant compiled language.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
}
importTracerEnvironment(config);
let language = undefined;
if (cmd.language !== undefined) {
language = languages_1.parseLanguage(cmd.language);
if (language === undefined || !config.languages.includes(language)) {
throw new Error(`"${cmd.language}" is not a recognised language. ` +
`Known languages in this project are ${config.languages.join(", ")}.`);
}
}
else {
language = autobuild_1.determineAutobuildLanguage(config, logger);
}
if (language !== undefined) {
await autobuild_1.runAutobuild(language, config, logger);
}
}
catch (e) {
logger.error("Autobuild failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command("analyze")
.description("Finishes extracting code and runs CodeQL queries")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--no-upload", "Do not upload results after analysis.")
.option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
.option("--ram <ram>", "Amount of memory to use when running queries. Default is to use all available memory.")
.option("--no-add-snippets", "Specify whether to include code snippets in the sarif output.")
.option("--threads <threads>", "Number of threads to use when running queries. " +
"Default is to use all available cores.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const tempDir = getTempDir(cmd.tempDir);
const outputDir = cmd.outputDir || path.join(tempDir, "codeql-sarif");
const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
}
const apiDetails = {
auth: cmd.githubAuth,
url: util_1.parseGithubUrl(cmd.githubUrl),
};
await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, apiDetails, cmd.upload, "runner", outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
}
catch (e) {
logger.error("Analyze failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command("upload")
.description("Uploads a SARIF file, or all SARIF files from a directory, to code scanning")
.requiredOption("--sarif-file <file>", "SARIF file to upload, or a directory containing multiple SARIF files. (Required)")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
const apiDetails = {
auth: cmd.githubAuth,
url: util_1.parseGithubUrl(cmd.githubUrl),
};
try {
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, apiDetails, "runner", logger);
}
catch (e) {
logger.error("Upload failed");
logger.error(e);
process.exitCode = 1;
}
});
program.parse(process.argv);
//# sourceMappingURL=runner.js.map
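
The init command above always writes codeql-env.json so that other tooling can re-create the tracing environment programmatically. Below is a minimal sketch of consuming that file, assuming the runner's default ./codeql-runner temp directory; it mirrors importTracerEnvironment above and is not part of the diff.

const fs = require("fs");
const path = require("path");

// Load the environment written by "codeql-runner init" into this process so
// that a build launched from here is traced. Pass the same --temp-dir value
// that was given to init; the default below matches the runner's own default.
function applyCodeQLEnv(tempDir = path.join(process.cwd(), "codeql-runner")) {
    const jsonEnvFile = path.join(tempDir, "codeql-env.json");
    const env = JSON.parse(fs.readFileSync(jsonEnvFile, "utf-8"));
    for (const [key, value] of Object.entries(env)) {
        process.env[key] = value;
    }
}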

1
lib/runner.js.map Normal file

File diff suppressed because one or more lines are too long

76
lib/setup-tools.js generated
View File

@@ -1,76 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const toolcache = __importStar(require("@actions/tool-cache"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
class CodeQLSetup {
constructor(codeqlDist) {
this.dist = codeqlDist;
this.tools = path.join(this.dist, 'tools');
this.cmd = path.join(codeqlDist, 'codeql');
// TODO check process.arch ?
if (process.platform === 'win32') {
this.platform = 'win64';
if (this.cmd.endsWith('codeql')) {
this.cmd += ".exe";
}
}
else if (process.platform === 'linux') {
this.platform = 'linux64';
}
else if (process.platform === 'darwin') {
this.platform = 'osx64';
}
else {
throw new Error("Unsupported plaform: " + process.platform);
}
}
}
exports.CodeQLSetup = CodeQLSetup;
async function setupCodeQL() {
try {
const codeqlURL = core.getInput('tools', { required: true });
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
}
else {
const codeqlPath = await toolcache.downloadTool(codeqlURL);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
}
return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
}
catch (e) {
core.error(e);
throw new Error("Unable to download and extract CodeQL CLI");
}
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url) {
const match = url.match(/\/codeql-bundle-(.*)\//);
if (match === null || match.length < 2) {
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
}
let version = match[1];
if (!semver.valid(version)) {
core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = '0.0.0-' + version;
}
const s = semver.clean(version);
if (!s) {
throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
}
return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
//# sourceMappingURL=setup-tools.js.map

1
lib/setup-tools.js.map
View File

@@ -1 +0,0 @@
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAC7B,+CAAiC;AAEjC,MAAa,WAAW;IAMtB,YAAY,UAAkB;QAC5B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC/B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACpB;SACF;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACvC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC3B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACxC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SACzB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC7D;IACH,CAAC;CACF;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC/B,IAAI;QACF,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAExD,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAC9D,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACrD;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,gBAAgB,CAAC,CAAC;SACtF;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE3D;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAC9D;AACH,CAAC;AAnBD,kCAmBC;AAED,SAAgB,mBAAmB,CAAC,GAAW;IAE7C,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;IAClD,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,wBAAwB,GAAG,iCAAiC,CAAC,CAAC;KAC/E;IAED,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAEvB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;QAC1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,OAAO,gEAAgE,OAAO,GAAG,CAAC,CAAC;QAChH,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC;KAC9B;IAED,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,iDAAiD,OAAO,UAAU,CAAC,CAAC;KAC/G;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AApBD,kDAoBC"}

60
lib/setup-tools.test.js generated
View File

@@ -1,60 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const setupTools = __importStar(require("./setup-tools"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env['GITHUB_WORKSPACE'] = tmpDir;
process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
const versions = ['20200601', '20200610'];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
nock_1.default('https://example.com')
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
await setupTools.setupCodeQL();
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
}
const cachedVersions = toolcache.findAllVersions('CodeQL');
t.is(cachedVersions.length, 2);
});
});
ava_1.default('parse codeql bundle url version', t => {
const tests = {
'20200601': '0.0.0-20200601',
'20200601.0': '0.0.0-20200601.0',
'20200601.0.0': '20200601.0.0',
'1.2.3': '1.2.3',
'1.2.3-alpha': '1.2.3-alpha',
'1.2.3-beta.1': '1.2.3-beta.1',
};
for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`;
try {
const parsedVersion = setupTools.getCodeQLURLVersion(url);
t.deepEqual(parsedVersion, expectedVersion);
}
catch (e) {
t.fail(e.message);
}
}
});
//# sourceMappingURL=setup-tools.test.js.map

1
lib/setup-tools.test.js.map
View File

@@ -1 +0,0 @@
{"version":3,"file":"setup-tools.test.js","sourceRoot":"","sources":["../src/setup-tools.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,0DAA4C;AAC5C,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,UAAU,CAAC,WAAW,EAAE,CAAC;YAE/B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YAC1D,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}

224
lib/setup-tracer.js generated
View File

@@ -1,224 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
,
'SEMMLE_RUNNER',
,
'SEMMLE_COPY_EXECUTABLES_ROOT',
,
'SEMMLE_DEPTRACE_SOCKET',
,
'SEMMLE_JAVA_TOOL_OPTIONS'
]);
async function tracerConfig(codeql, database, compilerSpec) {
const env = await codeql.getTracerEnv(database, compilerSpec);
const config = env['ODASA_TRACER_CONFIGURATION'];
const info = { spec: config, env: {} };
// Extract critical tracer variables from the environment
for (let entry of Object.entries(env)) {
const key = entry[0];
const value = entry[1];
// skip ODASA_TRACER_CONFIGURATION as it is handled separately
if (key === 'ODASA_TRACER_CONFIGURATION') {
continue;
}
// skip undefined values
if (typeof value === 'undefined') {
continue;
}
// Keep variables that do not exist in current environment. In addition always keep
// critical and CODEQL_ variables
if (typeof process.env[key] === 'undefined' || CRITICAL_TRACER_VARS.has(key) || key.startsWith('CODEQL_')) {
info.env[key] = value;
}
}
return info;
}
function concatTracerConfigs(configs) {
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
// A tracer 'spec' file has the following format [log_file, number_of_blocks, blocks_text]
// Merge the environments
const env = {};
let copyExecutables = false;
let envSize = 0;
for (const v of configs) {
for (let e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
// skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
if (name === 'SEMMLE_COPY_EXECUTABLES_ROOT') {
copyExecutables = true;
}
else if (name in env) {
if (env[name] !== value) {
throw Error('Incompatible values in environment parameter ' +
name + ': ' + env[name] + ' and ' + value);
}
}
else {
env[name] = value;
envSize += 1;
}
}
}
// Concatenate spec files into a new spec file
let languages = Object.keys(configs);
const cppIndex = languages.indexOf('cpp');
// Make sure cpp is the last language, if it's present since it must be concatenated last
if (cppIndex !== -1) {
let lastLang = languages[languages.length - 1];
languages[languages.length - 1] = languages[cppIndex];
languages[cppIndex] = lastLang;
}
let totalLines = [];
let totalCount = 0;
for (let lang of languages) {
const lines = fs.readFileSync(configs[lang].spec, 'utf8').split(/\r?\n/);
const count = parseInt(lines[1], 10);
totalCount += count;
totalLines.push(...lines.slice(2));
}
const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
const spec = path.resolve(tempFolder, 'compound-spec');
const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
if (copyExecutables) {
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
envSize += 1;
}
fs.writeFileSync(spec, newSpecContent.join('\n'));
// Prepare the content of the compound environment file
let buffer = Buffer.alloc(4);
buffer.writeInt32LE(envSize, 0);
for (let e of Object.entries(env)) {
const key = e[0];
const value = e[1];
const lineBuffer = new Buffer(key + '=' + value + '\0', 'utf8');
const sizeBuffer = Buffer.alloc(4);
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
}
// Write the compound environment
const envPath = spec + '.environment';
fs.writeFileSync(envPath, buffer);
return { env, spec };
}
async function sendSuccessStatusReport(startedAt, config) {
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);
const languages = config.languages.join(',');
const workflowLanguages = core.getInput('languages', { required: false });
const paths = (config.originalUserInput.paths || []).join(',');
const pathsIgnore = (config.originalUserInput['paths-ignore'] || []).join(',');
const disableDefaultQueries = config.originalUserInput['disable-default-queries'] ? languages : '';
const queries = (config.originalUserInput.queries || []).map(q => q.uses).join(',');
const statusReport = {
...statusReportBase,
languages: languages,
workflow_languages: workflowLanguages,
paths: paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries,
};
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
let config;
let codeql;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
return;
}
core.startGroup('Setup CodeQL tools');
codeql = await codeql_1.setupCodeQL();
await codeql.printVersion();
core.endGroup();
core.startGroup('Load language configuration');
config = await configUtils.initConfig();
analysisPaths.includeAndExcludeAnalysisPaths(config);
core.endGroup();
}
catch (e) {
core.setFailed(e.message);
console.log(e);
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
return;
}
try {
const sourceRoot = path.resolve();
// Forward Go flags
const goFlags = process.env['GOFLAGS'];
if (goFlags) {
core.exportVariable('GOFLAGS', goFlags);
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
}
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
core.exportVariable('CODEQL_RAM', codeqlRam);
const databaseFolder = util.getCodeQLDatabasesDir();
fs.mkdirSync(databaseFolder, { recursive: true });
let tracedLanguageConfigs = [];
// TODO: replace this code once CodeQL supports multi-language tracing
for (let language of config.languages) {
const languageDatabase = path.join(databaseFolder, language);
// Init language database
await codeql.databaseInit(languageDatabase, language, sourceRoot);
// TODO: add better detection of 'traced languages' instead of using a hard coded list
if (codeql_1.isTracedLanguage(language)) {
const config = await tracerConfig(codeql, languageDatabase);
tracedLanguageConfigs.push(config);
}
}
if (tracedLanguageConfigs.length > 0) {
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs);
if (mainTracerConfig.spec) {
for (let entry of Object.entries(mainTracerConfig.env)) {
core.exportVariable(entry[0], entry[1]);
}
core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
if (process.platform === 'darwin') {
core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeql.getDir(), 'tools', 'osx64', 'libtrace.dylib'));
}
else if (process.platform === 'win32') {
await exec.exec('powershell', [
path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
path.resolve(codeql.getDir(), 'tools', 'win64', 'tracer.exe'),
], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
}
else {
core.exportVariable('LD_PRELOAD', path.join(codeql.getDir(), 'tools', 'linux64', '${LIB}trace.so'));
}
}
}
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase('init', 'failure', startedAt, error.message, error.stack));
return;
}
await sendSuccessStatusReport(startedAt, config);
}
run().catch(e => {
core.setFailed("init action failed: " + e);
console.log(e);
});
//# sourceMappingURL=setup-tracer.js.map
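
For context on the concatTracerConfigs code removed above: a tracer spec file is plain text with the log path on the first line, a block count on the second, and the block text after that. The sketch below is purely illustrative; the path and block contents are made up.

// Shape of a single tracer spec file, as read by the removed code above:
//   line 1     -> log file the tracer writes to
//   line 2     -> number of blocks that follow
//   lines 3..n -> the block text (a block may span several lines)
const exampleSpec = [
    "/tmp/build-tracer.log", // log_file (illustrative path)
    "1",                     // number_of_blocks
    "block text line A",     // one block, spanning two lines (illustrative)
    "block text line B",
].join("\n");
console.log(exampleSpec);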

1
lib/setup-tracer.js.map

File diff suppressed because one or more lines are too long

4
lib/shared-environment.js generated
View File

@@ -1,10 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather that the init action.
exports.CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
//# sourceMappingURL=shared-environment.js.map

22
lib/test-utils.js generated
View File

@@ -1,22 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function silenceDebugOutput(test) {
const typedTest = test;
typedTest.beforeEach(t => {
const processStdoutWrite = process.stdout.write.bind(process.stdout);
t.context.write = processStdoutWrite;
process.stdout.write = (str, encoding, cb) => {
// Core library will directly call process.stdout.write for commands
// We don't want :: commands to be executed by the runner during tests
if (!str.match(/^::/)) {
processStdoutWrite(str, encoding, cb);
}
return true;
};
});
typedTest.afterEach(t => {
process.stdout.write = t.context.write;
});
}
exports.silenceDebugOutput = silenceDebugOutput;
//# sourceMappingURL=test-utils.js.map

1
lib/test-utils.js.map
View File

@@ -1 +0,0 @@
{"version":3,"file":"test-utils.js","sourceRoot":"","sources":["../src/test-utils.ts"],"names":[],"mappings":";;AAEA,SAAgB,kBAAkB,CAAC,IAAwB;IACzD,MAAM,SAAS,GAAG,IAAmC,CAAC;IAEtD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACrB,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG,kBAAkB,CAAC;QACrC,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,GAAQ,EAAE,QAAc,EAAE,EAA0B,EAAE,EAAE;YAC5E,oEAAoE;YACpE,sEAAsE;YACtE,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACnB,kBAAkB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC;aACzC;YACD,OAAO,IAAI,CAAC;QAChB,CAAC,CAAC;IACN,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;QACpB,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3C,CAAC,CAAC,CAAC;AACL,CAAC;AAnBD,gDAmBC"}

15
lib/testing-utils.js generated
View File

@@ -19,19 +19,19 @@ function wrapOutput(context) {
// write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean;
return (chunk, encoding, cb) => {
// Work out which method overload we are in
if (cb === undefined && typeof encoding === 'function') {
if (cb === undefined && typeof encoding === "function") {
cb = encoding;
encoding = undefined;
}
// Record the output
if (typeof chunk === 'string') {
if (typeof chunk === "string") {
context.testOutput += chunk;
}
else {
context.testOutput += new TextDecoder(encoding || 'utf-8').decode(chunk);
context.testOutput += new TextDecoder(encoding || "utf-8").decode(chunk);
}
// Satisfy contract by calling callback when done
if (cb !== undefined && typeof cb === 'function') {
if (cb !== undefined && typeof cb === "function") {
cb();
}
return true;
@@ -39,7 +39,7 @@ function wrapOutput(context) {
}
function setupTests(test) {
const typedTest = test;
typedTest.beforeEach(t => {
typedTest.beforeEach((t) => {
// Set an empty CodeQL object so that all method calls will fail
// unless the test explicitly sets one up.
CodeQL.setCodeQL({});
@@ -56,11 +56,8 @@ function setupTests(test) {
// process.env only has strings fields, so a shallow copy is fine.
t.context.env = {};
Object.assign(t.context.env, process.env);
// Any test that runs code that expects to only be run on actions
// will depend on various environment variables.
process.env['GITHUB_API_URL'] = 'https://github.localhost/api/v3';
});
typedTest.afterEach.always(t => {
typedTest.afterEach.always((t) => {
// Restore stdout and stderr
// The captured output is only replayed if the test failed
process.stdout.write = t.context.stdoutWrite;

2
lib/testing-utils.js.map
View File

@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AAInC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;QAE1C,iEAAiE;QACjE,gDAAgD;QAChD,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,iCAAiC,CAAC;IACpE,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AA3CD,gCA2CC"}
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"}

87
lib/toolrunner-error-catcher.js generated Normal file
View File

@@ -0,0 +1,87 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
/**
* Wrapper for toolrunner.Toolrunner which checks for specific return code and/or regex matches in console output.
* Output will be streamed to the live console as well as captured for subsequent processing.
* Returns promise with return code
*
* @param commandLine command to execute
* @param args optional arguments for tool. Escaping is handled by the lib.
* @param matchers defines specific codes and/or regexes that should lead to return of a custom error
* @param options optional exec options. See ExecOptions
* @returns Promise<number> exit code
*/
async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
var _a, _b, _c;
let stdout = "";
let stderr = "";
const listeners = {
stdout: (data) => {
var _a, _b;
stdout += data.toString();
if (((_b = (_a = options) === null || _a === void 0 ? void 0 : _a.listeners) === null || _b === void 0 ? void 0 : _b.stdout) !== undefined) {
options.listeners.stdout(data);
}
else {
// if no stdout listener was originally defined then we match default behavior of Toolrunner
process.stdout.write(data);
}
},
stderr: (data) => {
var _a, _b;
stderr += data.toString();
if (((_b = (_a = options) === null || _a === void 0 ? void 0 : _a.listeners) === null || _b === void 0 ? void 0 : _b.stderr) !== undefined) {
options.listeners.stderr(data);
}
else {
// if no stderr listener was originally defined then we match default behavior of Toolrunner
process.stderr.write(data);
}
},
};
// we capture the original return code or error so that if no match is found we can duplicate the behavior
let returnState;
try {
returnState = await new toolrunner.ToolRunner(await safeWhich.safeWhich(commandLine), args, {
...options,
listeners,
ignoreReturnCode: true,
}).exec();
}
catch (e) {
returnState = e;
}
// if there is a zero return code then we do not apply the matchers
if (returnState === 0)
return returnState;
if (matchers) {
for (const matcher of matchers) {
if (matcher.exitCode === returnState || ((_a = matcher.outputRegex) === null || _a === void 0 ? void 0 : _a.test(stderr)) || ((_b = matcher.outputRegex) === null || _b === void 0 ? void 0 : _b.test(stdout))) {
throw new Error(matcher.message);
}
}
}
if (typeof returnState === "number") {
// only if we were instructed to ignore the return code do we ever return it non-zero
if ((_c = options) === null || _c === void 0 ? void 0 : _c.ignoreReturnCode) {
return returnState;
}
else {
throw new Error(`The process '${commandLine}' failed with exit code ${returnState}`);
}
}
else {
throw returnState;
}
}
exports.toolrunnerErrorCatcher = toolrunnerErrorCatcher;
//# sourceMappingURL=toolrunner-error-catcher.js.map
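
A brief usage sketch for the wrapper above (the tests that follow exercise it more thoroughly); the command, exit code and regex here are illustrative assumptions, only the matcher shape ({ exitCode, outputRegex, message }) comes from the code.

const { toolrunnerErrorCatcher } = require("./toolrunner-error-catcher");

// Run an external command and translate one known failure pattern into a
// clearer error; otherwise behave like a plain ToolRunner invocation.
async function finalizeDatabase(databasePath) {
    const matchers = [
        {
            exitCode: 32,                                    // illustrative exit code
            outputRegex: new RegExp("No source code found"), // illustrative pattern
            message: "No code was found to analyze",
        },
    ];
    return await toolrunnerErrorCatcher("codeql", ["database", "finalize", databasePath], matchers);
}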

1
lib/toolrunner-error-catcher.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI;SACvB,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,CAAC;KACjB;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW,WAChC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,WACjC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,UAAI,OAAO,0CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAzED,wDAyEC"}

145
lib/toolrunner-error-catcher.test.js generated Normal file
View File

@@ -0,0 +1,145 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const exec = __importStar(require("@actions/exec"));
const ava_1 = __importDefault(require("ava"));
const testing_utils_1 = require("./testing-utils");
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("matchers are never applied if non-error exit", async (t) => {
const testArgs = buildDummyArgs("foo bar\\nblort qux", "foo bar\\nblort qux", "", 0);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" },
];
t.deepEqual(await exec.exec("node", testArgs), 0);
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), 0);
});
ava_1.default("regex matchers are applied to stdout for non-zero exit code", async (t) => {
const testArgs = buildDummyArgs("foo bar\\nblort qux", "", "", 1);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("regex matchers are applied to stderr for non-zero exit code", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("matcher returns correct error message when multiple matchers defined", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
const matchers = [
{ exitCode: 456, outputRegex: new RegExp("lorem ipsum"), message: "😩" },
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
{ exitCode: 789, outputRegex: new RegExp("blah blah"), message: "🤦‍♂️" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("matcher returns first match to regex when multiple matches", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
{ exitCode: 789, outputRegex: new RegExp("blah blah"), message: "🤦‍♂️" },
{ exitCode: 987, outputRegex: new RegExp("foo bar"), message: "🚫" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("exit code matchers are applied", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 123);
const matchers = [
{
exitCode: 123,
outputRegex: new RegExp("this will not match"),
message: "🦄",
},
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 123",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
const testArgs = buildDummyArgs("standard output", "error output", "", 199);
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], { ignoreReturnCode: false }), { instanceOf: Error });
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], {
ignoreReturnCode: true,
}), 199);
});
ava_1.default("execErrorCatcher preserves behavior of provided listeners", async (t) => {
const stdoutExpected = "standard output";
const stderrExpected = "error output";
let stdoutActual = "";
let stderrActual = "";
const listeners = {
stdout: (data) => {
stdoutActual += data.toString();
},
stderr: (data) => {
stderrActual += data.toString();
},
};
const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], {
listeners,
}), 0);
t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
t.deepEqual(stderrActual, `${stderrExpected}\n`);
});
function buildDummyArgs(stdoutContents, stderrContents, desiredErrorMessage, desiredExitCode) {
let command = "";
if (stdoutContents)
command += `console.log("${stdoutContents}");`;
if (stderrContents)
command += `console.error("${stderrContents}");`;
if (command.length === 0)
throw new Error("Must provide contents for either stdout or stderr");
if (desiredErrorMessage)
command += `throw new Error("${desiredErrorMessage}");`;
if (desiredExitCode)
command += `process.exitCode = ${desiredExitCode};`;
return ["-e", command];
}
//# sourceMappingURL=toolrunner-error-catcher.test.js.map

File diff suppressed because one or more lines are too long

147  lib/tracer-config.js generated Normal file

@@ -0,0 +1,147 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const languages_1 = require("./languages");
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set([
"SEMMLE_PRELOAD_libtrace",
"SEMMLE_RUNNER",
"SEMMLE_COPY_EXECUTABLES_ROOT",
"SEMMLE_DEPTRACE_SOCKET",
"SEMMLE_JAVA_TOOL_OPTIONS",
]);
async function getTracerConfigForLanguage(codeql, config, language) {
const env = await codeql.getTracerEnv(util.getCodeQLDatabasePath(config.tempDir, language));
const spec = env["ODASA_TRACER_CONFIGURATION"];
const info = { spec, env: {} };
// Extract critical tracer variables from the environment
for (const entry of Object.entries(env)) {
const key = entry[0];
const value = entry[1];
// skip ODASA_TRACER_CONFIGURATION as it is handled separately
if (key === "ODASA_TRACER_CONFIGURATION") {
continue;
}
// skip undefined values
if (typeof value === "undefined") {
continue;
}
// Keep variables that do not exist in current environment. In addition always keep
// critical and CODEQL_ variables
if (typeof process.env[key] === "undefined" ||
CRITICAL_TRACER_VARS.has(key) ||
key.startsWith("CODEQL_")) {
info.env[key] = value;
}
}
return info;
}
exports.getTracerConfigForLanguage = getTracerConfigForLanguage;
function concatTracerConfigs(tracerConfigs, config) {
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
// A tracer 'spec' file has the following format [log_file, number_of_blocks, blocks_text]
// Merge the environments
const env = {};
let copyExecutables = false;
let envSize = 0;
for (const v of Object.values(tracerConfigs)) {
for (const e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
// skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
if (name === "SEMMLE_COPY_EXECUTABLES_ROOT") {
copyExecutables = true;
}
else if (name in env) {
if (env[name] !== value) {
throw Error(`Incompatible values in environment parameter ${name}: ${env[name]} and ${value}`);
}
}
else {
env[name] = value;
envSize += 1;
}
}
}
// Concatenate spec files into a new spec file
const languages = Object.keys(tracerConfigs);
const cppIndex = languages.indexOf("cpp");
// Make sure cpp is the last language, if it's present since it must be concatenated last
if (cppIndex !== -1) {
const lastLang = languages[languages.length - 1];
languages[languages.length - 1] = languages[cppIndex];
languages[cppIndex] = lastLang;
}
const totalLines = [];
let totalCount = 0;
for (const lang of languages) {
const lines = fs
.readFileSync(tracerConfigs[lang].spec, "utf8")
.split(/\r?\n/);
const count = parseInt(lines[1], 10);
totalCount += count;
totalLines.push(...lines.slice(2));
}
const newLogFilePath = path.resolve(config.tempDir, "compound-build-tracer.log");
const spec = path.resolve(config.tempDir, "compound-spec");
const compoundTempFolder = path.resolve(config.tempDir, "compound-temp");
const newSpecContent = [
newLogFilePath,
totalCount.toString(10),
...totalLines,
];
if (copyExecutables) {
env["SEMMLE_COPY_EXECUTABLES_ROOT"] = compoundTempFolder;
envSize += 1;
}
fs.writeFileSync(spec, newSpecContent.join("\n"));
// Prepare the content of the compound environment file
let buffer = Buffer.alloc(4);
buffer.writeInt32LE(envSize, 0);
for (const e of Object.entries(env)) {
const key = e[0];
const value = e[1];
const lineBuffer = Buffer.from(`${key}=${value}\0`, "utf8");
const sizeBuffer = Buffer.alloc(4);
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
}
// Write the compound environment
const envPath = `${spec}.environment`;
fs.writeFileSync(envPath, buffer);
return { env, spec };
}
exports.concatTracerConfigs = concatTracerConfigs;
async function getCombinedTracerConfig(config, codeql) {
// Abort if there are no traced languages as there's nothing to do
const tracedLanguages = config.languages.filter(languages_1.isTracedLanguage);
if (tracedLanguages.length === 0) {
return undefined;
}
// Get all the tracer configs and combine them together
const tracedLanguageConfigs = {};
for (const language of tracedLanguages) {
tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
}
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
return mainTracerConfig;
}
exports.getCombinedTracerConfig = getCombinedTracerConfig;
//# sourceMappingURL=tracer-config.js.map
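As a reading aid for the compound environment file that concatTracerConfigs writes above (a little-endian int32 entry count, then for each entry a little-endian int32 byte length followed by a NUL-terminated key=value string), here is a hedged sketch of a decoder. The readCompoundEnvironment helper is illustrative and not part of the action.

import * as fs from "fs";

// Decode `${spec}.environment` back into a map, mirroring the writer above.
function readCompoundEnvironment(envPath: string): { [key: string]: string } {
  const buffer = fs.readFileSync(envPath);
  const env: { [key: string]: string } = {};
  const count = buffer.readInt32LE(0); // number of key=value entries
  let offset = 4;
  for (let i = 0; i < count; i++) {
    const length = buffer.readInt32LE(offset); // byte length of "key=value\0"
    offset += 4;
    const entry = buffer.toString("utf8", offset, offset + length - 1); // drop the trailing NUL
    offset += length;
    const separator = entry.indexOf("=");
    env[entry.substring(0, separator)] = entry.substring(separator + 1);
  }
  return env;
}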

Some files were not shown because too many files have changed in this diff.