Compare commits

...

88 Commits

Author SHA1 Message Date
Chris Gavin
0dc37c7260 Merge pull request #377 from github/fix-version-in-message
Fix the formatting of a warning message.
2021-01-26 17:43:05 +00:00
Chris Gavin
f109c77463 Merge branch 'main' into fix-version-in-message 2021-01-26 17:24:58 +00:00
Chris Gavin
601dc8486f Merge pull request #376 from github/fix-deduplication-sources
Fix deduplication of bundle download sources.
2021-01-26 17:22:03 +00:00
Chris Gavin
d182a0e3aa Fix deduplication of bundle download sources. 2021-01-26 16:56:43 +00:00
Chris Gavin
5261491807 Fix the formatting of a warning message. 2021-01-26 16:52:43 +00:00
Chris Gavin
24872f608c Merge pull request #374 from github/automatic-upload-debug-logs
Upload debug logs automatically when `ACTIONS_STEP_DEBUG` is enabled.
2021-01-26 10:11:17 +00:00
Chris Gavin
94b32884f9 Dump logs to stdout instead of stderr to avoid synchronization problems. 2021-01-26 09:47:31 +00:00
Chris Gavin
8705aaff32 Walk log files manually rather than using the Actions globber. 2021-01-26 09:21:38 +00:00
Chris Gavin
219142571c Upload debug logs automatically when ACTIONS_STEP_DEBUG is enabled. 2021-01-25 15:44:31 +00:00
Simon Engledew
7a340d32a1 Merge pull request #370 from github/simon-engledew/hide-workflow-not-found
Do not warn users if a workflow cannot be read
2021-01-25 09:21:11 +00:00
Simon Engledew
ee4d06713e Fix wonky comment 2021-01-22 14:08:43 +00:00
Simon Engledew
6be1f5ce0e Add more granular exception handling 2021-01-22 13:52:04 +00:00
Simon Engledew
8a9922df92 Disambiguate report messages 2021-01-22 13:52:04 +00:00
Simon Engledew
795b1923ec Swap method naming to account for new functionality 2021-01-22 13:52:03 +00:00
Simon Engledew
28e2860afb Correctly report WorkflowMissing 2021-01-22 13:51:45 +00:00
Simon Engledew
4547749a2f Do not warn users if a workflow cannot be read
This will still send a LintFailed message status report.
2021-01-22 13:51:19 +00:00
Robert
484a9ad67e Merge pull request #368 from github/robertbrignull/external-token-fix
Only insert external repos token if supplied
2021-01-22 12:50:17 +00:00
Robert
1013277382 Merge branch 'main' into robertbrignull/external-token-fix 2021-01-22 12:08:26 +00:00
Robert
504cb5e7a2 Merge pull request #371 from github/robertbrignull/pr-checks-workflow
Move .github/workflows back where it was
2021-01-21 18:26:50 +00:00
Robert
cfdf2eaf7a move .github/workflows back where it was 2021-01-21 16:06:26 +00:00
Robert
a1bfa7609f Merge pull request #369 from felicitymay/patch-1
Update description to include limit on number of results
2021-01-19 17:57:15 +00:00
Felicity Chapman
a3a8231e64 Update upload-sarif/action.yml
Co-authored-by: hubwriter <hubwriter@github.com>
2021-01-19 17:17:45 +00:00
Felicity Chapman
d0ac97e33f Add results limit to description 2021-01-19 17:02:31 +00:00
Robert
cb574a7d60 only insert external repos token if supplied 2021-01-19 15:42:57 +00:00
Robert
b0adc415a0 Merge pull request #364 from ericcornelissen/eslint/no-unused-vars
Update code so "@typescript-eslint/no-unused-vars" passes
2021-01-18 10:01:59 +00:00
Eric Cornelissen
946779f5b6 Run npm run-script build 2021-01-15 18:40:06 +01:00
Eric Cornelissen
9a753aa409 Removed unused vars in config-utils.test.ts 2021-01-15 18:25:29 +01:00
Eric Cornelissen
2a6d6c52d7 Update ESLint configuration
Remove the line to disable the "github/no-unused-vars" rule.
2021-01-15 18:20:07 +01:00
Robin Neatherway
8659fb33f9 Merge pull request #362 from github/rneatherway/merge-workflows
Merge pr-check and integration-testing workflows
2021-01-15 16:26:34 +00:00
Robin Neatherway
137e614f23 Merge branch 'main' into rneatherway/merge-workflows 2021-01-15 15:59:36 +00:00
Simon Engledew
4bdcd08344 Merge pull request #363 from github/simon-engledew/fix-incorrect-branch-warning
Fix overzealous warnings when PR scanning is not required
2021-01-15 10:59:19 +00:00
Simon Engledew
b6fc7138bf Merge branch 'main' into simon-engledew/fix-incorrect-branch-warning 2021-01-15 10:41:33 +00:00
Simon Engledew
1a6f6a27b3 Implement PR feedback 2021-01-15 08:28:21 +00:00
Robert
f86e200d13 Merge pull request #357 from github/robertbrignull/external-token-option
Introduce external repository token
2021-01-14 17:41:30 +00:00
Robert
c8abbce0a2 Merge branch 'main' into robertbrignull/external-token-option 2021-01-14 17:27:33 +00:00
Robin Neatherway
3d63fa4dad Merge pull request #361 from github/rneatherway/status-base
Correct typo in setting of PR upload base ref
2021-01-14 10:28:34 +00:00
Simon Engledew
0853901c0d Fix overzealous warning when PR scanning is not required 2021-01-13 12:28:24 +00:00
Robin Neatherway
369cad8272 Use the fully qualified ref name 2021-01-13 12:16:10 +00:00
Robin Neatherway
9ace6974f2 Merge pr-check and integration-testing workflows
These had some minor overlap checking that the JS is up to date and
there isn't any benefit in having them separate as the jobs are run in
parallel anyway.
2021-01-13 12:10:34 +00:00
Robin Neatherway
884ee1d129 Correct typo in setting upload base ref 2021-01-13 12:00:33 +00:00
Robin Neatherway
a7f3c648eb Update test to check base ref/sha values 2021-01-13 11:57:55 +00:00
Robert
087e7a3a1a Merge pull request #360 from github/robertbrignull/preload_tracer_env_var
Output path to preload_tracer in env file
2021-01-13 10:15:31 +00:00
Robert
97a70e6013 include missing changes 2021-01-12 12:09:22 +00:00
Robert
90d1a31dd4 Introduce external repository token 2021-01-12 12:07:03 +00:00
Robert
70733e4ae5 add call operator 2021-01-12 11:44:25 +00:00
Robert
a432f684f7 Use CODEQL_RUNNER env var in integration tests 2021-01-12 11:29:42 +00:00
Robert
2f9814894f Add tests of CODEQL_RUNNER env var 2021-01-12 11:29:30 +00:00
Robert
c796788c33 move CODEQL_RUNNER population to tracer-config.ts 2021-01-12 10:49:08 +00:00
Robert
cd2eafc8e3 switch to using runner instead of preload_tracer 2021-01-12 10:43:06 +00:00
Robert
5a03a14bfb use CODEQL_PLATFORM 2021-01-12 10:24:25 +00:00
Robert
dbd8007298 Output path to preload_tracer 2021-01-11 11:15:53 +00:00
Sam Partington
a0c4707dcc Merge pull request #323 from github/split-upload-method
Split upload method into two mode-specific ones
2021-01-06 12:02:34 +00:00
Sam Partington
c7275a75ce Don't re-retrieve GitHub version when already have it
54e0c67332 (r45693026)
54e0c67332 (r45693030)
2021-01-06 11:40:16 +00:00
Sam Partington
023add5df0 Fix linter violations 2021-01-06 11:19:50 +00:00
Sam Partington
4e46a490ae Merge branch 'main' into split-upload-method 2021-01-06 11:13:51 +00:00
Sam Partington
54e0c67332 Revert "Retrieve GitHubVersion when used rather than passing around"
This reverts commit 6de1b753c2.

https://github.com/github/codeql-action/pull/323#discussion_r551960301
2021-01-06 11:06:01 +00:00
Sam Partington
4bc186cf34 Move type definition to where it's used
https://github.com/github/codeql-action/pull/323#discussion_r551951925
2021-01-06 10:58:32 +00:00
Sam Partington
1da4ce5a03 Ensure stats get sent when not uploading results
https://github.com/github/codeql-action/pull/323#discussion_r551948732
2021-01-06 10:52:16 +00:00
Robert
20d8f91819 Merge pull request #358 from github/robertbrignull/recursive_sarif_test
Add test of finding SARIF files recursively
2021-01-04 15:34:11 +00:00
Robert
3792ed8ceb Merge branch 'main' into robertbrignull/recursive_sarif_test 2021-01-04 15:15:44 +00:00
Simon Engledew
b1e0b46970 Merge pull request #348 from github/simon-engledew/checkout-wrong-heads
Only report the first CheckoutWrongHead lint error
2021-01-04 14:19:05 +00:00
Simon Engledew
034bf318b8 Merge branch 'main' into simon-engledew/checkout-wrong-heads 2021-01-04 13:37:11 +00:00
Robert
bd4e3adfd9 Add test of finding SARIF files recursively 2021-01-04 13:12:30 +00:00
Robert
230cb9b734 Merge pull request #352 from chao2zhang/main
Support find .sarif files recursively
2021-01-04 12:58:36 +00:00
Simon Engledew
456cd431ff Improve test coverage 2021-01-04 12:05:37 +00:00
Simon Engledew
1511db33b3 Only check the steps of the job currently being run 2021-01-04 12:00:15 +00:00
Chao Zhang
55eae6652f Support find .sarif files recursively 2020-12-22 23:24:51 -08:00
Chris Gavin
094554cf89 Merge pull request #349 from github/prevent-accidental-analysis-of-get-pip
Prevent accidental analysis of the `get-pip.py` script.
2020-12-22 14:48:15 +00:00
Chris Gavin
3c494fdd7a Prevent accidental analysis of the get-pip.py script. 2020-12-22 14:16:57 +00:00
Sam Partington
6de1b753c2 Retrieve GitHubVersion when used rather than passing around 2020-12-22 11:49:42 +00:00
Sam Partington
45dd5ee97d Fix linter violations 2020-12-22 11:28:08 +00:00
Sam Partington
82a8fa443e Merge branch 'main' into split-upload-method 2020-12-22 11:23:49 +00:00
Simon Engledew
e89a24b8cb Only report the first CheckoutWrongHead lint error 2020-12-21 11:08:49 +00:00
Simon Engledew
dc999c55d0 Merge pull request #346 from github/simon-engledew/fix-numerical-branches
Fix for numerical branch names
2020-12-21 09:17:09 +00:00
Simon Engledew
2d00e8c6f7 Fix for numerical branch names 2020-12-18 10:23:46 +00:00
Robin Neatherway
9f7bdecc04 Merge pull request #341 from github/rneatherway/gate-integration-testing
Gate execution of expensive tests on JS being up to date
2020-12-16 11:06:41 +00:00
Robin Neatherway
cea5932aad Merge branch 'main' into rneatherway/gate-integration-testing 2020-12-15 18:16:25 +00:00
Robert
cbd120ea91 Merge pull request #343 from github/dependabot/npm_and_yarn/ini-1.3.8
Bump ini from 1.3.5 to 1.3.8
2020-12-14 10:42:35 +00:00
Robert
71c7759fac update node_modules 2020-12-14 10:25:05 +00:00
Robert
9435055597 Merge branch 'main' into dependabot/npm_and_yarn/ini-1.3.8 2020-12-14 10:24:17 +00:00
Robert
5d77983efc Merge pull request #344 from github/dependabot/npm_and_yarn/runner/ini-1.3.8
Bump ini from 1.3.5 to 1.3.8 in /runner
2020-12-14 10:23:46 +00:00
dependabot[bot]
1fd28a0d4c Bump ini from 1.3.5 to 1.3.8 in /runner
Bumps [ini](https://github.com/isaacs/ini) from 1.3.5 to 1.3.8.
- [Release notes](https://github.com/isaacs/ini/releases)
- [Commits](https://github.com/isaacs/ini/compare/v1.3.5...v1.3.8)

Signed-off-by: dependabot[bot] <support@github.com>
2020-12-13 05:06:37 +00:00
dependabot[bot]
46c74bba1d Bump ini from 1.3.5 to 1.3.8
Bumps [ini](https://github.com/isaacs/ini) from 1.3.5 to 1.3.8.
- [Release notes](https://github.com/isaacs/ini/releases)
- [Commits](https://github.com/isaacs/ini/compare/v1.3.5...v1.3.8)

Signed-off-by: dependabot[bot] <support@github.com>
2020-12-12 17:32:00 +00:00
Robin Neatherway
3e176f8293 Gate execution of expensive tests on JS being up to date 2020-12-08 16:53:35 +00:00
Sam Partington
ff28c8d403 Move uploading side-effect out of runAnalyze
https://github.com/github/codeql-action/pull/323#discussion_r530978010
2020-11-27 12:27:10 +00:00
Sam Partington
9532bda6e4 Use better comparison operator 2020-11-26 11:57:34 +00:00
Sam Partington
57514f31db Split upload method into two mode-specific ones 2020-11-26 11:50:57 +00:00
Sam Partington
7ae9b0db35 Extract filepath retrieval to a method 2020-11-26 11:18:00 +00:00
70 changed files with 7484 additions and 1194 deletions

View File

@@ -44,7 +44,6 @@
"@typescript-eslint/no-unsafe-call": "off", "@typescript-eslint/no-unsafe-call": "off",
"@typescript-eslint/no-unsafe-member-access": "off", "@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-return": "off", "@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unused-vars": "off",
"@typescript-eslint/no-var-requires": "off", "@typescript-eslint/no-var-requires": "off",
"@typescript-eslint/prefer-regexp-exec": "off", "@typescript-eslint/prefer-regexp-exec": "off",
"@typescript-eslint/require-await": "off", "@typescript-eslint/require-await": "off",

View File

@@ -1,507 +0,0 @@
name: "Integration Testing"
on:
push:
branches: [main, v1]
pull_request:
jobs:
multi-language-repo_test-autodetect-languages:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
- run: |
cd "$RUNNER_TEMP/codeql_databases"
# List all directories as there will be precisely one directory per database
# but there may be other files in this directory such as query suites.
if [ "$(ls -d */ | wc -l)" != 6 ] || \
[[ ! -d cpp ]] || \
[[ ! -d csharp ]] || \
[[ ! -d go ]] || \
[[ ! -d java ]] || \
[[ ! -d javascript ]] || \
[[ ! -d python ]]; then
echo "Did not find expected number of databases. Database dir contains: $(ls)"
exit 1
fi
multi-language-repo_test-custom-queries-and-remote-config:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
tools: [~, latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
with:
tools: ${{ matrix.tools }}
languages: cpp,csharp,java,javascript,python
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
# Currently is not possible to analyze Go in conjunction with other languages in macos
multi-language-repo_test-go-custom-queries:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/setup-go@v2
if: ${{ matrix.os == 'macos-latest' }}
with:
go-version: '^1.13.1'
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
with:
languages: go
config-file: ./.github/codeql/custom-queries.yml
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
go-custom-tracing:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
env:
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
steps:
- uses: actions/setup-go@v2
if: ${{ matrix.os == 'macos-latest' }}
with:
go-version: '^1.13.1'
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
with:
languages: go
- name: Build code
shell: bash
run: go build main.go
- uses: ./../action/analyze
env:
TEST_MODE: true
go-custom-tracing-autobuild:
# No need to test Go autobuild on multiple OSes since
# we're testing Go custom tracing with a manual build on all OSes.
runs-on: ubuntu-latest
env:
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
with:
languages: go
- uses: ./../action/autobuild
- uses: ./../action/analyze
env:
TEST_MODE: true
multi-language-repo_rubocop:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6
- name: Install Code Scanning integration
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
- name: Install dependencies
run: bundle install
- name: Rubocop run
run: |
bash -c "
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
[[ $? -ne 2 ]]
"
- uses: ./../action/upload-sarif
with:
sarif_file: rubocop.sarif
env:
TEST_MODE: true
test-proxy:
runs-on: ubuntu-latest
container:
image: ubuntu:18.04
options: --dns 127.0.0.1
services:
squid-proxy:
image: datadog/squid:latest
ports:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
with:
languages: javascript
- uses: ./../action/analyze
env:
TEST_MODE: true
runner-analyze-javascript-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
# Pass --config-file here, but not for other jobs in this workflow.
# This means we're testing the config file parsing in the runner
# but not slowing down all jobs unnecessarily as it doesn't add much
# testing the parsing on different operating systems and languages.
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
. ./codeql-runner/codeql-env.sh
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
../action/runner/dist/codeql-runner-linux autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
../action/runner/dist/codeql-runner-win.exe autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
../action/runner/dist/codeql-runner-macos autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-upload-sarif:
runs-on: ubuntu-latest
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Upload with runner
run: |
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}

View File

@@ -20,25 +20,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Check generated JavaScript
run: |
run: .github/workflows/script/check-js.sh
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Wipe the lib directory incase there are extra unnecessary files in there
rm -rf lib
# Generate the JavaScript files
npm run-script build
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
git status
exit 1
fi
echo "Success: JavaScript files are up to date"
check-node-modules:
runs-on: ubuntu-latest
@@ -46,27 +28,10 @@
steps:
- uses: actions/checkout@v2
- name: Check node modules up to date
run: |
run: .github/workflows/script/check-node-modules.sh
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
npm run removeNPMAbsolutePaths
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
git status
exit 1
fi
echo "Success: node_modules are up to date"
npm-test:
needs: [check-js, check-node-modules]
strategy:
matrix:
os: [ubuntu-latest,macos-latest]
@@ -76,3 +41,533 @@ jobs:
- uses: actions/checkout@v2
- name: npm run-script test
run: npm run-script test
multi-language-repo_test-autodetect-languages:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
- run: |
cd "$RUNNER_TEMP/codeql_databases"
# List all directories as there will be precisely one directory per database
# but there may be other files in this directory such as query suites.
if [ "$(ls -d */ | wc -l)" != 6 ] || \
[[ ! -d cpp ]] || \
[[ ! -d csharp ]] || \
[[ ! -d go ]] || \
[[ ! -d java ]] || \
[[ ! -d javascript ]] || \
[[ ! -d python ]]; then
echo "Did not find expected number of databases. Database dir contains: $(ls)"
exit 1
fi
multi-language-repo_test-custom-queries-and-remote-config:
needs: [check-js, check-node-modules]
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
tools: [~, latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
with:
tools: ${{ matrix.tools }}
languages: cpp,csharp,java,javascript,python
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
# Currently is not possible to analyze Go in conjunction with other languages in macos
multi-language-repo_test-go-custom-queries:
needs: [check-js, check-node-modules]
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/setup-go@v2
if: ${{ matrix.os == 'macos-latest' }}
with:
go-version: '^1.13.1'
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
with:
languages: go
config-file: ./.github/codeql/custom-queries.yml
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
go-custom-tracing:
needs: [check-js, check-node-modules]
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
env:
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
steps:
- uses: actions/setup-go@v2
if: ${{ matrix.os == 'macos-latest' }}
with:
go-version: '^1.13.1'
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
with:
languages: go
- name: Build code
shell: bash
run: go build main.go
- uses: ./../action/analyze
env:
TEST_MODE: true
go-custom-tracing-autobuild:
needs: [check-js, check-node-modules]
# No need to test Go autobuild on multiple OSes since
# we're testing Go custom tracing with a manual build on all OSes.
runs-on: ubuntu-latest
env:
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
with:
languages: go
- uses: ./../action/autobuild
- uses: ./../action/analyze
env:
TEST_MODE: true
multi-language-repo_rubocop:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6
- name: Install Code Scanning integration
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
- name: Install dependencies
run: bundle install
- name: Rubocop run
run: |
bash -c "
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
[[ $? -ne 2 ]]
"
- uses: ./../action/upload-sarif
with:
sarif_file: rubocop.sarif
env:
TEST_MODE: true
test-proxy:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
container:
image: ubuntu:18.04
options: --dns 127.0.0.1
services:
squid-proxy:
image: datadog/squid:latest
ports:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
with:
languages: javascript
- uses: ./../action/analyze
env:
TEST_MODE: true
runner-analyze-javascript-ubuntu:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
# Pass --config-file here, but not for other jobs in this workflow.
# This means we're testing the config file parsing in the runner
# but not slowing down all jobs unnecessarily as it doesn't add much
# testing the parsing on different operating systems and languages.
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-windows:
needs: [check-js, check-node-modules]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-macos:
needs: [check-js, check-node-modules]
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-ubuntu:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
. ./codeql-runner/codeql-env.sh
$CODEQL_RUNNER dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-windows:
needs: [check-js, check-node-modules]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
& $Env:CODEQL_RUNNER dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-macos:
needs: [check-js, check-node-modules]
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
$CODEQL_RUNNER dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-ubuntu:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
../action/runner/dist/codeql-runner-linux autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-windows:
needs: [check-js, check-node-modules]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
../action/runner/dist/codeql-runner-win.exe autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-macos:
needs: [check-js, check-node-modules]
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
../action/runner/dist/codeql-runner-macos autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-upload-sarif:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Upload with runner
run: |
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}

21
.github/workflows/script/check-js.sh vendored Executable file
View File

@@ -0,0 +1,21 @@
#!/bin/bash
set -eu
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Wipe the lib directory incase there are extra unnecessary files in there
rm -rf lib
# Generate the JavaScript files
npm run-script build
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
git status
exit 1
fi
echo "Success: JavaScript files are up to date"

View File

@@ -0,0 +1,21 @@
#!/bin/bash
set -eu
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
npm run removeNPMAbsolutePaths
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
git status
exit 1
fi
echo "Success: node_modules are up to date"

View File

@@ -96,7 +96,16 @@ Use the `config-file` parameter of the `init` action to enable the configuration
config-file: ./.github/codeql/codeql-config.yml
```
The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
```yaml
- uses: github/codeql-action/init@v1
with:
config-file: owner/repo/codeql-config.yml@branch
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
```
For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
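As an illustration of this external-repository option, here is a minimal sketch of a shared configuration file that itself references queries in another repository. The repository and file names (`octo-org/codeql-settings`, `octo-org/codeql-queries`) are placeholders, not part of this change; the `external-repository-token` needs read access to every repository referenced this way.
```yaml
# Hypothetical shared config, e.g. codeql-config.yml in octo-org/codeql-settings
name: "Shared CodeQL configuration"
queries:
  # Queries referenced from other (possibly private) repositories;
  # the token supplied via external-repository-token must be able to read them.
  - name: Extra security queries
    uses: octo-org/codeql-queries/suites/extra-security.qls@main
```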
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:

View File

@@ -19,6 +19,9 @@ inputs:
queries:
description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
required: false
external-repository-token:
description: A token for fetching external config files and queries if they reside in a private repository.
required: false
setup-python-dependencies:
description: Try to auto-install your python dependencies
required: true

119
lib/actions-util.js generated
View File

@@ -107,6 +107,7 @@ function escapeRegExp(string) {
}
function patternToRegExp(value) {
return new RegExp(`^${value
.toString()
.split(GLOB_PATTERN)
.reduce(function (arr, cur) {
if (cur === "**") {
@@ -152,6 +153,8 @@ function toCodedErrors(errors) {
return acc;
}, {});
}
// code to send back via status report
// message to add as a warning annotation to the run
exports.WorkflowErrors = toCodedErrors({
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
MissingHooks: `Please specify on.push and on.pull_request hooks so that Code Scanning can compare pull requests against the state of the base branch.`,
@@ -160,15 +163,16 @@ exports.WorkflowErrors = toCodedErrors({
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
LintFailed: `Unable to lint workflow for CodeQL.`,
});
function validateWorkflow(doc) {
function getWorkflowErrors(doc) {
var _a, _b, _c, _d, _e, _f, _g, _h;
const errors = [];
// .jobs[key].steps[].run
const jobName = process.env.GITHUB_JOB;
for (const job of Object.values(((_a = doc) === null || _a === void 0 ? void 0 : _a.jobs) || {})) {
if (jobName) {
if (Array.isArray((_b = job) === null || _b === void 0 ? void 0 : _b.steps)) {
const job = (_b = (_a = doc) === null || _a === void 0 ? void 0 : _a.jobs) === null || _b === void 0 ? void 0 : _b[jobName];
for (const step of (_c = job) === null || _c === void 0 ? void 0 : _c.steps) {
const steps = (_c = job) === null || _c === void 0 ? void 0 : _c.steps;
if (Array.isArray(steps)) {
for (const step of steps) {
// this was advice that we used to give in the README
// we actually want to run the analysis on the merge commit
// to produce results that are more inline with expectations
@@ -176,43 +180,34 @@ function validateWorkflow(doc) {
// and avoid some race conditions
if (((_d = step) === null || _d === void 0 ? void 0 : _d.run) === "git checkout HEAD^2") {
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
break;
}
}
}
}
let missing = MissingTriggers.None;
if (doc.on === undefined) {
missing = MissingTriggers.Push | MissingTriggers.PullRequest;
// this is not a valid config
}
else if (typeof doc.on === "string") {
switch (doc.on) {
if (doc.on === "pull_request") {
case "push":
missing = MissingTriggers.Push;
missing = MissingTriggers.PullRequest;
break;
case "pull_request":
missing = MissingTriggers.Push;
break;
default:
missing = MissingTriggers.Push | MissingTriggers.PullRequest;
break;
}
}
else if (Array.isArray(doc.on)) {
if (!doc.on.includes("push")) {
const hasPush = doc.on.includes("push");
const hasPullRequest = doc.on.includes("pull_request");
if (hasPullRequest && !hasPush) {
missing = missing | MissingTriggers.Push;
}
if (!doc.on.includes("pull_request")) {
missing = missing | MissingTriggers.PullRequest;
}
}
else if (isObject(doc.on)) {
if (!Object.prototype.hasOwnProperty.call(doc.on, "pull_request")) {
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
missing = missing | MissingTriggers.PullRequest;
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
}
if (!hasPush && hasPullRequest) {
if (!Object.prototype.hasOwnProperty.call(doc.on, "push")) {
missing = missing | MissingTriggers.Push;
}
else {
if (hasPush && hasPullRequest) {
const paths = (_e = doc.on.push) === null || _e === void 0 ? void 0 : _e.paths;
// if you specify paths or paths-ignore you can end up with commits that have no baseline
// if they didn't change any files
@@ -225,22 +220,27 @@ function validateWorkflow(doc) {
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
}
}
const push = branchesToArray((_g = doc.on.push) === null || _g === void 0 ? void 0 : _g.branches);
// if doc.on.pull_request is null that means 'all branches'
if (push !== "**") {
// if doc.on.pull_request is undefined that means 'off'
const pull_request = branchesToArray((_h = doc.on.pull_request) === null || _h === void 0 ? void 0 : _h.branches);
// we only want to check for mismatched branches if pull_request is on.
if (pull_request !== "**") {
if (doc.on.pull_request !== undefined) {
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
const push = branchesToArray((_g = doc.on.push) === null || _g === void 0 ? void 0 : _g.branches);
if (difference.length > 0) {
if (push !== "**") {
// there are branches in pull_request that may not have a baseline
const pull_request = branchesToArray((_h = doc.on.pull_request) === null || _h === void 0 ? void 0 : _h.branches);
// because we are not building them on push
if (pull_request !== "**") {
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
if (difference.length > 0) {
// there are branches in pull_request that may not have a baseline
// because we are not building them on push
errors.push(exports.WorkflowErrors.MismatchedBranches);
}
}
else if (push.length > 0) {
// push is set up to run on a subset of branches
// and you could open a PR against a branch with no baseline
errors.push(exports.WorkflowErrors.MismatchedBranches);
}
}
else if (push.length > 0) {
// push is set up to run on a subset of branches
// and you could open a PR against a branch with no baseline
errors.push(exports.WorkflowErrors.MismatchedBranches);
}
}
}
else {
@@ -261,20 +261,35 @@ function validateWorkflow(doc) {
}
return errors;
}
exports.validateWorkflow = validateWorkflow;
exports.getWorkflowErrors = getWorkflowErrors;
async function getWorkflowErrors() {
async function validateWorkflow() {
let workflow;
try {
const workflow = await getWorkflow();
workflow = await getWorkflow();
if (workflow === undefined) {
return [];
}
return validateWorkflow(workflow);
}
catch (e) {
return [exports.WorkflowErrors.LintFailed];
return `error: getWorkflow() failed: ${e.toString()}`;
}
let workflowErrors;
try {
workflowErrors = getWorkflowErrors(workflow);
}
catch (e) {
return `error: getWorkflowErrors() failed: ${e.toString()}`;
}
if (workflowErrors.length > 0) {
let message;
try {
message = formatWorkflowErrors(workflowErrors);
}
catch (e) {
return `error: formatWorkflowErrors() failed: ${e.toString()}`;
}
core.warning(message);
}
return `warning: ${formatWorkflowCause(workflowErrors)}`;
}
exports.getWorkflowErrors = getWorkflowErrors;
exports.validateWorkflow = validateWorkflow;
function formatWorkflowErrors(errors) {
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
const errorsList = errors.map((e) => e.message).join(" ");
@@ -291,13 +306,7 @@ exports.formatWorkflowCause = formatWorkflowCause;
async function getWorkflow() {
const relativePath = await getWorkflowPath();
const absolutePath = path.join(getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
try {
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
}
catch (e) {
core.warning(`Could not read workflow: ${e.toString()}`);
return undefined;
}
}
exports.getWorkflow = getWorkflow;
/**
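To make the branch-matching logic above concrete, here is a minimal sketch (not part of the diff) of a trigger block that the reworked `getWorkflowErrors()` accepts without warnings: every branch listed under `on.pull_request` is also covered by `on.push`, and `push` uses no `paths`/`paths-ignore` filters. Branch names are placeholders.
```yaml
# Illustrative trigger block only; branch names are placeholders.
on:
  push:
    # Pushes to these branches build the baseline that pull requests are compared against.
    branches: [main, "releases/*"]
  pull_request:
    # Every branch here is matched by a pattern under on.push,
    # so the MismatchedBranches warning is not reported.
    branches: [main, "releases/*"]
```
Listing a branch under `pull_request` that no `push` pattern covers, or adding `paths:`/`paths-ignore:` under `push`, would instead surface the MismatchedBranches, PathsSpecified, or PathsIgnoreSpecified warnings defined above.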

File diff suppressed because one or more lines are too long

283
lib/actions-util.test.js generated
View File

@@ -11,9 +11,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const yaml = __importStar(require("js-yaml"));
const sinon_1 = __importDefault(require("sinon"));
const actionsutil = __importStar(require("./actions-util"));
const testing_utils_1 = require("./testing-utils");
function errorCodes(actual, expected) {
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
}
testing_utils_1.setupTests(ava_1.default);
ava_1.default("getRef() throws on the empty string", async (t) => {
process.env["GITHUB_REF"] = "";
@@ -68,141 +72,136 @@ ava_1.default("prepareEnvironment() when a local run", (t) => {
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
});
ava_1.default("validateWorkflow() when on is missing", (t) => {
ava_1.default("getWorkflowErrors() when on is empty", (t) => {
const errors = actionsutil.validateWorkflow({});
const errors = actionsutil.getWorkflowErrors({ on: {} });
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("validateWorkflow() when on.push is missing", (t) => {
ava_1.default("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
const errors = actionsutil.validateWorkflow({ on: {} });
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
console.log(errors);
t.deepEqual(...errorCodes(errors, []));
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
});
ava_1.default("validateWorkflow() when on.push is an array missing pull_request", (t) => {
ava_1.default("getWorkflowErrors() when on.push is an array missing push", (t) => {
const errors = actionsutil.validateWorkflow({ on: ["push"] });
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPullRequestHook]);
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
});
ava_1.default("validateWorkflow() when on.push is an array missing push", (t) => {
ava_1.default("getWorkflowErrors() when on.push is valid", (t) => {
const errors = actionsutil.validateWorkflow({ on: ["pull_request"] });
const errors = actionsutil.getWorkflowErrors({
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPushHook]);
});
ava_1.default("validateWorkflow() when on.push is valid", (t) => {
const errors = actionsutil.validateWorkflow({
on: ["push", "pull_request"], on: ["push", "pull_request"],
}); });
t.deepEqual(errors, []); t.deepEqual(...errorCodes(errors, []));
}); });
ava_1.default("validateWorkflow() when on.push is a valid superset", (t) => { ava_1.default("getWorkflowErrors() when on.push is a valid superset", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request", "schedule"], on: ["push", "pull_request", "schedule"],
}); });
t.deepEqual(errors, []); t.deepEqual(...errorCodes(errors, []));
}); });
ava_1.default("validateWorkflow() when on.push should not have a path", (t) => { ava_1.default("getWorkflowErrors() when on.push should not have a path", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors({
on: { on: {
push: { branches: ["main"], paths: ["test/*"] }, push: { branches: ["main"], paths: ["test/*"] },
pull_request: { branches: ["main"] }, pull_request: { branches: ["main"] },
}, },
}); });
t.deepEqual(errors, [actionsutil.WorkflowErrors.PathsSpecified]); t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
}); });
ava_1.default("validateWorkflow() when on.push is a correct object", (t) => { ava_1.default("getWorkflowErrors() when on.push is a correct object", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors({
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } }, on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
}); });
t.deepEqual(errors, []); t.deepEqual(...errorCodes(errors, []));
}); });
ava_1.default("validateWorkflow() when on.pull_requests is a string", (t) => { ava_1.default("getWorkflowErrors() when on.pull_requests is a string", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors({
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } }, on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
}); });
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]); t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
}); });
ava_1.default("validateWorkflow() when on.pull_requests is a string and correct", (t) => { ava_1.default("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors({
on: { push: { branches: "*" }, pull_request: { branches: "*" } }, on: { push: { branches: "*" }, pull_request: { branches: "*" } },
}); });
t.deepEqual(errors, []); t.deepEqual(...errorCodes(errors, []));
}); });
ava_1.default("validateWorkflow() when on.push is correct with empty objects", (t) => { ava_1.default("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
on: { push: undefined, pull_request: undefined }, on:
}); push:
t.deepEqual(errors, []); pull_request:
`));
t.deepEqual(...errorCodes(errors, []));
}); });
ava_1.default("validateWorkflow() when on.push is mismatched", (t) => { ava_1.default("getWorkflowErrors() when on.push is mismatched", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors({
on: { on: {
push: { branches: ["main"] }, push: { branches: ["main"] },
pull_request: { branches: ["feature"] }, pull_request: { branches: ["feature"] },
}, },
}); });
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]); t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
}); });
ava_1.default("validateWorkflow() when on.push is not mismatched", (t) => { ava_1.default("getWorkflowErrors() when on.push is not mismatched", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors({
on: { on: {
push: { branches: ["main", "feature"] }, push: { branches: ["main", "feature"] },
pull_request: { branches: ["main"] }, pull_request: { branches: ["main"] },
}, },
}); });
t.deepEqual(errors, []); t.deepEqual(...errorCodes(errors, []));
}); });
ava_1.default("validateWorkflow() when on.push is mismatched for pull_request", (t) => { ava_1.default("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors({
on: { on: {
push: { branches: ["main"] }, push: { branches: ["main"] },
pull_request: { branches: ["main", "feature"] }, pull_request: { branches: ["main", "feature"] },
}, },
}); });
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]); t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
}); });
ava_1.default("validateWorkflow() for a range of malformed workflows", (t) => { ava_1.default("getWorkflowErrors() for a range of malformed workflows", (t) => {
t.deepEqual(actionsutil.validateWorkflow({ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: { on: {
push: 1, push: 1,
pull_request: 1, pull_request: 1,
}, },
}), []); }), []));
t.deepEqual(actionsutil.validateWorkflow({ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1, on: 1,
}), [actionsutil.WorkflowErrors.MissingHooks]); }), [actionsutil.WorkflowErrors.MissingHooks]));
t.deepEqual(actionsutil.validateWorkflow({ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1, on: 1,
jobs: 1, jobs: 1,
}), [actionsutil.WorkflowErrors.MissingHooks]); }), [actionsutil.WorkflowErrors.MissingHooks]));
t.deepEqual(actionsutil.validateWorkflow({ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1, on: 1,
jobs: [1], jobs: [1],
}), [actionsutil.WorkflowErrors.MissingHooks]); }), [actionsutil.WorkflowErrors.MissingHooks]));
t.deepEqual(actionsutil.validateWorkflow({ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1, on: 1,
jobs: { 1: 1 }, jobs: { 1: 1 },
}), [actionsutil.WorkflowErrors.MissingHooks]); }), [actionsutil.WorkflowErrors.MissingHooks]));
t.deepEqual(actionsutil.validateWorkflow({ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1, on: 1,
jobs: { test: 1 }, jobs: { test: 1 },
}), [actionsutil.WorkflowErrors.MissingHooks]); }), [actionsutil.WorkflowErrors.MissingHooks]));
t.deepEqual(actionsutil.validateWorkflow({ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1, on: 1,
jobs: { test: [1] }, jobs: { test: [1] },
}), [actionsutil.WorkflowErrors.MissingHooks]); }), [actionsutil.WorkflowErrors.MissingHooks]));
t.deepEqual(actionsutil.validateWorkflow({ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1, on: 1,
jobs: { test: { steps: 1 } }, jobs: { test: { steps: 1 } },
}), [actionsutil.WorkflowErrors.MissingHooks]); }), [actionsutil.WorkflowErrors.MissingHooks]));
t.deepEqual(actionsutil.validateWorkflow({ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1, on: 1,
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } }, jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
}), [actionsutil.WorkflowErrors.MissingHooks]); }), [actionsutil.WorkflowErrors.MissingHooks]));
t.deepEqual(actionsutil.validateWorkflow({ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1, on: 1,
jobs: { test: [undefined] }, jobs: { test: [undefined] },
}), [actionsutil.WorkflowErrors.MissingHooks]); }), [actionsutil.WorkflowErrors.MissingHooks]));
t.deepEqual(actionsutil.validateWorkflow(1), [ t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
actionsutil.WorkflowErrors.MissingHooks, t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
]);
t.deepEqual(actionsutil.validateWorkflow({
on: { on: {
push: { push: {
branches: 1, branches: 1,
@@ -211,41 +210,43 @@ ava_1.default("validateWorkflow() for a range of malformed workflows", (t) => {
branches: 1,
},
},
- }), []);
}), []));
});
- ava_1.default("validateWorkflow() when on.pull_request for every branch but push specifies branches", (t) => {
ava_1.default("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
- const errors = actionsutil.validateWorkflow({
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
- on: {
name: "CodeQL"
- push: { branches: ["main"] },
on:
- pull_request: null,
push:
- },
branches: ["main"]
- });
pull_request:
- t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
`));
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
- ava_1.default("validateWorkflow() when on.pull_request for wildcard branches", (t) => {
ava_1.default("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
- const errors = actionsutil.validateWorkflow({
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["feature/*"] },
pull_request: { branches: "feature/moose" },
},
});
- t.deepEqual(errors, []);
t.deepEqual(...errorCodes(errors, []));
});
- ava_1.default("validateWorkflow() when on.pull_request for mismatched wildcard branches", (t) => {
ava_1.default("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
- const errors = actionsutil.validateWorkflow({
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["feature/moose"] },
pull_request: { branches: "feature/*" },
},
});
- t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
- ava_1.default("validateWorkflow() when HEAD^2 is checked out", (t) => {
ava_1.default("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
- const errors = actionsutil.validateWorkflow({
process.env.GITHUB_JOB = "test";
const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request"],
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
});
- t.deepEqual(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]);
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
});
ava_1.default("formatWorkflowErrors() when there is one error", (t) => {
const message = actionsutil.formatWorkflowErrors([
@@ -290,4 +291,108 @@ ava_1.default("patternIsSuperset()", (t) => {
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
});
ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
on:
push:
branches: [4.1, master]
pull_request:
# The branches below must be a subset of the branches above
branches: [4.1, master]
`));
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.push has a trailing comma", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master, ]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
`));
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
process.env.GITHUB_JOB = "test";
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
jobs:
test:
steps:
- run: "git checkout HEAD^2"
test2:
steps:
- run: "git checkout HEAD^2"
test3:
steps: []
`));
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
});
ava_1.default("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
process.env.GITHUB_JOB = "test3";
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
jobs:
test:
steps:
- run: "git checkout HEAD^2"
test2:
steps:
- run: "git checkout HEAD^2"
test3:
steps: []
`));
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on is missing", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
`));
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() with a different on setup", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on: "workflow_dispatch"
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on: [workflow_dispatch]
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
workflow_dispatch: {}
`)), []));
});
ava_1.default("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master]
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on: ["push"]
`)), []));
});
//# sourceMappingURL=actions-util.test.js.map

File diff suppressed because one or more lines are too long

40 lib/analyze-action.js generated

@@ -7,12 +7,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, stats, error) {
var _a, _b, _c;
@@ -29,13 +32,14 @@ async function sendStatusReport(startedAt, stats, error) {
async function run() {
const startedAt = new Date();
let stats = undefined;
let config = undefined;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
return;
}
const logger = logging_1.getActionsLogger();
- const config = await config_utils_1.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
config = await config_utils_1.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
@@ -43,7 +47,16 @@ async function run() {
auth: actionsUtil.getRequiredInput("token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
- stats = await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), apiDetails, actionsUtil.getRequiredInput("upload") === "true", "actions", actionsUtil.getRequiredInput("output"), util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
const outputDir = actionsUtil.getRequiredInput("output");
const queriesStats = await analyze_1.runAnalyze(outputDir, util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
if (actionsUtil.getRequiredInput("upload") === "true") {
const uploadStats = await upload_lib.uploadFromActions(outputDir, repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), config.gitHubVersion, apiDetails, logger);
stats = { ...queriesStats, ...uploadStats };
}
else {
logger.info("Not uploading results");
stats = { ...queriesStats };
}
}
catch (error) {
core.setFailed(error.message);
@@ -54,6 +67,29 @@ async function run() {
await sendStatusReport(startedAt, stats, error);
return;
}
finally {
if (core.isDebug() && config !== undefined) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
const databaseDirectory = util.getCodeQLDatabasePath(config.tempDir, language);
const logsDirectory = path.join(databaseDirectory, "log");
const walkLogFiles = (dir) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile()) {
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
core.endGroup();
}
else if (entry.isDirectory()) {
walkLogFiles(path.resolve(dir, entry.name));
}
}
};
walkLogFiles(logsDirectory);
}
}
}
await sendStatusReport(startedAt, stats);
}
async function runWrapper() {

lib/analyze-action.js.map generated

File diff suppressed because one or more lines are too long

10 lib/analyze.js generated

@@ -14,7 +14,6 @@ const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
- const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
class CodeQLAnalysisError extends Error {
constructor(queriesStatusReport, message) {
@@ -117,7 +116,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
return statusReport;
}
exports.runQueries = runQueries;
- async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, apiDetails, doUpload, mode, outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
async function runAnalyze(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
fs.mkdirSync(outputDir, { recursive: true });
@@ -125,12 +124,7 @@ async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisNa
await finalizeDatabaseCreation(config, threadsFlag, logger);
logger.info("Analyzing database");
const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
- if (!doUpload) {
- logger.info("Not uploading results");
- return { ...queriesStats };
- }
- const uploadStats = await upload_lib.upload(outputDir, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, config.gitHubVersion, apiDetails, mode, logger);
- return { ...queriesStats, ...uploadStats };
return { ...queriesStats };
}
exports.runAnalyze = runAnalyze;
//# sourceMappingURL=analyze.js.map

lib/analyze.js.map generated

File diff suppressed because one or more lines are too long

lib/api-client.js.map generated

File diff suppressed because one or more lines are too long

8 lib/codeql.js generated

@@ -6,6 +6,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod; result["default"] = mod;
return result; return result;
}; };
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
@@ -14,6 +17,7 @@ const globalutil = __importStar(require("util"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const http = __importStar(require("@actions/http-client"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
const semver = __importStar(require("semver"));
const uuid_1 = require("uuid");
const actions_util_1 = require("./actions-util");
@@ -76,7 +80,9 @@ async function getCodeQLBundleDownloadURL(apiDetails, mode, logger) {
];
// We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
- const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
const uniqueDownloadSources = potentialDownloadSources.filter((source, index, self) => {
return !self.slice(0, index).some((other) => fast_deep_equal_1.default(source, other));
});
const codeQLBundleName = getCodeQLBundleName();
for (const downloadSource of uniqueDownloadSources) {
const [apiURL, repository] = downloadSource;
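
The switch to fast_deep_equal_1 in this hunk is what makes the deduplication work: potentialDownloadSources now holds [apiURL, repository] tuples, and Array.prototype.indexOf compares arrays by reference, so the old index === self.indexOf(url) check never filtered anything out. A minimal standalone TypeScript sketch of the difference (illustrative only, not code from this repository):

import deepEqual from "fast-deep-equal";

const sources: Array<[string, string]> = [
  ["https://api.github.com", "github/codeql-action"],
  ["https://api.github.com", "github/codeql-action"], // structurally equal, but a distinct array object
];

// Reference equality: indexOf never matches the second, distinct array object,
// so both entries pass the check and the duplicate survives.
const byReference = sources.filter((s, i, self) => i === self.indexOf(s));

// Structural equality: keep an entry only if no earlier entry deep-equals it.
const byValue = sources.filter(
  (s, i, self) => !self.slice(0, i).some((other) => deepEqual(s, other))
);

console.log(byReference.length); // 2
console.log(byValue.length); // 1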

File diff suppressed because one or more lines are too long

18 lib/config-utils.js generated

@@ -128,7 +128,7 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath,
/**
* Retrieve the set of queries at the referenced remote repo and add them to resultMap.
*/
- async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile) {
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile) {
let tok = queryUses.split("@");
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
@@ -147,7 +147,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
}
const nwo = `${tok[0]}/${tok[1]}`;
// Checkout the external repository
- const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, githubUrl, tempDir, logger);
const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, apiDetails, tempDir, logger);
const queryPath = tok.length > 2
? path.join(checkoutPath, tok.slice(2).join("/"))
: checkoutPath;
@@ -161,7 +161,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*/
- async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, githubUrl, logger, configFile) {
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, apiDetails, logger, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
@@ -177,7 +177,7 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
return;
}
// Otherwise, must be a reference to another repo
- await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile);
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
}
// Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately
@@ -367,12 +367,12 @@ async function getLanguages(languagesInput, repository, apiDetails, logger) {
}
return parsedLanguages;
}
- async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, githubUrl, logger) {
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, apiDetails, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
- await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, githubUrl, logger);
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, apiDetails, logger);
}
}
// Returns true if either no queries were provided in the workflow.
@@ -393,7 +393,7 @@ async function getDefaultConfig(languagesInput, queriesInput, repository, tempDi
const queries = {};
await addDefaultQueries(codeQL, languages, queries);
if (queriesInput) {
- await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails.url, logger);
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
}
return {
languages,
@@ -450,7 +450,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
if (queriesInput) {
- await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails.url, logger);
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
}
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
@@ -462,7 +462,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
- await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, apiDetails.url, logger, configFile);
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, apiDetails, logger, configFile);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {

File diff suppressed because one or more lines are too long


@@ -25,6 +25,7 @@ const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
const sampleApiDetails = {
auth: "token",
externalRepoAuth: "token",
url: "https://github.example.com",
};
const gitHubVersion = { type: "dotcom" };
@@ -427,7 +428,7 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
// This function just needs to be type-correct; it doesn't need to do anything,
// since we're deliberately passing in invalid data
const codeQL = codeql_1.setCodeQL({
- async resolveQueries(_queries, _extraSearchPath) {
async resolveQueries() {
return {
byLanguage: {
javascript: {},

File diff suppressed because one or more lines are too long

lib/external-queries.js generated

@@ -14,7 +14,7 @@ const safeWhich = __importStar(require("@chrisgavin/safe-which"));
/**
* Check out repository at the given ref, and return the directory of the checkout.
*/
- async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, logger) {
async function checkoutExternalRepository(repository, ref, apiDetails, tempDir, logger) {
logger.info(`Checking out ${repository}`);
const checkoutLocation = path.join(tempDir, repository, ref);
if (!checkoutLocation.startsWith(tempDir)) {
@@ -22,10 +22,10 @@ async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, l
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
}
if (!fs.existsSync(checkoutLocation)) {
- const repoURL = `${githubUrl}/${repository}`;
const repoCloneURL = buildCheckoutURL(repository, apiDetails);
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
"clone",
- repoURL,
repoCloneURL,
checkoutLocation,
]).exec();
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
@@ -38,4 +38,17 @@ async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, l
return checkoutLocation;
}
exports.checkoutExternalRepository = checkoutExternalRepository;
function buildCheckoutURL(repository, apiDetails) {
const repoCloneURL = new URL(apiDetails.url);
if (apiDetails.externalRepoAuth !== undefined) {
repoCloneURL.username = "x-access-token";
repoCloneURL.password = apiDetails.externalRepoAuth;
}
if (!repoCloneURL.pathname.endsWith("/")) {
repoCloneURL.pathname += "/";
}
repoCloneURL.pathname += `${repository}`;
return repoCloneURL.toString();
}
exports.buildCheckoutURL = buildCheckoutURL;
//# sourceMappingURL=external-queries.js.map

lib/external-queries.js.map generated

File diff suppressed because one or more lines are too long

lib/external-queries.test.js generated

@@ -81,17 +81,35 @@ ava_1.default("checkoutExternalQueries", async (t) => {
const commit2Sha = await runGit(["rev-parse", "HEAD"]);
// Checkout the first commit, which should contain 'a' and 'b'
t.false(fs.existsSync(path.join(tmpDir, repoName)));
- await externalQueries.checkoutExternalRepository(repoName, commit1Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
await externalQueries.checkoutExternalRepository(repoName, commit1Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
// Checkout the second commit as well, which should only contain 'a'
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
- await externalQueries.checkoutExternalRepository(repoName, commit2Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
await externalQueries.checkoutExternalRepository(repoName, commit2Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
});
});
ava_1.default("buildCheckoutURL", (t) => {
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.com",
externalRepoAuth: undefined,
}), "https://github.com/foo/bar");
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.example.com/",
externalRepoAuth: undefined,
}), "https://github.example.com/foo/bar");
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.com",
externalRepoAuth: "abc",
}), "https://x-access-token:abc@github.com/foo/bar");
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.example.com/",
externalRepoAuth: "abc",
}), "https://x-access-token:abc@github.example.com/foo/bar");
});
//# sourceMappingURL=external-queries.test.js.map //# sourceMappingURL=external-queries.test.js.map
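The assertions above pin down how the new buildCheckoutURL helper behaves. A minimal sketch consistent with those four expectations — assuming apiDetails carries a url plus an optional externalRepoAuth token, and not necessarily matching the committed implementation line for line — looks like:

export function buildCheckoutURL(
  repositoryName: string,
  apiDetails: { url: string; externalRepoAuth?: string }
): string {
  const checkoutURL = new URL(apiDetails.url);
  if (apiDetails.externalRepoAuth !== undefined) {
    // Embed the token using the x-access-token credential convention.
    checkoutURL.username = "x-access-token";
    checkoutURL.password = apiDetails.externalRepoAuth;
  }
  if (!checkoutURL.pathname.endsWith("/")) {
    checkoutURL.pathname += "/";
  }
  checkoutURL.pathname += repositoryName;
  return checkoutURL.toString();
}

With no token the repository name is simply appended to the server URL; with a token the URL gains an x-access-token credential, as the test expects.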

View File

@@ -1 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EAC
N,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"} {"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAA
U,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,EAAE;IAC7B,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,4BAA4B,CAC7B,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,oCAAoC,CACrC,CAAC;IAEF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,+CAA+C,CAChD,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,uDAAuD,CACxD,CAAC;AACJ,CAAC,CAAC,CAAC"}

12
lib/init-action.js generated
View File

@@ -56,6 +56,7 @@ async function run() {
let toolsVersion; let toolsVersion;
const apiDetails = { const apiDetails = {
auth: actionsUtil.getRequiredInput("token"), auth: actionsUtil.getRequiredInput("token"),
externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
}; };
const gitHubVersion = await util_1.getGitHubVersion(apiDetails); const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
@@ -64,15 +65,8 @@ async function run() {
} }
try { try {
actionsUtil.prepareLocalRunEnvironment(); actionsUtil.prepareLocalRunEnvironment();
const workflowErrors = await actionsUtil.getWorkflowErrors(); const workflowErrors = await actionsUtil.validateWorkflow();
// we do not want to worry users if linting is failing if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, workflowErrors)))) {
// but we do want to send a status report containing this error code
// below
const userWorkflowErrors = workflowErrors.filter((o) => o.code !== "LintFailed");
if (userWorkflowErrors.length > 0) {
core.warning(actionsUtil.formatWorkflowErrors(userWorkflowErrors));
}
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, actionsUtil.formatWorkflowCause(workflowErrors))))) {
return; return;
} }
const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", logger); const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", logger);
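Taken together with the runner changes further down, the apiDetails object now carries three fields. A hypothetical TypeScript shape for it, assembled from the field names, inputs, and CLI options visible in this diff (the interface name itself is illustrative):

interface GitHubApiDetails {
  // GitHub Apps token or PAT for the GitHub API ("token" input / --github-auth).
  auth: string;
  // Server URL (GITHUB_SERVER_URL / --github-url).
  url: string;
  // Optional token used only to fetch external config files and queries
  // from a private repository ("external-repository-token" / --external-repository-token).
  externalRepoAuth?: string;
}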

File diff suppressed because one or more lines are too long

View File

@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,QAAgB,EAChB,IAAe,EACf,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,oBAAW,CAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,MAAM,CACP,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AApBD,gCAoBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,aAAiC,EACjC,UAA4B,EAC5B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9BD,gCA8BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,EACR,UAAU,CACX,CAAC;KACH;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnBD,0BAmBC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,2CAA2C;IAC3C,IAAI,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,SAAS,EAAE;QACxC,IAAI;YACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC,CAAC,CAChD,CAAC,IAAI,EAAE,CAAC;aACV;iBAAM;gBACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;aACV;SACF;QAAC,OAAO,CAAC,EAAE;YACV,mGAAmG;YACnG,uDAAuD;YACvD,MAAM,CAAC,QAAQ,EAAE,CAAC;YAClB,MAAM,CAAC,OAAO,CACZ,mLAAmL,CACpL,CAAC;YACF,OAAO;SACR;KACF;IAED,uBAAuB;IACvB,IAAI;QACF,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CA
AC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,+IAA+I,CAChJ,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAnDD,8CAmDC"} {"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,QAAgB,EAChB,IAAe,EACf,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,oBAAW,CAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,MAAM,CACP,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AApBD,gCAoBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9BD,gCA8BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,EACR,UAAU,CACX,CAAC;KACH;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnBD,0BAmBC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,2CAA2C;IAC3C,IAAI,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,SAAS,EAAE;QACxC,IAAI;YACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC,CAAC,CAChD,CAAC,IAAI,EAAE,CAAC;aACV;iBAAM;gBACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;aACV;SACF;QAAC,OAAO,CAAC,EAAE;YACV,mGAAmG;YACnG,uDAAuD;YACvD,MAAM,CAAC,QAAQ,EAAE,CAAC;YAClB,MAAM,CAAC,OAAO,CACZ,mLAAmL,CACpL,CAAC;YACF,OAAO;SACR;KACF;IAED,uBAAuB;IACvB,IAAI;QACF,MAAM,MAAM,GAAG,0BAA0B
,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,+IAA+I,CAChJ,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAnDD,8CAmDC"}

11
lib/runner.js generated
View File

@@ -83,6 +83,7 @@ program
.requiredOption("--repository <repository>", "Repository name. (Required)") .requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)") .requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)") .requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--external-repository-token <token>", "A token for fetching external config files and queries if they reside in a private repository.")
.option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.") .option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.")
.option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.") .option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.")
.option("--config-file <file>", "Path to config file.") .option("--config-file <file>", "Path to config file.")
@@ -105,6 +106,7 @@ program
fs.mkdirSync(tempDir, { recursive: true }); fs.mkdirSync(tempDir, { recursive: true });
const apiDetails = { const apiDetails = {
auth: cmd.githubAuth, auth: cmd.githubAuth,
externalRepoAuth: cmd.externalRepositoryToken,
url: util_1.parseGithubUrl(cmd.githubUrl), url: util_1.parseGithubUrl(cmd.githubUrl),
}; };
const gitHubVersion = await util_1.getGitHubVersion(apiDetails); const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
@@ -231,7 +233,12 @@ program
auth: cmd.githubAuth, auth: cmd.githubAuth,
url: util_1.parseGithubUrl(cmd.githubUrl), url: util_1.parseGithubUrl(cmd.githubUrl),
}; };
await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, apiDetails, cmd.upload, "runner", outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger); await analyze_1.runAnalyze(outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
if (!cmd.upload) {
logger.info("Not uploading results");
return;
}
await upload_lib.uploadFromRunner(outputDir, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.checkoutPath || process.cwd(), config.gitHubVersion, apiDetails, logger);
} }
catch (e) { catch (e) {
logger.error("Analyze failed"); logger.error("Analyze failed");
@@ -258,7 +265,7 @@ program
}; };
try { try {
const gitHubVersion = await util_1.getGitHubVersion(apiDetails); const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, gitHubVersion, apiDetails, "runner", logger); await upload_lib.uploadFromRunner(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
} }
catch (e) { catch (e) {
logger.error("Upload failed"); logger.error("Upload failed");

File diff suppressed because one or more lines are too long

6
lib/tracer-config.js generated
View File

@@ -141,6 +141,12 @@ async function getCombinedTracerConfig(config, codeql) {
else if (process.platform !== "win32") { else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so"); mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
} }
// On macOS it's necessary to prefix the build command with the runner executable
// in order to trace when System Integrity Protection is enabled.
// The executable also exists and works for other platforms, so we output this env
// var with a path to the runner regardless so it's always available.
const runnerExeName = process.platform === "win32" ? "runner.exe" : "runner";
mainTracerConfig.env["CODEQL_RUNNER"] = path.join(mainTracerConfig.env["CODEQL_DIST"], "tools", mainTracerConfig.env["CODEQL_PLATFORM"], runnerExeName);
return mainTracerConfig; return mainTracerConfig;
} }
exports.getCombinedTracerConfig = getCombinedTracerConfig; exports.getCombinedTracerConfig = getCombinedTracerConfig;
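The comment above explains why CODEQL_RUNNER is exported: on macOS the build command has to be launched through the runner executable for tracing to work under System Integrity Protection. A hedged illustration of how a caller might consume the variable (the build script name is a placeholder, not something defined in this diff):

import { execFileSync } from "child_process";

// CODEQL_RUNNER points at <CODEQL_DIST>/tools/<platform>/runner(.exe), per the code above.
const runner = process.env["CODEQL_RUNNER"];
if (runner !== undefined) {
  // Prefix the (placeholder) build command with the runner executable.
  execFileSync(runner, ["./build.sh"], { stdio: "inherit" });
}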

File diff suppressed because one or more lines are too long

View File

@@ -238,6 +238,7 @@ ava_1.default("getCombinedTracerConfig - return undefined when no languages are
async getTracerEnv() { async getTracerEnv() {
return { return {
ODASA_TRACER_CONFIGURATION: "abc", ODASA_TRACER_CONFIGURATION: "abc",
CODEQL_DIST: "/",
foo: "bar", foo: "bar",
}; };
}, },
@@ -250,17 +251,28 @@ ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
const config = getTestConfig(tmpDir); const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, "spec"); const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n2\nabc\ndef"); fs.writeFileSync(spec, "foo.log\n2\nabc\ndef");
const bundlePath = path.join(tmpDir, "bundle");
const codeqlPlatform = process.platform === "win32"
? "win64"
: process.platform === "darwin"
? "osx64"
: "linux64";
const codeQL = codeql_1.setCodeQL({ const codeQL = codeql_1.setCodeQL({
async getTracerEnv() { async getTracerEnv() {
return { return {
ODASA_TRACER_CONFIGURATION: spec, ODASA_TRACER_CONFIGURATION: spec,
CODEQL_DIST: bundlePath,
CODEQL_PLATFORM: codeqlPlatform,
foo: "bar", foo: "bar",
}; };
}, },
}); });
const result = await tracer_config_1.getCombinedTracerConfig(config, codeQL); const result = await tracer_config_1.getCombinedTracerConfig(config, codeQL);
t.notDeepEqual(result, undefined);
const expectedEnv = { const expectedEnv = {
foo: "bar", foo: "bar",
CODEQL_DIST: bundlePath,
CODEQL_PLATFORM: codeqlPlatform,
ODASA_TRACER_CONFIGURATION: result.spec, ODASA_TRACER_CONFIGURATION: result.spec,
}; };
if (process.platform === "darwin") { if (process.platform === "darwin") {
@@ -269,6 +281,15 @@ ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
else if (process.platform !== "win32") { else if (process.platform !== "win32") {
expectedEnv["LD_PRELOAD"] = path.join(path.dirname(codeQL.getPath()), "tools", "linux64", "${LIB}trace.so"); expectedEnv["LD_PRELOAD"] = path.join(path.dirname(codeQL.getPath()), "tools", "linux64", "${LIB}trace.so");
} }
if (process.platform === "win32") {
expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/win64/runner.exe");
}
else if (process.platform === "darwin") {
expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/osx64/runner");
}
else {
expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/linux64/runner");
}
t.deepEqual(result, { t.deepEqual(result, {
spec: path.join(tmpDir, "compound-spec"), spec: path.join(tmpDir, "compound-spec"),
env: expectedEnv, env: expectedEnv,

File diff suppressed because one or more lines are too long

49
lib/upload-lib.js generated
View File

@@ -63,32 +63,55 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, mode, logger) {
logger.debug(`response status: ${response.status}`); logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results"); logger.info("Successfully uploaded results");
} }
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
function findSarifFilesInDir(sarifPath) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && entry.name.endsWith(".sarif")) {
sarifFiles.push(path.resolve(dir, entry.name));
}
else if (entry.isDirectory()) {
walkSarifFiles(path.resolve(dir, entry.name));
}
}
};
walkSarifFiles(sarifPath);
return sarifFiles;
}
exports.findSarifFilesInDir = findSarifFilesInDir;
// Uploads a single sarif file or a directory of sarif files // Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to. // depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded // Returns true iff the upload occurred and succeeded
async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, gitHubVersion, apiDetails, mode, logger) { async function uploadFromActions(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, gitHubVersion, apiDetails, logger) {
const sarifFiles = []; return await uploadFiles(getSarifFilePaths(sarifPath), repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, gitHubVersion, apiDetails, "actions", logger);
}
exports.uploadFromActions = uploadFromActions;
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function uploadFromRunner(sarifPath, repositoryNwo, commitOid, ref, checkoutPath, gitHubVersion, apiDetails, logger) {
return await uploadFiles(getSarifFilePaths(sarifPath), repositoryNwo, commitOid, ref, undefined, undefined, undefined, checkoutPath, undefined, gitHubVersion, apiDetails, "runner", logger);
}
exports.uploadFromRunner = uploadFromRunner;
function getSarifFilePaths(sarifPath) {
if (!fs.existsSync(sarifPath)) { if (!fs.existsSync(sarifPath)) {
throw new Error(`Path does not exist: ${sarifPath}`); throw new Error(`Path does not exist: ${sarifPath}`);
} }
let sarifFiles;
if (fs.lstatSync(sarifPath).isDirectory()) { if (fs.lstatSync(sarifPath).isDirectory()) {
const paths = fs sarifFiles = findSarifFilesInDir(sarifPath);
.readdirSync(sarifPath)
.filter((f) => f.endsWith(".sarif"))
.map((f) => path.resolve(sarifPath, f));
for (const filepath of paths) {
sarifFiles.push(filepath);
}
if (sarifFiles.length === 0) { if (sarifFiles.length === 0) {
throw new Error(`No SARIF files found to upload in "${sarifPath}".`); throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
} }
} }
else { else {
sarifFiles.push(sarifPath); sarifFiles = [sarifPath];
} }
return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, gitHubVersion, apiDetails, mode, logger); return sarifFiles;
} }
exports.upload = upload;
// Counts the number of results in the given SARIF file // Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) { function countResultsInSarif(sarif) {
let numResults = 0; let numResults = 0;
@@ -142,7 +165,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
if (process.env.GITHUB_EVENT_NAME === "pull_request" && if (process.env.GITHUB_EVENT_NAME === "pull_request" &&
process.env.GITHUB_EVENT_PATH) { process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")); const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
payloadObj.base_ref = `refs/heads/$githubEvent.pull_request.base.ref`; payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha; payloadObj.base_sha = githubEvent.pull_request.base.sha;
} }
} }
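A small usage sketch of the newly exported helper, assuming it is called from a sibling module in this package; the results directory and file names are placeholders:

import * as path from "path";
import * as uploadLib from "./upload-lib";

// Collect every .sarif file under a (placeholder) results directory; the walk is
// recursive and deliberately skips symlinks, as implemented above.
const sarifFiles = uploadLib.findSarifFilesInDir(path.resolve("results"));
console.log(sarifFiles); // e.g. ["/abs/results/app.sarif", "/abs/results/sub/extra.sarif"]

Both uploadFromActions and uploadFromRunner go through the same getSarifFilePaths logic, so a single .sarif file, a directory tree of them, a missing path, and an empty directory are all handled identically on the Actions and runner code paths.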

File diff suppressed because one or more lines are too long

38
lib/upload-lib.test.js generated
View File

@@ -1,7 +1,4 @@
"use strict"; "use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) { var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod; if (mod && mod.__esModule) return mod;
var result = {}; var result = {};
@@ -9,11 +6,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod; result["default"] = mod;
return result; return result;
}; };
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava")); const ava_1 = __importDefault(require("ava"));
const logging_1 = require("./logging"); const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils"); const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib")); const uploadLib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
testing_utils_1.setupTests(ava_1.default); testing_utils_1.setupTests(ava_1.default);
ava_1.default("validateSarifFileSchema - valid", (t) => { ava_1.default("validateSarifFileSchema - valid", (t) => {
const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`; const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
@@ -44,8 +47,8 @@ ava_1.default("validate correct payload used per version", async (t) => {
process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`; process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`;
for (const version of newVersions) { for (const version of newVersions) {
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions"); const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions");
t.truthy(payload.base_ref); t.deepEqual(payload.base_ref, "refs/heads/master");
t.truthy(payload.base_sha); t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
} }
for (const version of oldVersions) { for (const version of oldVersions) {
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions"); const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions");
@@ -54,4 +57,29 @@ ava_1.default("validate correct payload used per version", async (t) => {
t.falsy(payload.base_sha); t.falsy(payload.base_sha);
} }
}); });
ava_1.default("finding SARIF files", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
// include a couple of sarif files
fs.writeFileSync(path.join(tmpDir, "a.sarif"), "");
fs.writeFileSync(path.join(tmpDir, "b.sarif"), "");
// other random files shouldn't be returned
fs.writeFileSync(path.join(tmpDir, "c.foo"), "");
// we should recursively look in subdirectories
fs.mkdirSync(path.join(tmpDir, "dir1"));
fs.writeFileSync(path.join(tmpDir, "dir1", "d.sarif"), "");
fs.mkdirSync(path.join(tmpDir, "dir1", "dir2"));
fs.writeFileSync(path.join(tmpDir, "dir1", "dir2", "e.sarif"), "");
// we should ignore symlinks
fs.mkdirSync(path.join(tmpDir, "dir3"));
fs.symlinkSync(tmpDir, path.join(tmpDir, "dir3", "symlink1"), "dir");
fs.symlinkSync(path.join(tmpDir, "a.sarif"), path.join(tmpDir, "dir3", "symlink2.sarif"), "file");
const sarifFiles = uploadLib.findSarifFilesInDir(tmpDir);
t.deepEqual(sarifFiles, [
path.join(tmpDir, "a.sarif"),
path.join(tmpDir, "b.sarif"),
path.join(tmpDir, "dir1", "d.sarif"),
path.join(tmpDir, "dir1", "dir2", "e.sarif"),
]);
});
});
//# sourceMappingURL=upload-lib.test.js.map //# sourceMappingURL=upload-lib.test.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,uCAA4C;AAC5C,mDAA6C;AAC7C,wDAA0C;AAG1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC5C,MAAM,SAAS,GAAG,GAAG,SAAS,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CACf,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC9C,MAAM,SAAS,GAAG,GAAG,SAAS,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CACZ,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,2CAA2C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC5D,MAAM,WAAW,GAAoB;QACnC,EAAE,IAAI,EAAE,QAAQ,EAAE;QAClB,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE;KACnC,CAAC;IACF,MAAM,WAAW,GAAoB;QACnC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE;QACnC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE;KACnC,CAAC;IACF,MAAM,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;IAEpD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,MAAM,CAAC;IAC1C,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,MAAM,OAAO,GAAQ,SAAS,CAAC,YAAY,CACzC,QAAQ,EACR,mBAAmB,EACnB,KAAK,EACL,SAAS,EACT,EAAE,EACF,SAAS,EACT,UAAU,EACV,SAAS,EACT,CAAC,QAAQ,EAAE,QAAQ,CAAC,EACpB,OAAO,EACP,SAAS,CACV,CAAC;QACF,kCAAkC;QAClC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC1B,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAC3B;IAED,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,cAAc,CAAC;IAClD,OAAO,CAAC,GAAG,CACT,mBAAmB,CACpB,GAAG,GAAG,SAAS,oCAAoC,CAAC;IACrD,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,MAAM,OAAO,GAAQ,SAAS,CAAC,YAAY,CACzC,QAAQ,EACR,qBAAqB,EACrB,KAAK,EACL,SAAS,EACT,EAAE,EACF,SAAS,EACT,UAAU,EACV,SAAS,EACT,CAAC,QAAQ,EAAE,QAAQ,CAAC,EACpB,OAAO,EACP,SAAS,CACV,CAAC;QACF,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC3B,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAC5B;IAED,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,MAAM,OAAO,GAAQ,SAAS,CAAC,YAAY,CACzC,QAAQ,EACR,qBAAqB,EACrB,KAAK,EACL,SAAS,EACT,EAAE,EACF,SAAS,EACT,UAAU,EACV,SAAS,EACT,CAAC,QAAQ,EAAE,QAAQ,CAAC,EACpB,OAAO,EACP,SAAS,CACV,CAAC;QACF,iDAAiD;QACjD,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC1B,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAC3B;AACH,CAAC,CAAC,CAAC"} 
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,8CAAuB;AAEvB,uCAA4C;AAC5C,mDAA6C;AAC7C,wDAA0C;AAC1C,iCAAmD;AAEnD,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC5C,MAAM,SAAS,GAAG,GAAG,SAAS,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CACf,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC9C,MAAM,SAAS,GAAG,GAAG,SAAS,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CACZ,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,2CAA2C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC5D,MAAM,WAAW,GAAoB;QACnC,EAAE,IAAI,EAAE,QAAQ,EAAE;QAClB,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE;KACnC,CAAC;IACF,MAAM,WAAW,GAAoB;QACnC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE;QACnC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE;KACnC,CAAC;IACF,MAAM,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;IAEpD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,MAAM,CAAC;IAC1C,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,MAAM,OAAO,GAAQ,SAAS,CAAC,YAAY,CACzC,QAAQ,EACR,mBAAmB,EACnB,KAAK,EACL,SAAS,EACT,EAAE,EACF,SAAS,EACT,UAAU,EACV,SAAS,EACT,CAAC,QAAQ,EAAE,QAAQ,CAAC,EACpB,OAAO,EACP,SAAS,CACV,CAAC;QACF,kCAAkC;QAClC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC1B,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAC3B;IAED,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,cAAc,CAAC;IAClD,OAAO,CAAC,GAAG,CACT,mBAAmB,CACpB,GAAG,GAAG,SAAS,oCAAoC,CAAC;IACrD,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,MAAM,OAAO,GAAQ,SAAS,CAAC,YAAY,CACzC,QAAQ,EACR,qBAAqB,EACrB,KAAK,EACL,SAAS,EACT,EAAE,EACF,SAAS,EACT,UAAU,EACV,SAAS,EACT,CAAC,QAAQ,EAAE,QAAQ,CAAC,EACpB,OAAO,EACP,SAAS,CACV,CAAC;QACF,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,QAAQ,EAAE,mBAAmB,CAAC,CAAC;QACnD,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,QAAQ,EAAE,0CAA0C,CAAC,CAAC;KAC3E;IAED,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,MAAM,OAAO,GAAQ,SAAS,CAAC,YAAY,CACzC,QAAQ,EACR,qBAAqB,EACrB,KAAK,EACL,SAAS,EACT,EAAE,EACF,SAAS,EACT,UAAU,EACV,SAAS,EACT,CAAC,QAAQ,EAAE,QAAQ,CAAC,EACpB,OAAO,EACP,SAAS,CACV,CAAC;QACF,iDAAiD;QACjD,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC1B,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAC3B;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qBAAqB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACtC,MAAM,iBAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,kCAAkC;QAClC,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC;QACnD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC;QAEnD,2CAA2C;QAC3C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,EAAE,CAAC,CAAC;QAEjD,+CAA+C;QAC/C,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;QACxC,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC;QAC3D,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;QAChD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC;QAEnE,4BAA4B;QAC5B,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;QACxC,EAAE,CAAC,WAAW,CAAC,MAAM,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,KAAK,CAAC,CAAC;QACrE,EAAE,CAAC,WAAW,CACZ,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,EAC5B,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,gBAAgB,CAAC,EAC3C,MAAM,CACP,CAAC;QAEF,MAAM,UAAU,GAAG,SAAS,CAAC,mBAAmB,CAAC,MAAM,CAAC,CAAC;QAEzD,CAAC,CAAC,SAAS,CAAC,UAAU,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;YAC5B,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;YAC5B,I
AAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,CAAC;YACpC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,CAAC;SAC7C,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

View File

@@ -32,7 +32,7 @@ async function run() {
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
}; };
const gitHubVersion = await util_1.getGitHubVersion(apiDetails); const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
const uploadStats = await upload_lib.upload(actionsUtil.getRequiredInput("sarif_file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, "actions", logging_1.getActionsLogger()); const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, logging_1.getActionsLogger());
await sendSuccessStatusReport(startedAt, uploadStats); await sendSuccessStatusReport(startedAt, uploadStats);
} }
catch (error) { catch (error) {

View File

@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAA0C;AAM1C,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,uBAAgB,CAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,MAAM,WAAW,CAAC,YAAY,EAAE,EAChC,MAAM,WAAW,CAAC,MAAM,EAAE,EAC1B,MAAM,WAAW,CAAC,cAAc,EAAE,EAClC,WAAW,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAClD,WAAW,CAAC,gBAAgB,EAAE,EAC9B,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,aAAa,EACb,UAAU,EACV,SAAS,EACT,0BAAgB,EAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"} 
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAA0C;AAM1C,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,uBAAgB,CAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,MAAM,WAAW,CAAC,YAAY,EAAE,EAChC,MAAM,WAAW,CAAC,MAAM,EAAE,EAC1B,MAAM,WAAW,CAAC,cAAc,EAAE,EAClC,WAAW,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAClD,WAAW,CAAC,gBAAgB,EAAE,EAC9B,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,aAAa,EACb,UAAU,EACV,0BAAgB,EAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

4
lib/util.js generated
View File

@@ -222,10 +222,10 @@ function checkGitHubVersionInRange(version, mode, logger) {
const disallowedAPIVersionReason = apiVersionInRange(version.version, apiCompatibility.minimumVersion, apiCompatibility.maximumVersion); const disallowedAPIVersionReason = apiVersionInRange(version.version, apiCompatibility.minimumVersion, apiCompatibility.maximumVersion);
const toolName = mode === "actions" ? "Action" : "Runner"; const toolName = mode === "actions" ? "Action" : "Runner";
if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_OLD) { if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_OLD) {
logger.warning(`The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${version}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`); logger.warning(`The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${version.version}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`);
} }
if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_NEW) { if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_NEW) {
logger.warning(`GitHub Enterprise ${version} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`); logger.warning(`GitHub Enterprise ${version.version} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`);
} }
hasBeenWarnedAboutVersion = true; hasBeenWarnedAboutVersion = true;
if (mode === "actions") { if (mode === "actions") {
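The fix above replaces ${version} with ${version.version} in both warnings: interpolating the whole object renders as "[object Object]". A quick illustration (the object shape is inferred from how it is used here, not spelled out in this diff):

const version = { type: "ghes", version: "3.0" };
console.log(`GitHub Enterprise ${version}`);         // "GitHub Enterprise [object Object]"
console.log(`GitHub Enterprise ${version.version}`); // "GitHub Enterprise 3.0"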

File diff suppressed because one or more lines are too long

5555
node_modules/.package-lock.json generated vendored Normal file

File diff suppressed because it is too large

86
node_modules/ini/ini.js generated vendored
View File

@@ -15,7 +15,7 @@ function encode (obj, opt) {
if (typeof opt === 'string') { if (typeof opt === 'string') {
opt = { opt = {
section: opt, section: opt,
whitespace: false whitespace: false,
} }
} else { } else {
opt = opt || {} opt = opt || {}
@@ -30,27 +30,25 @@ function encode (obj, opt) {
val.forEach(function (item) { val.forEach(function (item) {
out += safe(k + '[]') + separator + safe(item) + '\n' out += safe(k + '[]') + separator + safe(item) + '\n'
}) })
} else if (val && typeof val === 'object') { } else if (val && typeof val === 'object')
children.push(k) children.push(k)
} else { else
out += safe(k) + separator + safe(val) + eol out += safe(k) + separator + safe(val) + eol
}
}) })
if (opt.section && out.length) { if (opt.section && out.length)
out = '[' + safe(opt.section) + ']' + eol + out out = '[' + safe(opt.section) + ']' + eol + out
}
children.forEach(function (k, _, __) { children.forEach(function (k, _, __) {
var nk = dotSplit(k).join('\\.') var nk = dotSplit(k).join('\\.')
var section = (opt.section ? opt.section + '.' : '') + nk var section = (opt.section ? opt.section + '.' : '') + nk
var child = encode(obj[k], { var child = encode(obj[k], {
section: section, section: section,
whitespace: opt.whitespace whitespace: opt.whitespace,
}) })
if (out.length && child.length) { if (out.length && child.length)
out += eol out += eol
}
out += child out += child
}) })
@@ -62,7 +60,7 @@ function dotSplit (str) {
.replace(/\\\./g, '\u0001') .replace(/\\\./g, '\u0001')
.split(/\./).map(function (part) { .split(/\./).map(function (part) {
return part.replace(/\1/g, '\\.') return part.replace(/\1/g, '\\.')
.replace(/\2LITERAL\\1LITERAL\2/g, '\u0001') .replace(/\2LITERAL\\1LITERAL\2/g, '\u0001')
}) })
} }
@@ -75,15 +73,25 @@ function decode (str) {
var lines = str.split(/[\r\n]+/g) var lines = str.split(/[\r\n]+/g)
lines.forEach(function (line, _, __) { lines.forEach(function (line, _, __) {
if (!line || line.match(/^\s*[;#]/)) return if (!line || line.match(/^\s*[;#]/))
return
var match = line.match(re) var match = line.match(re)
if (!match) return if (!match)
return
if (match[1] !== undefined) { if (match[1] !== undefined) {
section = unsafe(match[1]) section = unsafe(match[1])
if (section === '__proto__') {
// not allowed
// keep parsing the section, but don't attach it.
p = {}
return
}
p = out[section] = out[section] || {} p = out[section] = out[section] || {}
return return
} }
var key = unsafe(match[2]) var key = unsafe(match[2])
if (key === '__proto__')
return
var value = match[3] ? unsafe(match[4]) : true var value = match[3] ? unsafe(match[4]) : true
switch (value) { switch (value) {
case 'true': case 'true':
@@ -94,20 +102,20 @@ function decode (str) {
// Convert keys with '[]' suffix to an array // Convert keys with '[]' suffix to an array
if (key.length > 2 && key.slice(-2) === '[]') { if (key.length > 2 && key.slice(-2) === '[]') {
key = key.substring(0, key.length - 2) key = key.substring(0, key.length - 2)
if (!p[key]) { if (key === '__proto__')
return
if (!p[key])
p[key] = [] p[key] = []
} else if (!Array.isArray(p[key])) { else if (!Array.isArray(p[key]))
p[key] = [p[key]] p[key] = [p[key]]
}
} }
// safeguard against resetting a previously defined // safeguard against resetting a previously defined
// array by accidentally forgetting the brackets // array by accidentally forgetting the brackets
if (Array.isArray(p[key])) { if (Array.isArray(p[key]))
p[key].push(value) p[key].push(value)
} else { else
p[key] = value p[key] = value
}
}) })
// {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}} // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}}
@@ -115,9 +123,9 @@ function decode (str) {
Object.keys(out).filter(function (k, _, __) { Object.keys(out).filter(function (k, _, __) {
if (!out[k] || if (!out[k] ||
typeof out[k] !== 'object' || typeof out[k] !== 'object' ||
Array.isArray(out[k])) { Array.isArray(out[k]))
return false return false
}
// see if the parent section is also an object. // see if the parent section is also an object.
// if so, add it to that, and mark this one for deletion // if so, add it to that, and mark this one for deletion
var parts = dotSplit(k) var parts = dotSplit(k)
@@ -125,12 +133,15 @@ function decode (str) {
var l = parts.pop() var l = parts.pop()
var nl = l.replace(/\\\./g, '.') var nl = l.replace(/\\\./g, '.')
parts.forEach(function (part, _, __) { parts.forEach(function (part, _, __) {
if (!p[part] || typeof p[part] !== 'object') p[part] = {} if (part === '__proto__')
return
if (!p[part] || typeof p[part] !== 'object')
p[part] = {}
p = p[part] p = p[part]
}) })
if (p === out && nl === l) { if (p === out && nl === l)
return false return false
}
p[nl] = out[k] p[nl] = out[k]
return true return true
}).forEach(function (del, _, __) { }).forEach(function (del, _, __) {
@@ -152,18 +163,20 @@ function safe (val) {
(val.length > 1 && (val.length > 1 &&
isQuoted(val)) || isQuoted(val)) ||
val !== val.trim()) val !== val.trim())
? JSON.stringify(val) ? JSON.stringify(val)
: val.replace(/;/g, '\\;').replace(/#/g, '\\#') : val.replace(/;/g, '\\;').replace(/#/g, '\\#')
} }
function unsafe (val, doUnesc) { function unsafe (val, doUnesc) {
val = (val || '').trim() val = (val || '').trim()
if (isQuoted(val)) { if (isQuoted(val)) {
// remove the single quotes before calling JSON.parse // remove the single quotes before calling JSON.parse
if (val.charAt(0) === "'") { if (val.charAt(0) === "'")
val = val.substr(1, val.length - 2) val = val.substr(1, val.length - 2)
}
try { val = JSON.parse(val) } catch (_) {} try {
val = JSON.parse(val)
} catch (_) {}
} else { } else {
// walk the val to find the first not-escaped ; character // walk the val to find the first not-escaped ; character
var esc = false var esc = false
@@ -171,23 +184,22 @@ function unsafe (val, doUnesc) {
for (var i = 0, l = val.length; i < l; i++) { for (var i = 0, l = val.length; i < l; i++) {
var c = val.charAt(i) var c = val.charAt(i)
if (esc) { if (esc) {
if ('\\;#'.indexOf(c) !== -1) { if ('\\;#'.indexOf(c) !== -1)
unesc += c unesc += c
} else { else
unesc += '\\' + c unesc += '\\' + c
}
esc = false esc = false
} else if (';#'.indexOf(c) !== -1) { } else if (';#'.indexOf(c) !== -1)
break break
} else if (c === '\\') { else if (c === '\\')
esc = true esc = true
} else { else
unesc += c unesc += c
}
} }
if (esc) { if (esc)
unesc += '\\' unesc += '\\'
}
return unesc.trim() return unesc.trim()
} }
return val return val
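The guards added above (bumping the vendored copy from 1.3.5 to 1.3.8) are the upstream fix for the ini prototype-pollution issue: a __proto__ section or key is still parsed but never attached to the output. A quick check, assuming ini@1.3.8 and its typings are installed:

import * as ini from "ini";

const parsed = ini.decode("[__proto__]\npolluted = true\n");

console.log(parsed.polluted);      // undefined — the malicious section was dropped
console.log(({} as any).polluted); // undefined — Object.prototype was not polluted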

23
node_modules/ini/package.json generated vendored
View File

@@ -2,26 +2,29 @@
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)", "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"name": "ini", "name": "ini",
"description": "An ini encoder/decoder for node", "description": "An ini encoder/decoder for node",
"version": "1.3.5", "version": "1.3.8",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git://github.com/isaacs/ini.git" "url": "git://github.com/isaacs/ini.git"
}, },
"main": "ini.js", "main": "ini.js",
"scripts": { "scripts": {
"pretest": "standard ini.js", "eslint": "eslint",
"test": "tap test/*.js --100 -J", "lint": "npm run eslint -- ini.js test/*.js",
"lintfix": "npm run lint -- --fix",
"test": "tap",
"posttest": "npm run lint",
"preversion": "npm test", "preversion": "npm test",
"postversion": "npm publish", "postversion": "npm publish",
"postpublish": "git push origin --all; git push origin --tags" "prepublishOnly": "git push origin --follow-tags"
}, },
"engines": {
"node": "*"
},
"dependencies": {},
"devDependencies": { "devDependencies": {
"standard": "^10.0.3", "eslint": "^7.9.0",
"tap": "^10.7.3 || 11" "eslint-plugin-import": "^2.22.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-standard": "^4.0.1",
"tap": "14"
}, },
"license": "ISC", "license": "ISC",
"files": [ "files": [

9
package-lock.json generated
View File

@@ -2077,8 +2077,7 @@
"fast-deep-equal": { "fast-deep-equal": {
"version": "3.1.3", "version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
"dev": true
}, },
"fast-diff": { "fast-diff": {
"version": "1.2.0", "version": "1.2.0",
@@ -2435,9 +2434,9 @@
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
}, },
"ini": { "ini": {
"version": "1.3.5", "version": "1.3.8",
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
"dev": true "dev": true
}, },
"irregular-plurals": { "irregular-plurals": {


@@ -29,6 +29,7 @@
"@octokit/types": "^5.5.0", "@octokit/types": "^5.5.0",
"commander": "^6.0.0", "commander": "^6.0.0",
"console-log-level": "^1.4.1", "console-log-level": "^1.4.1",
"fast-deep-equal": "^3.1.3",
"file-url": "^3.0.0", "file-url": "^3.0.0",
"fs": "0.0.1-security", "fs": "0.0.1-security",
"js-yaml": "^3.13.1", "js-yaml": "^3.13.1",


@@ -31,8 +31,7 @@ python3 -m pip install --user pipenv
if command -v python2 &> /dev/null; then
    # Setup Python 2 dependency installation tools.
    # The Ubuntu 20.04 GHA environment does not come with a Python 2 pip
-   curl https://bootstrap.pypa.io/get-pip.py --output get-pip.py
-   python2 get-pip.py
+   curl --location --fail https://bootstrap.pypa.io/get-pip.py | python2
    python2 -m pip install --user --upgrade pip setuptools wheel


@@ -2072,9 +2072,9 @@
"dev": true "dev": true
}, },
"ini": { "ini": {
"version": "1.3.5", "version": "1.3.8",
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
"dev": true "dev": true
}, },
"interpret": { "interpret": {


@@ -1,9 +1,17 @@
import test from "ava"; import test from "ava";
import * as yaml from "js-yaml";
import sinon from "sinon"; import sinon from "sinon";
import * as actionsutil from "./actions-util"; import * as actionsutil from "./actions-util";
import { setupTests } from "./testing-utils"; import { setupTests } from "./testing-utils";
function errorCodes(
actual: actionsutil.CodedError[],
expected: actionsutil.CodedError[]
): [string[], string[]] {
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
}
setupTests(test); setupTests(test);
test("getRef() throws on the empty string", async (t) => { test("getRef() throws on the empty string", async (t) => {
@@ -82,265 +90,302 @@ test("prepareEnvironment() when a local run", (t) => {
  t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
});
- test("validateWorkflow() when on is missing", (t) => {
-   const errors = actionsutil.validateWorkflow({});
-   t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
+ test("getWorkflowErrors() when on is empty", (t) => {
+   const errors = actionsutil.getWorkflowErrors({ on: {} });
+   t.deepEqual(...errorCodes(errors, []));
});
- test("validateWorkflow() when on.push is missing", (t) => {
-   const errors = actionsutil.validateWorkflow({ on: {} });
-   console.log(errors);
-   t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
+ test("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
+   const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
+   t.deepEqual(...errorCodes(errors, []));
});
- test("validateWorkflow() when on.push is an array missing pull_request", (t) => {
-   const errors = actionsutil.validateWorkflow({ on: ["push"] });
-   t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPullRequestHook]);
+ test("getWorkflowErrors() when on.push is an array missing push", (t) => {
+   const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
+   t.deepEqual(
+     ...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook])
+   );
});
- test("validateWorkflow() when on.push is an array missing push", (t) => {
-   const errors = actionsutil.validateWorkflow({ on: ["pull_request"] });
-   t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPushHook]);
- });
- test("validateWorkflow() when on.push is valid", (t) => {
-   const errors = actionsutil.validateWorkflow({
+ test("getWorkflowErrors() when on.push is valid", (t) => {
+   const errors = actionsutil.getWorkflowErrors({
    on: ["push", "pull_request"],
  });
-   t.deepEqual(errors, []);
+   t.deepEqual(...errorCodes(errors, []));
});
- test("validateWorkflow() when on.push is a valid superset", (t) => {
-   const errors = actionsutil.validateWorkflow({
+ test("getWorkflowErrors() when on.push is a valid superset", (t) => {
+   const errors = actionsutil.getWorkflowErrors({
    on: ["push", "pull_request", "schedule"],
  });
-   t.deepEqual(errors, []);
+   t.deepEqual(...errorCodes(errors, []));
});
- test("validateWorkflow() when on.push should not have a path", (t) => {
-   const errors = actionsutil.validateWorkflow({
+ test("getWorkflowErrors() when on.push should not have a path", (t) => {
+   const errors = actionsutil.getWorkflowErrors({
    on: {
      push: { branches: ["main"], paths: ["test/*"] },
      pull_request: { branches: ["main"] },
    },
  });
-   t.deepEqual(errors, [actionsutil.WorkflowErrors.PathsSpecified]);
+   t.deepEqual(
+     ...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified])
+   );
});
- test("validateWorkflow() when on.push is a correct object", (t) => {
-   const errors = actionsutil.validateWorkflow({
+ test("getWorkflowErrors() when on.push is a correct object", (t) => {
+   const errors = actionsutil.getWorkflowErrors({
    on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
  });
-   t.deepEqual(errors, []);
+   t.deepEqual(...errorCodes(errors, []));
});
- test("validateWorkflow() when on.pull_requests is a string", (t) => {
-   const errors = actionsutil.validateWorkflow({
+ test("getWorkflowErrors() when on.pull_requests is a string", (t) => {
+   const errors = actionsutil.getWorkflowErrors({
    on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
  });
-   t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+   t.deepEqual(
+     ...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
+   );
});
- test("validateWorkflow() when on.pull_requests is a string and correct", (t) => {
-   const errors = actionsutil.validateWorkflow({
+ test("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
+   const errors = actionsutil.getWorkflowErrors({
    on: { push: { branches: "*" }, pull_request: { branches: "*" } },
  });
-   t.deepEqual(errors, []);
+   t.deepEqual(...errorCodes(errors, []));
});
- test("validateWorkflow() when on.push is correct with empty objects", (t) => {
-   const errors = actionsutil.validateWorkflow({
-     on: { push: undefined, pull_request: undefined },
-   });
+ test("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
+   const errors = actionsutil.getWorkflowErrors(
+     yaml.safeLoad(`
+ on:
+   push:
+   pull_request:
+ `)
+   );
-   t.deepEqual(errors, []);
+   t.deepEqual(...errorCodes(errors, []));
});
- test("validateWorkflow() when on.push is mismatched", (t) => {
-   const errors = actionsutil.validateWorkflow({
+ test("getWorkflowErrors() when on.push is mismatched", (t) => {
+   const errors = actionsutil.getWorkflowErrors({
    on: {
      push: { branches: ["main"] },
      pull_request: { branches: ["feature"] },
    },
  });
-   t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+   t.deepEqual(
+     ...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
+   );
});
- test("validateWorkflow() when on.push is not mismatched", (t) => {
-   const errors = actionsutil.validateWorkflow({
+ test("getWorkflowErrors() when on.push is not mismatched", (t) => {
+   const errors = actionsutil.getWorkflowErrors({
    on: {
      push: { branches: ["main", "feature"] },
      pull_request: { branches: ["main"] },
    },
  });
-   t.deepEqual(errors, []);
+   t.deepEqual(...errorCodes(errors, []));
});
- test("validateWorkflow() when on.push is mismatched for pull_request", (t) => {
-   const errors = actionsutil.validateWorkflow({
+ test("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
+   const errors = actionsutil.getWorkflowErrors({
    on: {
      push: { branches: ["main"] },
      pull_request: { branches: ["main", "feature"] },
    },
  });
-   t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+   t.deepEqual(
+     ...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
+   );
});
test("validateWorkflow() for a range of malformed workflows", (t) => { test("getWorkflowErrors() for a range of malformed workflows", (t) => {
t.deepEqual( t.deepEqual(
actionsutil.validateWorkflow({ ...errorCodes(
on: { actionsutil.getWorkflowErrors({
push: 1, on: {
pull_request: 1, push: 1,
}, pull_request: 1,
} as any),
[]
);
t.deepEqual(
actionsutil.validateWorkflow({
on: 1,
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
);
t.deepEqual(
actionsutil.validateWorkflow({
on: 1,
jobs: 1,
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
);
t.deepEqual(
actionsutil.validateWorkflow({
on: 1,
jobs: [1],
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
);
t.deepEqual(
actionsutil.validateWorkflow({
on: 1,
jobs: { 1: 1 },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
);
t.deepEqual(
actionsutil.validateWorkflow({
on: 1,
jobs: { test: 1 },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
);
t.deepEqual(
actionsutil.validateWorkflow({
on: 1,
jobs: { test: [1] },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
);
t.deepEqual(
actionsutil.validateWorkflow({
on: 1,
jobs: { test: { steps: 1 } },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
);
t.deepEqual(
actionsutil.validateWorkflow({
on: 1,
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
);
t.deepEqual(
actionsutil.validateWorkflow({
on: 1,
jobs: { test: [undefined] },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
);
t.deepEqual(actionsutil.validateWorkflow(1 as any), [
actionsutil.WorkflowErrors.MissingHooks,
]);
t.deepEqual(
actionsutil.validateWorkflow({
on: {
push: {
branches: 1,
}, },
pull_request: { } as any),
branches: 1, []
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors({
on: 1,
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors({
on: 1,
jobs: 1,
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors({
on: 1,
jobs: [1],
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors({
on: 1,
jobs: { 1: 1 },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: 1 },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: [1] },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: { steps: 1 } },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: [undefined] },
} as any),
[actionsutil.WorkflowErrors.MissingHooks]
)
);
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1 as any), []));
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors({
on: {
push: {
branches: 1,
},
pull_request: {
branches: 1,
},
}, },
}, } as any),
} as any), []
[] )
); );
}); });
test("validateWorkflow() when on.pull_request for every branch but push specifies branches", (t) => { test("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors(
on: { yaml.safeLoad(`
push: { branches: ["main"] }, name: "CodeQL"
pull_request: null, on:
}, push:
}); branches: ["main"]
pull_request:
`)
);
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]); t.deepEqual(
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
);
}); });
test("validateWorkflow() when on.pull_request for wildcard branches", (t) => { test("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors({
on: { on: {
push: { branches: ["feature/*"] }, push: { branches: ["feature/*"] },
pull_request: { branches: "feature/moose" }, pull_request: { branches: "feature/moose" },
}, },
}); });
t.deepEqual(errors, []); t.deepEqual(...errorCodes(errors, []));
}); });
test("validateWorkflow() when on.pull_request for mismatched wildcard branches", (t) => { test("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
const errors = actionsutil.validateWorkflow({ const errors = actionsutil.getWorkflowErrors({
on: { on: {
push: { branches: ["feature/moose"] }, push: { branches: ["feature/moose"] },
pull_request: { branches: "feature/*" }, pull_request: { branches: "feature/*" },
}, },
}); });
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]); t.deepEqual(
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
);
}); });
test("validateWorkflow() when HEAD^2 is checked out", (t) => { test("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
const errors = actionsutil.validateWorkflow({ process.env.GITHUB_JOB = "test";
const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request"], on: ["push", "pull_request"],
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } }, jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
}); });
t.deepEqual(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]); t.deepEqual(
...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead])
);
}); });
test("formatWorkflowErrors() when there is one error", (t) => { test("formatWorkflowErrors() when there is one error", (t) => {
@@ -400,3 +445,171 @@ test("patternIsSuperset()", (t) => {
    )
  );
});
test("getWorkflowErrors() when branches contain dots", (t) => {
const errors = actionsutil.getWorkflowErrors(
yaml.safeLoad(`
on:
push:
branches: [4.1, master]
pull_request:
# The branches below must be a subset of the branches above
branches: [4.1, master]
`)
);
t.deepEqual(...errorCodes(errors, []));
});
test("getWorkflowErrors() when on.push has a trailing comma", (t) => {
const errors = actionsutil.getWorkflowErrors(
yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master, ]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
`)
);
t.deepEqual(...errorCodes(errors, []));
});
test("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
process.env.GITHUB_JOB = "test";
const errors = actionsutil.getWorkflowErrors(
yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
jobs:
test:
steps:
- run: "git checkout HEAD^2"
test2:
steps:
- run: "git checkout HEAD^2"
test3:
steps: []
`)
);
t.deepEqual(
...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead])
);
});
test("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
process.env.GITHUB_JOB = "test3";
const errors = actionsutil.getWorkflowErrors(
yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
jobs:
test:
steps:
- run: "git checkout HEAD^2"
test2:
steps:
- run: "git checkout HEAD^2"
test3:
steps: []
`)
);
t.deepEqual(...errorCodes(errors, []));
});
test("getWorkflowErrors() when on is missing", (t) => {
const errors = actionsutil.getWorkflowErrors(
yaml.safeLoad(`
name: "CodeQL"
`)
);
t.deepEqual(...errorCodes(errors, []));
});
test("getWorkflowErrors() with a different on setup", (t) => {
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors(
yaml.safeLoad(`
name: "CodeQL"
on: "workflow_dispatch"
`)
),
[]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors(
yaml.safeLoad(`
name: "CodeQL"
on: [workflow_dispatch]
`)
),
[]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors(
yaml.safeLoad(`
name: "CodeQL"
on:
workflow_dispatch: {}
`)
),
[]
)
);
});
test("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors(
yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master]
`)
),
[]
)
);
t.deepEqual(
...errorCodes(
actionsutil.getWorkflowErrors(
yaml.safeLoad(`
name: "CodeQL"
on: ["push"]
`)
),
[]
)
);
});
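The new tests above all exercise one rule: every branch pattern under on.pull_request must be covered by some pattern under on.push, otherwise MismatchedBranches is reported. A rough standalone sketch of that subset check, assuming a patternIsSuperset(superset, subset) helper like the one these tests already rely on:

    // Hypothetical sketch; the real logic lives in getWorkflowErrors() in actions-util.ts.
    function hasMismatchedBranches(
      pushBranches: string[] | "**",
      prBranches: string[] | "**",
      patternIsSuperset: (superset: string, subset: string) => boolean
    ): boolean {
      if (pushBranches === "**") {
        return false; // push builds every branch, so every PR base has a baseline
      }
      if (prBranches === "**") {
        // PRs can target any branch, but push only builds a subset.
        return pushBranches.length > 0;
      }
      // Mismatch if some PR branch has no covering push pattern.
      return prBranches.some(
        (pr) => !pushBranches.some((push) => patternIsSuperset(push, pr))
      );
    }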


@@ -143,6 +143,7 @@ function escapeRegExp(string) {
function patternToRegExp(value) {
  return new RegExp(
    `^${value
+     .toString()
      .split(GLOB_PATTERN)
      .reduce(function (arr, cur) {
        if (cur === "**") {
@@ -183,19 +184,19 @@ enum MissingTriggers {
  PullRequest = 2,
}
- interface CodedError {
+ export interface CodedError {
  message: string;
  code: string;
}
- function toCodedErrors(errors: {
-   [key: string]: string;
- }): { [key: string]: CodedError } {
+ function toCodedErrors<T>(errors: T): Record<keyof T, CodedError> {
  return Object.entries(errors).reduce((acc, [key, value]) => {
    acc[key] = { message: value, code: key };
    return acc;
- }, {} as ReturnType<typeof toCodedErrors>);
+ }, {} as Record<keyof T, CodedError>);
}
+ // code to send back via status report
+ // message to add as a warning annotation to the run
export const WorkflowErrors = toCodedErrors({
  MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
  MissingHooks: `Please specify on.push and on.pull_request hooks so that Code Scanning can compare pull requests against the state of the base branch.`,
@@ -204,16 +205,20 @@ export const WorkflowErrors = toCodedErrors({
  PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
  PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
  CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
- LintFailed: `Unable to lint workflow for CodeQL.`,
});
- export function validateWorkflow(doc: Workflow): CodedError[] {
+ export function getWorkflowErrors(doc: Workflow): CodedError[] {
  const errors: CodedError[] = [];
- // .jobs[key].steps[].run
- for (const job of Object.values(doc?.jobs || {})) {
-   if (Array.isArray(job?.steps)) {
-     for (const step of job?.steps) {
+ const jobName = process.env.GITHUB_JOB;
+ if (jobName) {
+   const job = doc?.jobs?.[jobName];
+   const steps = job?.steps;
+   if (Array.isArray(steps)) {
+     for (const step of steps) {
        // this was advice that we used to give in the README
        // we actually want to run the analysis on the merge commit
        // to produce results that are more inline with expectations
@@ -221,6 +226,7 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
        // and avoid some race conditions
        if (step?.run === "git checkout HEAD^2") {
          errors.push(WorkflowErrors.CheckoutWrongHead);
+         break;
        }
      }
    }
@@ -229,33 +235,28 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
  let missing = MissingTriggers.None;
  if (doc.on === undefined) {
-   missing = MissingTriggers.Push | MissingTriggers.PullRequest;
+   // this is not a valid config
  } else if (typeof doc.on === "string") {
-   switch (doc.on) {
-     case "push":
-       missing = MissingTriggers.PullRequest;
-       break;
-     case "pull_request":
-       missing = MissingTriggers.Push;
-       break;
-     default:
-       missing = MissingTriggers.Push | MissingTriggers.PullRequest;
-       break;
+   if (doc.on === "pull_request") {
+     missing = MissingTriggers.Push;
    }
  } else if (Array.isArray(doc.on)) {
-   if (!doc.on.includes("push")) {
+   const hasPush = doc.on.includes("push");
+   const hasPullRequest = doc.on.includes("pull_request");
+   if (hasPullRequest && !hasPush) {
      missing = missing | MissingTriggers.Push;
    }
-   if (!doc.on.includes("pull_request")) {
-     missing = missing | MissingTriggers.PullRequest;
-   }
  } else if (isObject(doc.on)) {
-   if (!Object.prototype.hasOwnProperty.call(doc.on, "pull_request")) {
-     missing = missing | MissingTriggers.PullRequest;
-   }
-   if (!Object.prototype.hasOwnProperty.call(doc.on, "push")) {
+   const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
+   const hasPullRequest = Object.prototype.hasOwnProperty.call(
+     doc.on,
+     "pull_request"
+   );
+   if (!hasPush && hasPullRequest) {
      missing = missing | MissingTriggers.Push;
-   } else {
+   }
+   if (hasPush && hasPullRequest) {
      const paths = doc.on.push?.paths;
      // if you specify paths or paths-ignore you can end up with commits that have no baseline
      // if they didn't change any files
@@ -269,24 +270,29 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
    }
  }
-   const push = branchesToArray(doc.on.push?.branches);
+   // if doc.on.pull_request is null that means 'all branches'
+   // if doc.on.pull_request is undefined that means 'off'
+   // we only want to check for mismatched branches if pull_request is on.
+   if (doc.on.pull_request !== undefined) {
+     const push = branchesToArray(doc.on.push?.branches);
      if (push !== "**") {
        const pull_request = branchesToArray(doc.on.pull_request?.branches);
        if (pull_request !== "**") {
          const difference = pull_request.filter(
            (value) => !push.some((o) => patternIsSuperset(o, value))
          );
          if (difference.length > 0) {
            // there are branches in pull_request that may not have a baseline
            // because we are not building them on push
            errors.push(WorkflowErrors.MismatchedBranches);
          }
        } else if (push.length > 0) {
          // push is set up to run on a subset of branches
          // and you could open a PR against a branch with no baseline
          errors.push(WorkflowErrors.MismatchedBranches);
        }
      }
+   }
    }
  } else {
@@ -310,18 +316,31 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
  return errors;
}
- export async function getWorkflowErrors(): Promise<CodedError[]> {
+ export async function validateWorkflow(): Promise<undefined | string> {
+   let workflow: Workflow;
  try {
-   const workflow = await getWorkflow();
-   if (workflow === undefined) {
-     return [];
-   }
-   return validateWorkflow(workflow);
+   workflow = await getWorkflow();
  } catch (e) {
-   return [WorkflowErrors.LintFailed];
+   return `error: getWorkflow() failed: ${e.toString()}`;
  }
+ let workflowErrors: CodedError[];
+ try {
+   workflowErrors = getWorkflowErrors(workflow);
+ } catch (e) {
+   return `error: getWorkflowErrors() failed: ${e.toString()}`;
+ }
+ if (workflowErrors.length > 0) {
+   let message: string;
+   try {
+     message = formatWorkflowErrors(workflowErrors);
+   } catch (e) {
+     return `error: formatWorkflowErrors() failed: ${e.toString()}`;
+   }
+   core.warning(message);
+ }
+ return `warning: ${formatWorkflowCause(workflowErrors)}`;
}
export function formatWorkflowErrors(errors: CodedError[]): string {
@@ -339,19 +358,14 @@ export function formatWorkflowCause(errors: CodedError[]): undefined | string {
return errors.map((e) => e.code).join(","); return errors.map((e) => e.code).join(",");
} }
export async function getWorkflow(): Promise<Workflow | undefined> { export async function getWorkflow(): Promise<Workflow> {
const relativePath = await getWorkflowPath(); const relativePath = await getWorkflowPath();
const absolutePath = path.join( const absolutePath = path.join(
getRequiredEnvParam("GITHUB_WORKSPACE"), getRequiredEnvParam("GITHUB_WORKSPACE"),
relativePath relativePath
); );
try { return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
} catch (e) {
core.warning(`Could not read workflow: ${e.toString()}`);
return undefined;
}
} }
/** /**


@@ -1,16 +1,24 @@
import * as fs from "fs";
import * as path from "path";
import * as core from "@actions/core"; import * as core from "@actions/core";
import * as actionsUtil from "./actions-util"; import * as actionsUtil from "./actions-util";
import { import {
AnalysisStatusReport,
runAnalyze, runAnalyze,
CodeQLAnalysisError, CodeQLAnalysisError,
QueriesStatusReport,
} from "./analyze"; } from "./analyze";
import { getConfig } from "./config-utils"; import { Config, getConfig } from "./config-utils";
import { getActionsLogger } from "./logging"; import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository"; import { parseRepositoryNwo } from "./repository";
import * as upload_lib from "./upload-lib";
import * as util from "./util"; import * as util from "./util";
interface AnalysisStatusReport
extends upload_lib.UploadStatusReport,
QueriesStatusReport {}
interface FinishStatusReport interface FinishStatusReport
extends actionsUtil.StatusReportBase, extends actionsUtil.StatusReportBase,
AnalysisStatusReport {} AnalysisStatusReport {}
@@ -41,6 +49,7 @@ async function sendStatusReport(
async function run() {
  const startedAt = new Date();
  let stats: AnalysisStatusReport | undefined = undefined;
+ let config: Config | undefined = undefined;
  try {
    actionsUtil.prepareLocalRunEnvironment();
    if (
@@ -55,7 +64,7 @@ async function run() {
      return;
    }
    const logger = getActionsLogger();
-   const config = await getConfig(
+   config = await getConfig(
      actionsUtil.getRequiredEnvParam("RUNNER_TEMP"),
      logger
    );
@@ -68,25 +77,38 @@ async function run() {
auth: actionsUtil.getRequiredInput("token"), auth: actionsUtil.getRequiredInput("token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
}; };
stats = await runAnalyze( const outputDir = actionsUtil.getRequiredInput("output");
parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), const queriesStats = await runAnalyze(
await actionsUtil.getCommitOid(), outputDir,
await actionsUtil.getRef(),
await actionsUtil.getAnalysisKey(),
actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"),
actionsUtil.getWorkflowRunID(),
actionsUtil.getRequiredInput("checkout_path"),
actionsUtil.getRequiredInput("matrix"),
apiDetails,
actionsUtil.getRequiredInput("upload") === "true",
"actions",
actionsUtil.getRequiredInput("output"),
util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getMemoryFlag(actionsUtil.getOptionalInput("ram")),
util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")),
util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger),
config, config,
logger logger
); );
if (actionsUtil.getRequiredInput("upload") === "true") {
const uploadStats = await upload_lib.uploadFromActions(
outputDir,
parseRepositoryNwo(
actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")
),
await actionsUtil.getCommitOid(),
await actionsUtil.getRef(),
await actionsUtil.getAnalysisKey(),
actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"),
actionsUtil.getWorkflowRunID(),
actionsUtil.getRequiredInput("checkout_path"),
actionsUtil.getRequiredInput("matrix"),
config.gitHubVersion,
apiDetails,
logger
);
stats = { ...queriesStats, ...uploadStats };
} else {
logger.info("Not uploading results");
stats = { ...queriesStats };
}
  } catch (error) {
    core.setFailed(error.message);
    console.log(error);
@@ -97,6 +119,35 @@ async function run() {
    await sendStatusReport(startedAt, stats, error);
    return;
} finally {
if (core.isDebug() && config !== undefined) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
const databaseDirectory = util.getCodeQLDatabasePath(
config.tempDir,
language
);
const logsDirectory = path.join(databaseDirectory, "log");
const walkLogFiles = (dir: string) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile()) {
core.startGroup(
`CodeQL Debug Logs - ${language} - ${entry.name}`
);
process.stdout.write(
fs.readFileSync(path.resolve(dir, entry.name))
);
core.endGroup();
} else if (entry.isDirectory()) {
walkLogFiles(path.resolve(dir, entry.name));
}
}
};
walkLogFiles(logsDirectory);
}
}
  }
  await sendStatusReport(startedAt, stats);
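The finally block above only dumps CodeQL logs when core.isDebug() reports true, which is the case when the ACTIONS_STEP_DEBUG secret (or RUNNER_DEBUG=1) is enabled for the run. A condensed sketch of the same pattern, with an illustrative label:

    import * as fs from "fs";
    import * as path from "path";
    import * as core from "@actions/core";

    // Print every file under `dir` inside a collapsible log group,
    // but only when step-debug logging is switched on for the run.
    function dumpLogsIfDebug(dir: string, label: string): void {
      if (!core.isDebug()) {
        return;
      }
      for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
        const entryPath = path.resolve(dir, entry.name);
        if (entry.isFile()) {
          core.startGroup(`${label} - ${entry.name}`);
          process.stdout.write(fs.readFileSync(entryPath));
          core.endGroup();
        } else if (entry.isDirectory()) {
          dumpLogsIfDebug(entryPath, label);
        }
      }
    }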


@@ -4,14 +4,11 @@ import * as path from "path";
import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as analysisPaths from "./analysis-paths";
- import { GitHubApiDetails } from "./api-client";
import { getCodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { isScannedLanguage, Language } from "./languages";
import { Logger } from "./logging";
- import { RepositoryNwo } from "./repository";
import * as sharedEnv from "./shared-environment";
- import * as upload_lib from "./upload-lib";
import * as util from "./util";
export class CodeQLAnalysisError extends Error {
@@ -54,10 +51,6 @@ export interface QueriesStatusReport {
  analyze_failure_language?: string;
}
- export interface AnalysisStatusReport
-   extends upload_lib.UploadStatusReport,
-     QueriesStatusReport {}
async function setupPythonExtractor(logger: Logger) {
  const codeqlPython = process.env["CODEQL_PYTHON"];
  if (codeqlPython === undefined || codeqlPython.length === 0) {
@@ -217,24 +210,13 @@ export async function runQueries(
}
export async function runAnalyze(
- repositoryNwo: RepositoryNwo,
- commitOid: string,
- ref: string,
- analysisKey: string | undefined,
- analysisName: string | undefined,
- workflowRunID: number | undefined,
- checkoutPath: string,
- environment: string | undefined,
- apiDetails: GitHubApiDetails,
- doUpload: boolean,
- mode: util.Mode,
  outputDir: string,
  memoryFlag: string,
  addSnippetsFlag: string,
  threadsFlag: string,
  config: configUtils.Config,
  logger: Logger
- ): Promise<AnalysisStatusReport> {
+ ): Promise<QueriesStatusReport> {
  // Delete the tracer config env var to avoid tracing ourselves
  delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
@@ -253,26 +235,5 @@ export async function runAnalyze(
    logger
  );
- if (!doUpload) {
-   logger.info("Not uploading results");
-   return { ...queriesStats };
- }
- const uploadStats = await upload_lib.upload(
-   outputDir,
-   repositoryNwo,
-   commitOid,
-   ref,
-   analysisKey,
-   analysisName,
-   workflowRunID,
-   checkoutPath,
-   environment,
-   config.gitHubVersion,
-   apiDetails,
-   mode,
-   logger
- );
- return { ...queriesStats, ...uploadStats };
+ return { ...queriesStats };
}


@@ -11,11 +11,19 @@ export enum DisallowedAPIVersionReason {
  ACTION_TOO_NEW,
}
+ export type GitHubApiCombinedDetails = GitHubApiDetails &
+   GitHubApiExternalRepoDetails;
export interface GitHubApiDetails {
  auth: string;
  url: string;
}
+ export interface GitHubApiExternalRepoDetails {
+   externalRepoAuth: string | undefined;
+   url: string;
+ }
export const getApiClient = function (
  apiDetails: GitHubApiDetails,
  allowLocalRun = false
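GitHubApiCombinedDetails is simply the intersection of the two interfaces, so a single object literal can be passed wherever either shape is expected; and because externalRepoAuth is typed string | undefined rather than optional, callers without an external token still have to spell the property out. For example (values are placeholders):

    const apiDetails: GitHubApiCombinedDetails = {
      auth: "example-token",              // satisfies GitHubApiDetails
      externalRepoAuth: undefined,        // satisfies GitHubApiExternalRepoDetails
      url: "https://github.example.com",  // shared by both interfaces
    };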


@@ -7,6 +7,7 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as http from "@actions/http-client";
import { IHeaders } from "@actions/http-client/interfaces";
import * as toolcache from "@actions/tool-cache";
+ import { default as deepEqual } from "fast-deep-equal";
import * as semver from "semver";
import { v4 as uuidV4 } from "uuid";
@@ -176,7 +177,9 @@ async function getCodeQLBundleDownloadURL(
  // We now filter out any duplicates.
  // Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
  const uniqueDownloadSources = potentialDownloadSources.filter(
-   (url, index, self) => index === self.indexOf(url)
+   (source, index, self) => {
+     return !self.slice(0, index).some((other) => deepEqual(source, other));
+   }
  );
  const codeQLBundleName = getCodeQLBundleName();
  for (const downloadSource of uniqueDownloadSources) {
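The switch to fast-deep-equal matters because the download sources are objects, and self.indexOf(url) only matches by reference, so structurally identical sources were never collapsed. A small sketch of the same dedup idea in isolation (helper name is made up):

    import { default as deepEqual } from "fast-deep-equal";

    // Keep only the first structurally distinct occurrence of each element.
    function dedupeDeep<T>(items: T[]): T[] {
      return items.filter(
        (item, index, self) =>
          !self.slice(0, index).some((other) => deepEqual(item, other))
      );
    }

    dedupeDeep([{ repo: "a" }, { repo: "a" }, { repo: "b" }]);
    // => [{ repo: "a" }, { repo: "b" }]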


@@ -17,6 +17,7 @@ setupTests(test);
const sampleApiDetails = {
  auth: "token",
+ externalRepoAuth: "token",
  url: "https://github.example.com",
};
@@ -725,10 +726,7 @@ test("Invalid queries in workflow file handled correctly", async (t) => {
  // This function just needs to be type-correct; it doesn't need to do anything,
  // since we're deliberately passing in invalid data
  const codeQL = setCodeQL({
-   async resolveQueries(
-     _queries: string[],
-     _extraSearchPath: string | undefined
-   ) {
+   async resolveQueries() {
      return {
        byLanguage: {
          javascript: {},


@@ -274,7 +274,7 @@ async function addRemoteQueries(
  resultMap: Queries,
  queryUses: string,
  tempDir: string,
- githubUrl: string,
+ apiDetails: api.GitHubApiExternalRepoDetails,
  logger: Logger,
  configFile?: string
) {
@@ -302,7 +302,7 @@ async function addRemoteQueries(
  const checkoutPath = await externalQueries.checkoutExternalRepository(
    nwo,
    ref,
-   githubUrl,
+   apiDetails,
    tempDir,
    logger
  );
@@ -330,7 +330,7 @@ async function parseQueryUses(
  queryUses: string,
  tempDir: string,
  checkoutPath: string,
- githubUrl: string,
+ apiDetails: api.GitHubApiExternalRepoDetails,
  logger: Logger,
  configFile?: string
) {
@@ -369,7 +369,7 @@ async function parseQueryUses(
    resultMap,
    queryUses,
    tempDir,
-   githubUrl,
+   apiDetails,
    logger,
    configFile
  );
@@ -685,7 +685,7 @@ async function addQueriesFromWorkflow(
  resultMap: Queries,
  tempDir: string,
  checkoutPath: string,
- githubUrl: string,
+ apiDetails: api.GitHubApiExternalRepoDetails,
  logger: Logger
) {
  queriesInput = queriesInput.trim();
@@ -700,7 +700,7 @@ async function addQueriesFromWorkflow(
      query,
      tempDir,
      checkoutPath,
-     githubUrl,
+     apiDetails,
      logger
    );
  }
@@ -730,7 +730,7 @@ export async function getDefaultConfig(
  codeQL: CodeQL,
  checkoutPath: string,
  gitHubVersion: GitHubVersion,
- apiDetails: api.GitHubApiDetails,
+ apiDetails: api.GitHubApiCombinedDetails,
  logger: Logger
): Promise<Config> {
  const languages = await getLanguages(
@@ -749,7 +749,7 @@ export async function getDefaultConfig(
      queries,
      tempDir,
      checkoutPath,
-     apiDetails.url,
+     apiDetails,
      logger
    );
  }
@@ -780,7 +780,7 @@ async function loadConfig(
  codeQL: CodeQL,
  checkoutPath: string,
  gitHubVersion: GitHubVersion,
- apiDetails: api.GitHubApiDetails,
+ apiDetails: api.GitHubApiCombinedDetails,
  logger: Logger
): Promise<Config> {
  let parsedYAML: UserConfig;
@@ -838,7 +838,7 @@ async function loadConfig(
      queries,
      tempDir,
      checkoutPath,
-     apiDetails.url,
+     apiDetails,
      logger
    );
  }
@@ -863,7 +863,7 @@ async function loadConfig(
      query[QUERIES_USES_PROPERTY],
      tempDir,
      checkoutPath,
-     apiDetails.url,
+     apiDetails,
      logger,
      configFile
    );
@@ -947,7 +947,7 @@ export async function initConfig(
  codeQL: CodeQL,
  checkoutPath: string,
  gitHubVersion: GitHubVersion,
- apiDetails: api.GitHubApiDetails,
+ apiDetails: api.GitHubApiCombinedDetails,
  logger: Logger
): Promise<Config> {
  let config: Config;


@@ -85,7 +85,7 @@ test("checkoutExternalQueries", async (t) => {
    await externalQueries.checkoutExternalRepository(
      repoName,
      commit1Sha,
-     `file://${testRepoBaseDir}`,
+     { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" },
      tmpDir,
      getRunnerLogger(true)
    );
@@ -99,7 +99,7 @@ test("checkoutExternalQueries", async (t) => {
    await externalQueries.checkoutExternalRepository(
      repoName,
      commit2Sha,
-     `file://${testRepoBaseDir}`,
+     { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" },
      tmpDir,
      getRunnerLogger(true)
    );
@@ -108,3 +108,35 @@ test("checkoutExternalQueries", async (t) => {
    t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
  });
});
test("buildCheckoutURL", (t) => {
t.deepEqual(
externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.com",
externalRepoAuth: undefined,
}),
"https://github.com/foo/bar"
);
t.deepEqual(
externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.example.com/",
externalRepoAuth: undefined,
}),
"https://github.example.com/foo/bar"
);
t.deepEqual(
externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.com",
externalRepoAuth: "abc",
}),
"https://x-access-token:abc@github.com/foo/bar"
);
t.deepEqual(
externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.example.com/",
externalRepoAuth: "abc",
}),
"https://x-access-token:abc@github.example.com/foo/bar"
);
});


@@ -4,6 +4,7 @@ import * as path from "path";
import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as safeWhich from "@chrisgavin/safe-which";
+ import { GitHubApiExternalRepoDetails } from "./api-client";
import { Logger } from "./logging";
/**
@@ -12,7 +13,7 @@ import { Logger } from "./logging";
export async function checkoutExternalRepository(
  repository: string,
  ref: string,
- githubUrl: string,
+ apiDetails: GitHubApiExternalRepoDetails,
  tempDir: string,
  logger: Logger
): Promise<string> {
@@ -28,10 +29,10 @@ export async function checkoutExternalRepository(
  }
  if (!fs.existsSync(checkoutLocation)) {
-   const repoURL = `${githubUrl}/${repository}`;
+   const repoCloneURL = buildCheckoutURL(repository, apiDetails);
    await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
      "clone",
-     repoURL,
+     repoCloneURL,
      checkoutLocation,
    ]).exec();
    await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
@@ -44,3 +45,19 @@ export async function checkoutExternalRepository(
  return checkoutLocation;
}
export function buildCheckoutURL(
repository: string,
apiDetails: GitHubApiExternalRepoDetails
): string {
const repoCloneURL = new URL(apiDetails.url);
if (apiDetails.externalRepoAuth !== undefined) {
repoCloneURL.username = "x-access-token";
repoCloneURL.password = apiDetails.externalRepoAuth;
}
if (!repoCloneURL.pathname.endsWith("/")) {
repoCloneURL.pathname += "/";
}
repoCloneURL.pathname += `${repository}`;
return repoCloneURL.toString();
}


@@ -96,6 +96,7 @@ async function run() {
  const apiDetails = {
    auth: actionsUtil.getRequiredInput("token"),
+   externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
    url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
  };
@@ -107,18 +108,7 @@
  try {
    actionsUtil.prepareLocalRunEnvironment();
-   const workflowErrors = await actionsUtil.getWorkflowErrors();
-   // we do not want to worry users if linting is failing
-   // but we do want to send a status report containing this error code
-   // below
-   const userWorkflowErrors = workflowErrors.filter(
-     (o) => o.code !== "LintFailed"
-   );
-   if (userWorkflowErrors.length > 0) {
-     core.warning(actionsUtil.formatWorkflowErrors(userWorkflowErrors));
-   }
+   const workflowErrors = await actionsUtil.validateWorkflow();
    if (
      !(await actionsUtil.sendStatusReport(
@@ -126,7 +116,7 @@
        "init",
        "starting",
        startedAt,
-       actionsUtil.formatWorkflowCause(workflowErrors)
+       workflowErrors
      )
    ))
  ) {


@@ -5,7 +5,7 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as safeWhich from "@chrisgavin/safe-which";
import * as analysisPaths from "./analysis-paths";
- import { GitHubApiDetails } from "./api-client";
+ import { GitHubApiCombinedDetails, GitHubApiDetails } from "./api-client";
import { CodeQL, setupCodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { Logger } from "./logging";
@@ -45,7 +45,7 @@ export async function initConfig(
  codeQL: CodeQL,
  checkoutPath: string,
  gitHubVersion: util.GitHubVersion,
- apiDetails: GitHubApiDetails,
+ apiDetails: GitHubApiCombinedDetails,
  logger: Logger
): Promise<configUtils.Config> {
  logger.startGroup("Load language configuration");


@@ -96,6 +96,7 @@ interface InitArgs {
  repository: string;
  githubUrl: string;
  githubAuth: string;
+ externalRepositoryToken: string | undefined;
  debug: boolean;
}
@@ -108,6 +109,10 @@ program
    "--github-auth <auth>",
    "GitHub Apps token or personal access token. (Required)"
  )
+ .option(
+   "--external-repository-token <token>",
+   "A token for fetching external config files and queries if they reside in a private repository."
+ )
  .option(
    "--languages <languages>",
    "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo."
@@ -150,6 +155,7 @@ program
  const apiDetails = {
    auth: cmd.githubAuth,
+   externalRepoAuth: cmd.externalRepositoryToken,
    url: parseGithubUrl(cmd.githubUrl),
  };
@@ -379,17 +385,6 @@ program
  };
  await runAnalyze(
-   parseRepositoryNwo(cmd.repository),
-   cmd.commit,
-   parseRef(cmd.ref),
-   undefined,
-   undefined,
-   undefined,
-   cmd.checkoutPath || process.cwd(),
-   undefined,
-   apiDetails,
-   cmd.upload,
-   "runner",
    outputDir,
    getMemoryFlag(cmd.ram),
    getAddSnippetsFlag(cmd.addSnippets),
@@ -397,6 +392,22 @@ program
    config,
    logger
  );
if (!cmd.upload) {
logger.info("Not uploading results");
return;
}
await upload_lib.uploadFromRunner(
outputDir,
parseRepositoryNwo(cmd.repository),
cmd.commit,
parseRef(cmd.ref),
cmd.checkoutPath || process.cwd(),
config.gitHubVersion,
apiDetails,
logger
);
  } catch (e) {
    logger.error("Analyze failed");
    logger.error(e);
@@ -448,19 +459,14 @@ program
  };
  try {
    const gitHubVersion = await getGitHubVersion(apiDetails);
-   await upload_lib.upload(
+   await upload_lib.uploadFromRunner(
      cmd.sarifFile,
      parseRepositoryNwo(cmd.repository),
      cmd.commit,
      parseRef(cmd.ref),
-     undefined,
-     undefined,
-     undefined,
      cmd.checkoutPath || process.cwd(),
-     undefined,
      gitHubVersion,
      apiDetails,
-     "runner",
      logger
    );
  } catch (e) {


@@ -302,6 +302,7 @@ test("getCombinedTracerConfig - return undefined when no languages are traced la
    async getTracerEnv() {
      return {
        ODASA_TRACER_CONFIGURATION: "abc",
+       CODEQL_DIST: "/",
        foo: "bar",
      };
    },
@@ -318,21 +319,35 @@ test("getCombinedTracerConfig - valid spec file", async (t) => {
    const spec = path.join(tmpDir, "spec");
    fs.writeFileSync(spec, "foo.log\n2\nabc\ndef");
+   const bundlePath = path.join(tmpDir, "bundle");
+   const codeqlPlatform =
+     process.platform === "win32"
+       ? "win64"
+       : process.platform === "darwin"
+       ? "osx64"
+       : "linux64";
    const codeQL = setCodeQL({
      async getTracerEnv() {
        return {
          ODASA_TRACER_CONFIGURATION: spec,
+         CODEQL_DIST: bundlePath,
+         CODEQL_PLATFORM: codeqlPlatform,
          foo: "bar",
        };
      },
    });
    const result = await getCombinedTracerConfig(config, codeQL);
+   t.notDeepEqual(result, undefined);
    const expectedEnv = {
      foo: "bar",
+     CODEQL_DIST: bundlePath,
+     CODEQL_PLATFORM: codeqlPlatform,
      ODASA_TRACER_CONFIGURATION: result!.spec,
    };
    if (process.platform === "darwin") {
      expectedEnv["DYLD_INSERT_LIBRARIES"] = path.join(
        path.dirname(codeQL.getPath()),
@@ -349,6 +364,23 @@ test("getCombinedTracerConfig - valid spec file", async (t) => {
      );
    }
if (process.platform === "win32") {
expectedEnv["CODEQL_RUNNER"] = path.join(
bundlePath,
"tools/win64/runner.exe"
);
} else if (process.platform === "darwin") {
expectedEnv["CODEQL_RUNNER"] = path.join(
bundlePath,
"tools/osx64/runner"
);
} else {
expectedEnv["CODEQL_RUNNER"] = path.join(
bundlePath,
"tools/linux64/runner"
);
}
    t.deepEqual(result, {
      spec: path.join(tmpDir, "compound-spec"),
      env: expectedEnv,


@@ -185,5 +185,17 @@ export async function getCombinedTracerConfig(
    );
  }
// On macos it's necessary to prefix the build command with the runner exectuable
// on order to trace when System Integrity Protection is enabled.
// The exectuable also exists and works for other platforms so we output this env
// var with a path to the runner regardless so it's always available.
const runnerExeName = process.platform === "win32" ? "runner.exe" : "runner";
mainTracerConfig.env["CODEQL_RUNNER"] = path.join(
mainTracerConfig.env["CODEQL_DIST"],
"tools",
mainTracerConfig.env["CODEQL_PLATFORM"],
runnerExeName
);
  return mainTracerConfig;
}


@@ -1,9 +1,12 @@
import * as fs from "fs";
import * as path from "path";
import test from "ava"; import test from "ava";
import { getRunnerLogger } from "./logging"; import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils"; import { setupTests } from "./testing-utils";
import * as uploadLib from "./upload-lib"; import * as uploadLib from "./upload-lib";
import { GitHubVersion } from "./util"; import { GitHubVersion, withTmpDir } from "./util";
setupTests(test); setupTests(test);
@@ -70,8 +73,8 @@ test("validate correct payload used per version", async (t) => {
version, version,
"actions" "actions"
); );
t.truthy(payload.base_ref); t.deepEqual(payload.base_ref, "refs/heads/master");
t.truthy(payload.base_sha); t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
} }
for (const version of oldVersions) { for (const version of oldVersions) {
@@ -93,3 +96,38 @@ test("validate correct payload used per version", async (t) => {
t.falsy(payload.base_sha); t.falsy(payload.base_sha);
} }
}); });
test("finding SARIF files", async (t) => {
await withTmpDir(async (tmpDir) => {
// include a couple of sarif files
fs.writeFileSync(path.join(tmpDir, "a.sarif"), "");
fs.writeFileSync(path.join(tmpDir, "b.sarif"), "");
// other random files shouldn't be returned
fs.writeFileSync(path.join(tmpDir, "c.foo"), "");
// we should recursively look in subdirectories
fs.mkdirSync(path.join(tmpDir, "dir1"));
fs.writeFileSync(path.join(tmpDir, "dir1", "d.sarif"), "");
fs.mkdirSync(path.join(tmpDir, "dir1", "dir2"));
fs.writeFileSync(path.join(tmpDir, "dir1", "dir2", "e.sarif"), "");
// we should ignore symlinks
fs.mkdirSync(path.join(tmpDir, "dir3"));
fs.symlinkSync(tmpDir, path.join(tmpDir, "dir3", "symlink1"), "dir");
fs.symlinkSync(
path.join(tmpDir, "a.sarif"),
path.join(tmpDir, "dir3", "symlink2.sarif"),
"file"
);
const sarifFiles = uploadLib.findSarifFilesInDir(tmpDir);
t.deepEqual(sarifFiles, [
path.join(tmpDir, "a.sarif"),
path.join(tmpDir, "b.sarif"),
path.join(tmpDir, "dir1", "d.sarif"),
path.join(tmpDir, "dir1", "dir2", "e.sarif"),
]);
});
});
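One subtlety this test pins down: readdirSync with { withFileTypes: true } reports a symlink as neither a file nor a directory, so a walker that only branches on isFile()/isDirectory() never follows links, and symlink2.sarif is not picked up twice. A small sketch of that Dirent behaviour (path is illustrative):

    import * as fs from "fs";
    import * as path from "path";

    const dir = "/tmp/sarif-example"; // illustrative path
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      if (entry.isSymbolicLink()) {
        // isFile() and isDirectory() are both false here, so the walker above
        // silently skips this entry instead of following the link.
        console.log(`skipping symlink: ${path.join(dir, entry.name)}`);
      }
    }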


@@ -81,45 +81,43 @@ export interface UploadStatusReport {
  num_results_in_sarif?: number;
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
export function findSarifFilesInDir(sarifPath: string): string[] {
const sarifFiles: string[] = [];
const walkSarifFiles = (dir: string) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && entry.name.endsWith(".sarif")) {
sarifFiles.push(path.resolve(dir, entry.name));
} else if (entry.isDirectory()) {
walkSarifFiles(path.resolve(dir, entry.name));
}
}
};
walkSarifFiles(sarifPath);
return sarifFiles;
}
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
-export async function upload(
export async function uploadFromActions(
sarifPath: string,
repositoryNwo: RepositoryNwo,
commitOid: string,
ref: string,
-analysisKey: string | undefined,
analysisKey: string,
-analysisName: string | undefined,
analysisName: string,
-workflowRunID: number | undefined,
workflowRunID: number,
checkoutPath: string,
-environment: string | undefined,
environment: string,
gitHubVersion: util.GitHubVersion,
apiDetails: api.GitHubApiDetails,
-mode: util.Mode,
logger: Logger
): Promise<UploadStatusReport> {
-const sarifFiles: string[] = [];
-if (!fs.existsSync(sarifPath)) {
-throw new Error(`Path does not exist: ${sarifPath}`);
-}
-if (fs.lstatSync(sarifPath).isDirectory()) {
-const paths = fs
-.readdirSync(sarifPath)
-.filter((f) => f.endsWith(".sarif"))
-.map((f) => path.resolve(sarifPath, f));
-for (const filepath of paths) {
-sarifFiles.push(filepath);
-}
-if (sarifFiles.length === 0) {
-throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
-}
-} else {
-sarifFiles.push(sarifPath);
-}
return await uploadFiles(
-sarifFiles,
getSarifFilePaths(sarifPath),
repositoryNwo,
commitOid,
ref,
@@ -130,11 +128,58 @@ export async function upload(
environment,
gitHubVersion,
apiDetails,
-mode,
"actions",
logger
);
}
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
export async function uploadFromRunner(
sarifPath: string,
repositoryNwo: RepositoryNwo,
commitOid: string,
ref: string,
checkoutPath: string,
gitHubVersion: util.GitHubVersion,
apiDetails: api.GitHubApiDetails,
logger: Logger
): Promise<UploadStatusReport> {
return await uploadFiles(
getSarifFilePaths(sarifPath),
repositoryNwo,
commitOid,
ref,
undefined,
undefined,
undefined,
checkoutPath,
undefined,
gitHubVersion,
apiDetails,
"runner",
logger
);
}
function getSarifFilePaths(sarifPath: string) {
if (!fs.existsSync(sarifPath)) {
throw new Error(`Path does not exist: ${sarifPath}`);
}
let sarifFiles: string[];
if (fs.lstatSync(sarifPath).isDirectory()) {
sarifFiles = findSarifFilesInDir(sarifPath);
if (sarifFiles.length === 0) {
throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
}
} else {
sarifFiles = [sarifPath];
}
return sarifFiles;
}
// Counts the number of results in the given SARIF file
export function countResultsInSarif(sarif: string): number {
let numResults = 0;
@@ -213,7 +258,7 @@ export function buildPayload(
const githubEvent = JSON.parse(
fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
-payloadObj.base_ref = `refs/heads/$githubEvent.pull_request.base.ref`;
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
}
}
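The base_ref fix above is easy to miss, so a self-contained illustration may help: without the braces, the $... text is never interpolated. The event object below is a minimal stand-in, not the full webhook payload.

// Minimal stand-in for the parsed GITHUB_EVENT_PATH payload; real events carry many more fields.
const githubEvent = { pull_request: { base: { ref: "main", sha: "0123456789abcdef" } } };
const before = `refs/heads/$githubEvent.pull_request.base.ref`;
const after = `refs/heads/${githubEvent.pull_request.base.ref}`;
console.log(before); // "refs/heads/$githubEvent.pull_request.base.ref" (the old, broken output)
console.log(after); // "refs/heads/main" (the intended ref)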

View File

@@ -48,7 +48,7 @@ async function run() {
const gitHubVersion = await getGitHubVersion(apiDetails);
-const uploadStats = await upload_lib.upload(
const uploadStats = await upload_lib.uploadFromActions(
actionsUtil.getRequiredInput("sarif_file"),
parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")),
await actionsUtil.getCommitOid(),
@@ -60,7 +60,6 @@ async function run() {
actionsUtil.getRequiredInput("matrix"), actionsUtil.getRequiredInput("matrix"),
gitHubVersion, gitHubVersion,
apiDetails, apiDetails,
"actions",
getActionsLogger() getActionsLogger()
); );
await sendSuccessStatusReport(startedAt, uploadStats); await sendSuccessStatusReport(startedAt, uploadStats);

View File

@@ -267,14 +267,14 @@ export function checkGitHubVersionInRange(
disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_OLD
) {
logger.warning(
-`The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${version}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`
`The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${version.version}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`
);
}
if (
disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_NEW
) {
logger.warning(
-`GitHub Enterprise ${version} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`
`GitHub Enterprise ${version.version} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`
);
}
hasBeenWarnedAboutVersion = true;
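The switch from ${version} to ${version.version} fixes the message text itself: interpolating the whole object prints "[object Object]". A simplified sketch, assuming GitHubVersion carries the version string in a version field, as the change implies; GitHubVersionLike below is a made-up stand-in for the real type in ./util.

// Made-up stand-in for the real GitHubVersion type.
interface GitHubVersionLike {
  version: string;
}
const version: GitHubVersionLike = { version: "2.22" };
console.log(`GitHub Enterprise ${version} ...`); // "GitHub Enterprise [object Object] ..."
console.log(`GitHub Enterprise ${version.version} ...`); // "GitHub Enterprise 2.22 ..."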

View File

@@ -3,7 +3,7 @@ description: 'Upload the analysis results'
author: 'GitHub'
inputs:
  sarif_file:
-    description: The SARIF file or directory of SARIF files to be uploaded.
    description: The SARIF file or directory of SARIF files to be uploaded. Each upload should contain a maximum of 1000 results, any additional results are ignored.
    required: false
    default: '../results'
  checkout_path: