Mirror of https://github.com/github/codeql-action.git, synced 2025-12-14 03:20:11 +08:00.

Compare commits: codeql-bun...alexet/win (214 commits)
```diff
@@ -44,7 +44,6 @@
     "@typescript-eslint/no-unsafe-call": "off",
     "@typescript-eslint/no-unsafe-member-access": "off",
     "@typescript-eslint/no-unsafe-return": "off",
-    "@typescript-eslint/no-unused-vars": "off",
     "@typescript-eslint/no-var-requires": "off",
     "@typescript-eslint/prefer-regexp-exec": "off",
     "@typescript-eslint/require-await": "off",
```
.github/update-release-branch.py (2 changes, vendored)

```diff
@@ -95,7 +95,7 @@ def get_conductor(repo, pull_requests, other_commits):
 # This will not include any commits that exist on the release branch
 # that aren't on main.
 def get_commit_difference(repo):
-  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MAIN_BRANCH).strip().split('\n')
+  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '..' + MAIN_BRANCH).strip().split('\n')
 
   # Convert to full-fledged commit objects
   commits = [repo.get_commit(c) for c in commits]
```
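For reference, the switch from `...` to `..` above changes which commits `git log` reports: the three-dot form lists commits reachable from either ref but not both, while the two-dot form lists only commits on `main` that are not yet on the release branch, which is what the comment describes. A quick way to compare the two locally (the branch names here are only illustrative):

```bash
# Three-dot range: symmetric difference, i.e. commits unique to either side
git log --oneline origin/v1...main

# Two-dot range: commits on main that are not yet on origin/v1,
# matching the "will not include any commits that exist on the
# release branch that aren't on main" comment above
git log --oneline origin/v1..main
```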
.github/workflows/check-expected-release-files.yml (new file, 22 lines, vendored)

```diff
@@ -0,0 +1,22 @@
+name: Check Expected Release Files
+
+on:
+  pull_request:
+    paths:
+      - .github/workflows/check-expected-release-files.yml
+      - src/defaults.json
+
+jobs:
+  check-expected-release-files:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout CodeQL Action
+        uses: actions/checkout@v2
+      - name: Check Expected Release Files
+        run: |
+          bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"
+          set -x
+          for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz" "codeql-runner-linux" "codeql-runner-macos" "codeql-runner-win.exe"; do
+            curl --location --fail --head --request GET "https://github.com/github/codeql-action/releases/download/$bundle_version/$expected_file" > /dev/null
+          done
```
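The same check can be run outside of Actions; this is just a shortened sketch of the workflow step above, executed from the repository root (the full workflow loops over all seven bundle and runner assets):

```bash
#!/bin/bash
set -eu

# Read the expected CodeQL bundle tag from the checked-in defaults
bundle_version="$(jq -r ".bundleVersion" ./src/defaults.json)"

# HEAD-request each expected release asset; with --fail, a missing asset
# (HTTP 404) makes curl exit non-zero and aborts the script
for expected_file in "codeql-bundle.tar.gz" "codeql-runner-linux"; do
  curl --location --fail --head \
    "https://github.com/github/codeql-action/releases/download/$bundle_version/$expected_file" > /dev/null
done
echo "All expected release files are present for $bundle_version"
```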
.github/workflows/codeql.yml (1 change, vendored)

```diff
@@ -4,6 +4,7 @@ on:
   push:
     branches: [main, v1]
   pull_request:
+    branches: [main, v1]
 
 jobs:
   build:
```
.github/workflows/integration-testing.yml (deleted, 484 lines, vendored)

@@ -1,484 +0,0 @@
name: "Integration Testing"
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main, v1]
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
multi-language-repo_test-autodetect-languages:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
- uses: ./../action/init
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: ./build.sh
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
- run: |
|
|
||||||
cd "$RUNNER_TEMP/codeql_databases"
|
|
||||||
# List all directories as there will be precisely one directory per database
|
|
||||||
# but there may be other files in this directory such as query suites.
|
|
||||||
if [ "$(ls -d */ | wc -l)" != 6 ] || \
|
|
||||||
[[ ! -d cpp ]] || \
|
|
||||||
[[ ! -d csharp ]] || \
|
|
||||||
[[ ! -d go ]] || \
|
|
||||||
[[ ! -d java ]] || \
|
|
||||||
[[ ! -d javascript ]] || \
|
|
||||||
[[ ! -d python ]]; then
|
|
||||||
echo "Did not find expected number of databases. Database dir contains: $(ls)"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
multi-language-repo_test-custom-queries-and-remote-config:
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
|
||||||
tools: [~, latest]
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
languages: cpp,csharp,java,javascript,python
|
|
||||||
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: ./build.sh
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
# Currently is not possible to analyze Go in conjunction with other languages in macos
|
|
||||||
multi-language-repo_test-go-custom-queries:
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-go@v2
|
|
||||||
if: ${{ matrix.os == 'macos-latest' }}
|
|
||||||
with:
|
|
||||||
go-version: '^1.13.1'
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: go
|
|
||||||
config-file: ./.github/codeql/custom-queries.yml
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: ./build.sh
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
go-custom-tracing:
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
env:
|
|
||||||
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-go@v2
|
|
||||||
if: ${{ matrix.os == 'macos-latest' }}
|
|
||||||
with:
|
|
||||||
go-version: '^1.13.1'
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: go
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: go build main.go
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
multi-language-repo_rubocop:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
- name: Set up Ruby
|
|
||||||
uses: ruby/setup-ruby@v1
|
|
||||||
with:
|
|
||||||
ruby-version: 2.6
|
|
||||||
- name: Install Code Scanning integration
|
|
||||||
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
|
|
||||||
- name: Install dependencies
|
|
||||||
run: bundle install
|
|
||||||
- name: Rubocop run
|
|
||||||
run: |
|
|
||||||
bash -c "
|
|
||||||
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
|
|
||||||
[[ $? -ne 2 ]]
|
|
||||||
"
|
|
||||||
- uses: ./../action/upload-sarif
|
|
||||||
with:
|
|
||||||
sarif_file: rubocop.sarif
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
test-proxy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
container:
|
|
||||||
image: ubuntu:18.04
|
|
||||||
options: --dns 127.0.0.1
|
|
||||||
services:
|
|
||||||
squid-proxy:
|
|
||||||
image: datadog/squid:latest
|
|
||||||
ports:
|
|
||||||
- 3128:3128
|
|
||||||
env:
|
|
||||||
https_proxy: http://squid-proxy:3128
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: javascript
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-javascript-ubuntu:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
# Pass --config-file here, but not for other jobs in this workflow.
|
|
||||||
# This means we're testing the config file parsing in the runner
|
|
||||||
# but not slowing down all jobs unnecessarily as it doesn't add much
|
|
||||||
# testing the parsing on different operating systems and languages.
|
|
||||||
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-javascript-windows:
|
|
||||||
runs-on: windows-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-javascript-macos:
|
|
||||||
runs-on: macos-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-csharp-ubuntu:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
run: |
|
|
||||||
. ./codeql-runner/codeql-env.sh
|
|
||||||
dotnet build
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-csharp-windows:
|
|
||||||
runs-on: windows-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
shell: powershell
|
|
||||||
run: |
|
|
||||||
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
|
|
||||||
dotnet build
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-csharp-macos:
|
|
||||||
runs-on: macos-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
. ./codeql-runner/codeql-env.sh
|
|
||||||
dotnet build
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
|
|
||||||
runner-analyze-csharp-autobuild-ubuntu:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-linux autobuild
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-csharp-autobuild-windows:
|
|
||||||
runs-on: windows-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
shell: powershell
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-win.exe autobuild
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-csharp-autobuild-macos:
|
|
||||||
runs-on: macos-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos autobuild
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-upload-sarif:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Upload with runner
|
|
||||||
run: |
|
|
||||||
# Deliberately don't use TEST_MODE here. This is specifically testing
|
|
||||||
# the compatibility with the API.
|
|
||||||
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
.github/workflows/pr-checks.yml (574 changes, vendored)

```diff
@@ -1,5 +1,8 @@
 name: "PR checks"
 
+env:
+  GO111MODULE: auto
+
 on:
   push:
     branches: [main, v1]
@@ -20,25 +23,7 @@ jobs:
     steps:
     - uses: actions/checkout@v2
     - name: Check generated JavaScript
-      run: |
-        # Sanity check that repo is clean to start with
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then this workflow needs attention...
-          >&2 echo "Failed: Repo should be clean before testing!"
-          exit 1
-        fi
-        # Wipe the lib directory incase there are extra unnecessary files in there
-        rm -rf lib
-        # Generate the JavaScript files
-        npm run-script build
-        # Check that repo is still clean
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then the PR needs attention
-          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
-          git status
-          exit 1
-        fi
-        echo "Success: JavaScript files are up to date"
+      run: .github/workflows/script/check-js.sh
 
   check-node-modules:
     runs-on: ubuntu-latest
@@ -46,27 +31,10 @@ jobs:
     steps:
    - uses: actions/checkout@v2
    - name: Check node modules up to date
-      run: |
-        # Sanity check that repo is clean to start with
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then this workflow needs attention...
-          >&2 echo "Failed: Repo should be clean before testing!"
-          exit 1
-        fi
-        # Reinstall modules and then clean to remove absolute paths
-        # Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
-        npm ci
-        npm run removeNPMAbsolutePaths
-        # Check that repo is still clean
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then the PR needs attention
-          >&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
-          git status
-          exit 1
-        fi
-        echo "Success: node_modules are up to date"
+      run: .github/workflows/script/check-node-modules.sh
 
   npm-test:
+    needs: [check-js, check-node-modules]
     strategy:
       matrix:
         os: [ubuntu-latest,macos-latest]
```
@@ -76,3 +44,533 @@ jobs:
|
|||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- name: npm run-script test
|
- name: npm run-script test
|
||||||
run: npm run-script test
|
run: npm run-script test
|
||||||
|
|
||||||
|
multi-language-repo_test-autodetect-languages:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
- uses: ./../action/init
|
||||||
|
- name: Build code
|
||||||
|
shell: bash
|
||||||
|
run: ./build.sh
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
- run: |
|
||||||
|
cd "$RUNNER_TEMP/codeql_databases"
|
||||||
|
# List all directories as there will be precisely one directory per database
|
||||||
|
# but there may be other files in this directory such as query suites.
|
||||||
|
if [ "$(ls -d */ | wc -l)" != 6 ] || \
|
||||||
|
[[ ! -d cpp ]] || \
|
||||||
|
[[ ! -d csharp ]] || \
|
||||||
|
[[ ! -d go ]] || \
|
||||||
|
[[ ! -d java ]] || \
|
||||||
|
[[ ! -d javascript ]] || \
|
||||||
|
[[ ! -d python ]]; then
|
||||||
|
echo "Did not find expected number of databases. Database dir contains: $(ls)"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
multi-language-repo_test-custom-queries-and-remote-config:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||||
|
tools: [~, latest]
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
tools: ${{ matrix.tools }}
|
||||||
|
languages: cpp,csharp,java,javascript,python
|
||||||
|
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
|
||||||
|
- name: Build code
|
||||||
|
shell: bash
|
||||||
|
run: ./build.sh
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
# Currently is not possible to analyze Go in conjunction with other languages in macos
|
||||||
|
multi-language-repo_test-go-custom-queries:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/setup-go@v2
|
||||||
|
if: ${{ matrix.os == 'macos-latest' }}
|
||||||
|
with:
|
||||||
|
go-version: '^1.13.1'
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
languages: go
|
||||||
|
config-file: ./.github/codeql/custom-queries.yml
|
||||||
|
- name: Build code
|
||||||
|
shell: bash
|
||||||
|
run: ./build.sh
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
go-custom-tracing:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
env:
|
||||||
|
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/setup-go@v2
|
||||||
|
if: ${{ matrix.os == 'macos-latest' }}
|
||||||
|
with:
|
||||||
|
go-version: '^1.13.1'
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
languages: go
|
||||||
|
- name: Build code
|
||||||
|
shell: bash
|
||||||
|
run: go build main.go
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
go-custom-tracing-autobuild:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
# No need to test Go autobuild on multiple OSes since
|
||||||
|
# we're testing Go custom tracing with a manual build on all OSes.
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
languages: go
|
||||||
|
- uses: ./../action/autobuild
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
multi-language-repo_rubocop:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
- name: Set up Ruby
|
||||||
|
uses: ruby/setup-ruby@v1
|
||||||
|
with:
|
||||||
|
ruby-version: 2.6
|
||||||
|
- name: Install Code Scanning integration
|
||||||
|
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
|
||||||
|
- name: Install dependencies
|
||||||
|
run: bundle install
|
||||||
|
- name: Rubocop run
|
||||||
|
run: |
|
||||||
|
bash -c "
|
||||||
|
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
|
||||||
|
[[ $? -ne 2 ]]
|
||||||
|
"
|
||||||
|
- uses: ./../action/upload-sarif
|
||||||
|
with:
|
||||||
|
sarif_file: rubocop.sarif
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
test-proxy:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ubuntu:18.04
|
||||||
|
options: --dns 127.0.0.1
|
||||||
|
services:
|
||||||
|
squid-proxy:
|
||||||
|
image: datadog/squid:latest
|
||||||
|
ports:
|
||||||
|
- 3128:3128
|
||||||
|
env:
|
||||||
|
https_proxy: http://squid-proxy:3128
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
languages: javascript
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
runner-analyze-javascript-ubuntu:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Build runner
|
||||||
|
run: |
|
||||||
|
cd runner
|
||||||
|
npm install
|
||||||
|
npm run build-runner
|
||||||
|
|
||||||
|
- name: Run init
|
||||||
|
run: |
|
||||||
|
# Pass --config-file here, but not for other jobs in this workflow.
|
||||||
|
# This means we're testing the config file parsing in the runner
|
||||||
|
# but not slowing down all jobs unnecessarily as it doesn't add much
|
||||||
|
# testing the parsing on different operating systems and languages.
|
||||||
|
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Run analyze
|
||||||
|
run: |
|
||||||
|
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
runner-analyze-javascript-windows:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: windows-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Build runner
|
||||||
|
run: |
|
||||||
|
cd runner
|
||||||
|
npm install
|
||||||
|
npm run build-runner
|
||||||
|
|
||||||
|
- name: Run init
|
||||||
|
run: |
|
||||||
|
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Run analyze
|
||||||
|
run: |
|
||||||
|
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
runner-analyze-javascript-macos:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: macos-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Build runner
|
||||||
|
run: |
|
||||||
|
cd runner
|
||||||
|
npm install
|
||||||
|
npm run build-runner
|
||||||
|
|
||||||
|
- name: Run init
|
||||||
|
run: |
|
||||||
|
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Run analyze
|
||||||
|
run: |
|
||||||
|
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
runner-analyze-csharp-ubuntu:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
|
- name: Build runner
|
||||||
|
run: |
|
||||||
|
cd ../action/runner
|
||||||
|
npm install
|
||||||
|
npm run build-runner
|
||||||
|
|
||||||
|
- name: Run init
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Build code
|
||||||
|
run: |
|
||||||
|
. ./codeql-runner/codeql-env.sh
|
||||||
|
$CODEQL_RUNNER dotnet build
|
||||||
|
|
||||||
|
- name: Run analyze
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
runner-analyze-csharp-windows:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: windows-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
|
- name: Build runner
|
||||||
|
run: |
|
||||||
|
cd ../action/runner
|
||||||
|
npm install
|
||||||
|
npm run build-runner
|
||||||
|
|
||||||
|
- name: Run init
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Build code
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
|
||||||
|
& $Env:CODEQL_RUNNER dotnet build
|
||||||
|
|
||||||
|
- name: Run analyze
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
runner-analyze-csharp-macos:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: macos-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
|
- name: Build runner
|
||||||
|
run: |
|
||||||
|
cd ../action/runner
|
||||||
|
npm install
|
||||||
|
npm run build-runner
|
||||||
|
|
||||||
|
- name: Run init
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Build code
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
. ./codeql-runner/codeql-env.sh
|
||||||
|
$CODEQL_RUNNER dotnet build
|
||||||
|
|
||||||
|
- name: Run analyze
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
|
||||||
|
runner-analyze-csharp-autobuild-ubuntu:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
|
- name: Build runner
|
||||||
|
run: |
|
||||||
|
cd ../action/runner
|
||||||
|
npm install
|
||||||
|
npm run build-runner
|
||||||
|
|
||||||
|
- name: Run init
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Build code
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-linux autobuild
|
||||||
|
|
||||||
|
- name: Run analyze
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
runner-analyze-csharp-autobuild-windows:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: windows-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
|
- name: Build runner
|
||||||
|
run: |
|
||||||
|
cd ../action/runner
|
||||||
|
npm install
|
||||||
|
npm run build-runner
|
||||||
|
|
||||||
|
- name: Run init
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Build code
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-win.exe autobuild
|
||||||
|
|
||||||
|
- name: Run analyze
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
runner-analyze-csharp-autobuild-macos:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: macos-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
|
- name: Build runner
|
||||||
|
run: |
|
||||||
|
cd ../action/runner
|
||||||
|
npm install
|
||||||
|
npm run build-runner
|
||||||
|
|
||||||
|
- name: Run init
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Build code
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-macos autobuild
|
||||||
|
|
||||||
|
- name: Run analyze
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
runner-upload-sarif:
|
||||||
|
needs: [check-js, check-node-modules]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Build runner
|
||||||
|
run: |
|
||||||
|
cd runner
|
||||||
|
npm install
|
||||||
|
npm run build-runner
|
||||||
|
|
||||||
|
- name: Upload with runner
|
||||||
|
run: |
|
||||||
|
# Deliberately don't use TEST_MODE here. This is specifically testing
|
||||||
|
# the compatibility with the API.
|
||||||
|
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|||||||
.github/workflows/script/check-js.sh (new executable file, 21 lines, vendored)

```diff
@@ -0,0 +1,21 @@
+#!/bin/bash
+set -eu
+
+# Sanity check that repo is clean to start with
+if [ ! -z "$(git status --porcelain)" ]; then
+  # If we get a fail here then this workflow needs attention...
+  >&2 echo "Failed: Repo should be clean before testing!"
+  exit 1
+fi
+# Wipe the lib directory incase there are extra unnecessary files in there
+rm -rf lib
+# Generate the JavaScript files
+npm run-script build
+# Check that repo is still clean
+if [ ! -z "$(git status --porcelain)" ]; then
+  # If we get a fail here then the PR needs attention
+  >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
+  git status
+  exit 1
+fi
+echo "Success: JavaScript files are up to date"
```
.github/workflows/script/check-node-modules.sh (new executable file, 21 lines, vendored)

```diff
@@ -0,0 +1,21 @@
+#!/bin/bash
+set -eu
+
+# Sanity check that repo is clean to start with
+if [ ! -z "$(git status --porcelain)" ]; then
+  # If we get a fail here then this workflow needs attention...
+  >&2 echo "Failed: Repo should be clean before testing!"
+  exit 1
+fi
+# Reinstall modules and then clean to remove absolute paths
+# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
+npm ci
+npm run removeNPMAbsolutePaths
+# Check that repo is still clean
+if [ ! -z "$(git status --porcelain)" ]; then
+  # If we get a fail here then the PR needs attention
+  >&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
+  git status
+  exit 1
+fi
+echo "Success: node_modules are up to date"
```
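Both helper scripts expect to be run from the repository root and abort if the working tree is dirty, so they can also be used locally before pushing to catch stale generated files:

```bash
# Run the same checks that the PR workflow runs
.github/workflows/script/check-js.sh
.github/workflows/script/check-node-modules.sh
```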
.github/workflows/update-release-branch.yml (1 change, vendored)

```diff
@@ -12,6 +12,7 @@ on:
 jobs:
   update:
     runs-on: ubuntu-latest
+    if: ${{ github.repository == 'github/codeql-action' }}
     steps:
     - uses: actions/checkout@v2
       with:
```
README.md (11 changes)

````diff
@@ -96,7 +96,16 @@ Use the `config-file` parameter of the `init` action to enable the configuration
     config-file: ./.github/codeql/codeql-config.yml
 ```
 
-The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
+The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
+
+```yaml
+- uses: github/codeql-action/init@v1
+  with:
+    config-file: owner/repo/codeql-config.yml@branch
+    external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
+```
+
+For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
 
 If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
 
````
```diff
@@ -19,6 +19,9 @@ inputs:
   queries:
     description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
     required: false
+  external-repository-token:
+    description: A token for fetching external config files and queries if they reside in a private repository.
+    required: false
   setup-python-dependencies:
     description: Try to auto-install your python dependencies
     required: true
```
185
lib/actions-util.js
generated
185
lib/actions-util.js
generated
@@ -51,6 +51,13 @@ function getRequiredEnvParam(paramName) {
|
|||||||
return value;
|
return value;
|
||||||
}
|
}
|
||||||
exports.getRequiredEnvParam = getRequiredEnvParam;
|
exports.getRequiredEnvParam = getRequiredEnvParam;
|
||||||
|
function getTemporaryDirectory() {
|
||||||
|
const value = process.env["CODEQL_ACTION_TEMP"];
|
||||||
|
return value !== undefined && value !== ""
|
||||||
|
? value
|
||||||
|
: getRequiredEnvParam("RUNNER_TEMP");
|
||||||
|
}
|
||||||
|
exports.getTemporaryDirectory = getTemporaryDirectory;
|
||||||
/**
|
/**
|
||||||
* Ensures all required environment variables are set in the context of a local run.
|
* Ensures all required environment variables are set in the context of a local run.
|
||||||
*/
|
*/
|
||||||
@@ -101,93 +108,126 @@ exports.getCommitOid = async function () {
function isObject(o) {
return o !== null && typeof o === "object";
}
-var MissingTriggers;
+const GLOB_PATTERN = new RegExp("(\\*\\*?)");
-(function (MissingTriggers) {
+function escapeRegExp(string) {
-MissingTriggers[MissingTriggers["None"] = 0] = "None";
+return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
-MissingTriggers[MissingTriggers["Push"] = 1] = "Push";
+}
-MissingTriggers[MissingTriggers["PullRequest"] = 2] = "PullRequest";
+function patternToRegExp(value) {
-})(MissingTriggers || (MissingTriggers = {}));
+return new RegExp(`^${value
+.toString()
+.split(GLOB_PATTERN)
+.reduce(function (arr, cur) {
+if (cur === "**") {
+arr.push(".*?");
+}
+else if (cur === "*") {
+arr.push("[^/]*?");
+}
+else if (cur) {
+arr.push(escapeRegExp(cur));
+}
+return arr;
+}, [])
+.join("")}$`);
+}
+// this function should return true if patternA is a superset of patternB
+// e.g: * is a superset of main-* but main-* is not a superset of *.
+function patternIsSuperset(patternA, patternB) {
+return patternToRegExp(patternA).test(patternB);
+}
+exports.patternIsSuperset = patternIsSuperset;
+function branchesToArray(branches) {
+if (typeof branches === "string") {
+return [branches];
+}
+if (Array.isArray(branches)) {
+if (branches.length === 0) {
+return "**";
+}
+return branches;
+}
+return "**";
+}
function toCodedErrors(errors) {
return Object.entries(errors).reduce((acc, [key, value]) => {
acc[key] = { message: value, code: key };
return acc;
}, {});
}
+// code to send back via status report
+// message to add as a warning annotation to the run
exports.WorkflowErrors = toCodedErrors({
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
-MissingHooks: `Please specify on.push and on.pull_request hooks so that Code Scanning can compare pull requests against the state of the base branch.`,
-MissingPullRequestHook: `Please specify an on.pull_request hook so that Code Scanning is run against pull requests.`,
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
});
-function validateWorkflow(doc) {
+function getWorkflowErrors(doc) {
-var _a, _b, _c, _d, _e;
+var _a, _b, _c, _d, _e, _f, _g, _h;
const errors = [];
-// .jobs[key].steps[].run
+const jobName = process.env.GITHUB_JOB;
-for (const job of Object.values(((_a = doc) === null || _a === void 0 ? void 0 : _a.jobs) || {})) {
+if (jobName) {
-for (const step of ((_b = job) === null || _b === void 0 ? void 0 : _b.steps) || []) {
+const job = (_b = (_a = doc) === null || _a === void 0 ? void 0 : _a.jobs) === null || _b === void 0 ? void 0 : _b[jobName];
+const steps = (_c = job) === null || _c === void 0 ? void 0 : _c.steps;
+if (Array.isArray(steps)) {
+for (const step of steps) {
// this was advice that we used to give in the README
// we actually want to run the analysis on the merge commit
// to produce results that are more inline with expectations
// (i.e: this is what will happen if you merge this PR)
// and avoid some race conditions
-if (((_c = step) === null || _c === void 0 ? void 0 : _c.run) === "git checkout HEAD^2") {
+if (((_d = step) === null || _d === void 0 ? void 0 : _d.run) === "git checkout HEAD^2") {
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
+break;
}
}
}
-let missing = MissingTriggers.None;
+}
+let missingPush = false;
if (doc.on === undefined) {
-missing = MissingTriggers.Push | MissingTriggers.PullRequest;
+// this is not a valid config
}
else if (typeof doc.on === "string") {
-switch (doc.on) {
+if (doc.on === "pull_request") {
-case "push":
+missingPush = true;
-missing = MissingTriggers.PullRequest;
-break;
-case "pull_request":
-missing = MissingTriggers.Push;
-break;
-default:
-missing = MissingTriggers.Push | MissingTriggers.PullRequest;
-break;
}
}
else if (Array.isArray(doc.on)) {
-if (!doc.on.includes("push")) {
+const hasPush = doc.on.includes("push");
-missing = missing | MissingTriggers.Push;
+const hasPullRequest = doc.on.includes("pull_request");
-}
+if (hasPullRequest && !hasPush) {
-if (!doc.on.includes("pull_request")) {
+missingPush = true;
-missing = missing | MissingTriggers.PullRequest;
}
}
else if (isObject(doc.on)) {
-if (!Object.prototype.hasOwnProperty.call(doc.on, "pull_request")) {
+const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
-missing = missing | MissingTriggers.PullRequest;
+const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
+if (!hasPush && hasPullRequest) {
+missingPush = true;
}
-if (!Object.prototype.hasOwnProperty.call(doc.on, "push")) {
+if (hasPush && hasPullRequest) {
-missing = missing | MissingTriggers.Push;
+const paths = (_e = doc.on.push) === null || _e === void 0 ? void 0 : _e.paths;
-}
-else {
-const paths = (_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.paths;
// if you specify paths or paths-ignore you can end up with commits that have no baseline
// if they didn't change any files
// currently we cannot go back through the history and find the most recent baseline
if (Array.isArray(paths) && paths.length > 0) {
errors.push(exports.WorkflowErrors.PathsSpecified);
}
-const pathsIgnore = (_e = doc.on.push) === null || _e === void 0 ? void 0 : _e["paths-ignore"];
+const pathsIgnore = (_f = doc.on.push) === null || _f === void 0 ? void 0 : _f["paths-ignore"];
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
}
}
-if (doc.on.push) {
+// if doc.on.pull_request is null that means 'all branches'
-const push = doc.on.push.branches || [];
+// if doc.on.pull_request is undefined that means 'off'
-if (doc.on.pull_request) {
+// we only want to check for mismatched branches if pull_request is on.
-const pull_request = doc.on.pull_request.branches || [];
+if (doc.on.pull_request !== undefined) {
-const difference = pull_request.filter((value) => !push.includes(value));
+const push = branchesToArray((_g = doc.on.push) === null || _g === void 0 ? void 0 : _g.branches);
+if (push !== "**") {
+const pull_request = branchesToArray((_h = doc.on.pull_request) === null || _h === void 0 ? void 0 : _h.branches);
+if (pull_request !== "**") {
+const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
if (difference.length > 0) {
// there are branches in pull_request that may not have a baseline
// because we are not building them on push
@@ -201,28 +241,41 @@ function validateWorkflow(doc) {
}
}
}
-switch (missing) {
+}
-case MissingTriggers.PullRequest | MissingTriggers.Push:
+if (missingPush) {
-errors.push(exports.WorkflowErrors.MissingHooks);
-break;
-case MissingTriggers.PullRequest:
-errors.push(exports.WorkflowErrors.MissingPullRequestHook);
-break;
-case MissingTriggers.Push:
errors.push(exports.WorkflowErrors.MissingPushHook);
-break;
}
return errors;
}
-exports.validateWorkflow = validateWorkflow;
-async function getWorkflowErrors() {
-const workflow = await getWorkflow();
-if (workflow === undefined) {
-return [];
-}
-return validateWorkflow(workflow);
-}
exports.getWorkflowErrors = getWorkflowErrors;
+async function validateWorkflow() {
+let workflow;
+try {
+workflow = await getWorkflow();
+}
+catch (e) {
+return `error: getWorkflow() failed: ${e.toString()}`;
+}
+let workflowErrors;
+try {
+workflowErrors = getWorkflowErrors(workflow);
+}
+catch (e) {
+return `error: getWorkflowErrors() failed: ${e.toString()}`;
+}
+if (workflowErrors.length > 0) {
+let message;
+try {
+message = formatWorkflowErrors(workflowErrors);
+}
+catch (e) {
+return `error: formatWorkflowErrors() failed: ${e.toString()}`;
+}
+core.warning(message);
+}
+return formatWorkflowCause(workflowErrors);
+}
+exports.validateWorkflow = validateWorkflow;
function formatWorkflowErrors(errors) {
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
const errorsList = errors.map((e) => e.message).join(" ");
@@ -239,13 +292,7 @@ exports.formatWorkflowCause = formatWorkflowCause;
async function getWorkflow() {
const relativePath = await getWorkflowPath();
const absolutePath = path.join(getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
-try {
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
-}
-catch (e) {
-core.warning(`Could not read workflow: ${e.toString()}`);
-return undefined;
-}
}
exports.getWorkflow = getWorkflow;
/**
@@ -428,10 +475,10 @@ async function sendStatusReport(statusReport) {
// this means that this action version is no longer compatible with the API
// we still want to continue as it is likely the analysis endpoint will work
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
-core.warning("CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.");
+core.debug("CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.");
}
else {
-core.warning("CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.");
+core.debug("CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.");
}
return true;
}
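The compiled output above is dense; the following is a minimal TypeScript sketch of the branch-pattern superset check this diff introduces. The function names mirror the compiled code, but the simplified bodies and type annotations here are assumptions for illustration, not the repository's actual source.

// Hedged sketch of the pattern logic in lib/actions-util.js above; not the exact source.
const GLOB_PATTERN = new RegExp("(\\*\\*?)");

function escapeRegExp(s: string): string {
  return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

function patternToRegExp(value: string): RegExp {
  // "**" may cross path separators, "*" stops at "/", everything else matches literally.
  const parts = value.split(GLOB_PATTERN).map((cur) =>
    cur === "**" ? ".*?" : cur === "*" ? "[^/]*?" : cur ? escapeRegExp(cur) : "");
  return new RegExp(`^${parts.join("")}$`);
}

// True if patternA matches every branch name that patternB names,
// e.g. "*" is a superset of "main-*", but "main-*" is not a superset of "*".
function patternIsSuperset(patternA: string, patternB: string): boolean {
  return patternToRegExp(patternA).test(patternB);
}

Checking each on.pull_request branch against the on.push patterns this way is what lets a wildcard such as feature/* on push cover a literal feature/moose on pull_request without raising MismatchedBranches.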
File diff suppressed because one or more lines are too long

312 lib/actions-util.test.js (generated)
@@ -11,9 +11,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
+const yaml = __importStar(require("js-yaml"));
const sinon_1 = __importDefault(require("sinon"));
const actionsutil = __importStar(require("./actions-util"));
const testing_utils_1 = require("./testing-utils");
+function errorCodes(actual, expected) {
+return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
+}
testing_utils_1.setupTests(ava_1.default);
ava_1.default("getRef() throws on the empty string", async (t) => {
process.env["GITHUB_REF"] = "";
@@ -68,99 +72,181 @@ ava_1.default("prepareEnvironment() when a local run", (t) => {
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
});
-ava_1.default("validateWorkflow() when on is missing", (t) => {
+ava_1.default("getWorkflowErrors() when on is empty", (t) => {
-const errors = actionsutil.validateWorkflow({});
+const errors = actionsutil.getWorkflowErrors({ on: {} });
-t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
+t.deepEqual(...errorCodes(errors, []));
});
-ava_1.default("validateWorkflow() when on.push is missing", (t) => {
+ava_1.default("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
-const errors = actionsutil.validateWorkflow({ on: {} });
+const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
-console.log(errors);
+t.deepEqual(...errorCodes(errors, []));
-t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
});
-ava_1.default("validateWorkflow() when on.push is an array missing pull_request", (t) => {
+ava_1.default("getWorkflowErrors() when on.push is an array missing push", (t) => {
-const errors = actionsutil.validateWorkflow({ on: ["push"] });
+const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
-t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPullRequestHook]);
+t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
});
-ava_1.default("validateWorkflow() when on.push is an array missing push", (t) => {
+ava_1.default("getWorkflowErrors() when on.push is valid", (t) => {
-const errors = actionsutil.validateWorkflow({ on: ["pull_request"] });
+const errors = actionsutil.getWorkflowErrors({
-t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPushHook]);
-});
-ava_1.default("validateWorkflow() when on.push is valid", (t) => {
-const errors = actionsutil.validateWorkflow({
on: ["push", "pull_request"],
});
-t.deepEqual(errors.length, 0);
+t.deepEqual(...errorCodes(errors, []));
});
-ava_1.default("validateWorkflow() when on.push is a valid superset", (t) => {
+ava_1.default("getWorkflowErrors() when on.push is a valid superset", (t) => {
-const errors = actionsutil.validateWorkflow({
+const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request", "schedule"],
});
-t.deepEqual(errors.length, 0);
+t.deepEqual(...errorCodes(errors, []));
});
-ava_1.default("validateWorkflow() when on.push should not have a path", (t) => {
+ava_1.default("getWorkflowErrors() when on.push should not have a path", (t) => {
-const errors = actionsutil.validateWorkflow({
+const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main"], paths: ["test/*"] },
pull_request: { branches: ["main"] },
},
});
-t.deepEqual(errors, [actionsutil.WorkflowErrors.PathsSpecified]);
+t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
});
-ava_1.default("validateWorkflow() when on.push is a correct object", (t) => {
+ava_1.default("getWorkflowErrors() when on.push is a correct object", (t) => {
-const errors = actionsutil.validateWorkflow({
+const errors = actionsutil.getWorkflowErrors({
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
});
-t.deepEqual(errors.length, 0);
+t.deepEqual(...errorCodes(errors, []));
});
-ava_1.default("validateWorkflow() when on.push is correct with empty objects", (t) => {
+ava_1.default("getWorkflowErrors() when on.pull_requests is a string", (t) => {
-const errors = actionsutil.validateWorkflow({
+const errors = actionsutil.getWorkflowErrors({
-on: { push: undefined, pull_request: undefined },
+on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
});
-console.log(errors);
+t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
-t.deepEqual(errors.length, 0);
});
-ava_1.default("validateWorkflow() when on.push is mismatched", (t) => {
+ava_1.default("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
-const errors = actionsutil.validateWorkflow({
+const errors = actionsutil.getWorkflowErrors({
+on: { push: { branches: "*" }, pull_request: { branches: "*" } },
+});
+t.deepEqual(...errorCodes(errors, []));
+});
+ava_1.default("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
+const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
+on:
+push:
+pull_request:
+`));
+t.deepEqual(...errorCodes(errors, []));
+});
+ava_1.default("getWorkflowErrors() when on.push is mismatched", (t) => {
+const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main"] },
pull_request: { branches: ["feature"] },
},
});
-t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
-ava_1.default("validateWorkflow() when on.push is not mismatched", (t) => {
+ava_1.default("getWorkflowErrors() when on.push is not mismatched", (t) => {
-const errors = actionsutil.validateWorkflow({
+const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main", "feature"] },
pull_request: { branches: ["main"] },
},
});
-t.deepEqual(errors.length, 0);
+t.deepEqual(...errorCodes(errors, []));
});
-ava_1.default("validateWorkflow() when on.push is mismatched for pull_request", (t) => {
+ava_1.default("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
-const errors = actionsutil.validateWorkflow({
+const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main"] },
pull_request: { branches: ["main", "feature"] },
},
});
-t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
-ava_1.default("validateWorkflow() when on.pull_request for every branch but push specifies branches", (t) => {
+ava_1.default("getWorkflowErrors() for a range of malformed workflows", (t) => {
-const errors = actionsutil.validateWorkflow({
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: {
-push: { branches: ["main"] },
+push: 1,
-pull_request: null,
+pull_request: 1,
+},
+}), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
+on: 1,
+}), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
+on: 1,
+jobs: 1,
+}), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
+on: 1,
+jobs: [1],
+}), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
+on: 1,
+jobs: { 1: 1 },
+}), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
+on: 1,
+jobs: { test: 1 },
+}), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
+on: 1,
+jobs: { test: [1] },
+}), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
+on: 1,
+jobs: { test: { steps: 1 } },
+}), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
+on: 1,
+jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
+}), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
+on: 1,
+jobs: { test: [undefined] },
+}), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
+on: {
+push: {
+branches: 1,
+},
+pull_request: {
+branches: 1,
+},
+},
+}), []));
+});
+ava_1.default("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
+const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
+name: "CodeQL"
+on:
+push:
+branches: ["main"]
+pull_request:
+`));
+t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
+});
+ava_1.default("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
+const errors = actionsutil.getWorkflowErrors({
+on: {
+push: { branches: ["feature/*"] },
+pull_request: { branches: "feature/moose" },
},
});
-t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+t.deepEqual(...errorCodes(errors, []));
});
-ava_1.default("validateWorkflow() when HEAD^2 is checked out", (t) => {
+ava_1.default("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
-const errors = actionsutil.validateWorkflow({
+const errors = actionsutil.getWorkflowErrors({
+on: {
+push: { branches: ["feature/moose"] },
+pull_request: { branches: "feature/*" },
+},
+});
+t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
+});
+ava_1.default("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
+process.env.GITHUB_JOB = "test";
+const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request"],
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
});
-t.deepEqual(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]);
+t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
});
ava_1.default("formatWorkflowErrors() when there is one error", (t) => {
const message = actionsutil.formatWorkflowErrors([
@@ -175,6 +261,10 @@ ava_1.default("formatWorkflowErrors() when there are multiple errors", (t) => {
]);
t.true(message.startsWith("2 issues were detected with this workflow:"));
});
+ava_1.default("formatWorkflowCause() with no errors", (t) => {
+const message = actionsutil.formatWorkflowCause([]);
+t.deepEqual(message, undefined);
+});
ava_1.default("formatWorkflowCause()", (t) => {
const message = actionsutil.formatWorkflowCause([
actionsutil.WorkflowErrors.CheckoutWrongHead,
@@ -183,4 +273,130 @@ ava_1.default("formatWorkflowCause()", (t) => {
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
});
+ava_1.default("patternIsSuperset()", (t) => {
+t.false(actionsutil.patternIsSuperset("main-*", "main"));
+t.true(actionsutil.patternIsSuperset("*", "*"));
+t.true(actionsutil.patternIsSuperset("*", "main-*"));
+t.false(actionsutil.patternIsSuperset("main-*", "*"));
+t.false(actionsutil.patternIsSuperset("main-*", "main"));
+t.true(actionsutil.patternIsSuperset("main", "main"));
+t.false(actionsutil.patternIsSuperset("*", "feature/*"));
+t.true(actionsutil.patternIsSuperset("**", "feature/*"));
+t.false(actionsutil.patternIsSuperset("feature-*", "**"));
+t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
+t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
+t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
+t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
+t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
+t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
+t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
+t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
+t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
+t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
+t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
+});
+ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
+const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
+on:
+push:
+branches: [4.1, master]
+pull_request:
+# The branches below must be a subset of the branches above
+branches: [4.1, master]
+`));
+t.deepEqual(...errorCodes(errors, []));
+});
+ava_1.default("getWorkflowErrors() when on.push has a trailing comma", (t) => {
+const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
+name: "CodeQL"
+on:
+push:
+branches: [master, ]
+pull_request:
+# The branches below must be a subset of the branches above
+branches: [master]
+`));
+t.deepEqual(...errorCodes(errors, []));
+});
+ava_1.default("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
+process.env.GITHUB_JOB = "test";
+const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
+name: "CodeQL"
+on:
+push:
+branches: [master]
+pull_request:
+# The branches below must be a subset of the branches above
+branches: [master]
+jobs:
+test:
+steps:
+- run: "git checkout HEAD^2"
+
+test2:
+steps:
+- run: "git checkout HEAD^2"
+
+test3:
+steps: []
+`));
+t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
+});
+ava_1.default("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
+process.env.GITHUB_JOB = "test3";
+const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
+name: "CodeQL"
+on:
+push:
+branches: [master]
+pull_request:
+# The branches below must be a subset of the branches above
+branches: [master]
+jobs:
+test:
+steps:
+- run: "git checkout HEAD^2"
+
+test2:
+steps:
+- run: "git checkout HEAD^2"
+
+test3:
+steps: []
+`));
+t.deepEqual(...errorCodes(errors, []));
+});
+ava_1.default("getWorkflowErrors() when on is missing", (t) => {
+const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
+name: "CodeQL"
+`));
+t.deepEqual(...errorCodes(errors, []));
+});
+ava_1.default("getWorkflowErrors() with a different on setup", (t) => {
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
+name: "CodeQL"
+on: "workflow_dispatch"
+`)), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
+name: "CodeQL"
+on: [workflow_dispatch]
+`)), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
+name: "CodeQL"
+on:
+workflow_dispatch: {}
+`)), []));
+});
+ava_1.default("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
+name: "CodeQL"
+on:
+push:
+branches: [master]
+`)), []));
+t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
+name: "CodeQL"
+on: ["push"]
+`)), []));
+});
//# sourceMappingURL=actions-util.test.js.map
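The rewritten tests above compare errors by their code field rather than by object identity. A small, self-contained sketch of that helper follows; the CodedError shape and the standalone form are assumptions made for illustration, not the repository's exact source.

// Hedged sketch of the errorCodes() helper used by the tests above.
interface CodedError { code: string; message: string; }

function errorCodes(actual: CodedError[], expected: CodedError[]): [string[], string[]] {
  // Reduce both sides to their codes so deepEqual ignores message wording.
  return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
}

// Usage: t.deepEqual(...errorCodes(errors, [WorkflowErrors.MissingPushHook]));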
File diff suppressed because one or more lines are too long

3 lib/analysis-paths.test.js (generated)
@@ -27,6 +27,7 @@ ava_1.default("emptyPaths", async (t) => {
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
+gitHubVersion: { type: util.GitHubVariant.DOTCOM },
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -45,6 +46,7 @@ ava_1.default("nonEmptyPaths", async (t) => {
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
+gitHubVersion: { type: util.GitHubVariant.DOTCOM },
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -64,6 +66,7 @@ ava_1.default("exclude temp dir", async (t) => {
tempDir,
toolCacheDir,
codeQLCmd: "",
+gitHubVersion: { type: util.GitHubVariant.DOTCOM },
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
41 lib/analyze-action.js (generated)
@@ -7,12 +7,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
-const repository_1 = require("./repository");
+const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, stats, error) {
var _a, _b, _c;
@@ -29,13 +31,14 @@ async function sendStatusReport(startedAt, stats, error) {
async function run() {
const startedAt = new Date();
let stats = undefined;
+let config = undefined;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
return;
}
const logger = logging_1.getActionsLogger();
-const config = await config_utils_1.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
+config = await config_utils_1.getConfig(actionsUtil.getTemporaryDirectory(), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
@@ -43,7 +46,16 @@ async function run() {
auth: actionsUtil.getRequiredInput("token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
-stats = await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), apiDetails, actionsUtil.getRequiredInput("upload") === "true", "actions", actionsUtil.getRequiredInput("output"), util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
+const outputDir = actionsUtil.getRequiredInput("output");
+const queriesStats = await analyze_1.runAnalyze(outputDir, util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
+if (actionsUtil.getRequiredInput("upload") === "true") {
+const uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
+stats = { ...queriesStats, ...uploadStats };
+}
+else {
+logger.info("Not uploading results");
+stats = { ...queriesStats };
+}
}
catch (error) {
core.setFailed(error.message);
@@ -54,6 +66,29 @@ async function run() {
await sendStatusReport(startedAt, stats, error);
return;
}
+finally {
+if (core.isDebug() && config !== undefined) {
+core.info("Debug mode is on. Printing CodeQL debug logs...");
+for (const language of config.languages) {
+const databaseDirectory = util.getCodeQLDatabasePath(config.tempDir, language);
+const logsDirectory = path.join(databaseDirectory, "log");
+const walkLogFiles = (dir) => {
+const entries = fs.readdirSync(dir, { withFileTypes: true });
+for (const entry of entries) {
+if (entry.isFile()) {
+core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
+process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
+core.endGroup();
+}
+else if (entry.isDirectory()) {
+walkLogFiles(path.resolve(dir, entry.name));
+}
+}
+};
+walkLogFiles(logsDirectory);
+}
+}
+}
await sendStatusReport(startedAt, stats);
}
async function runWrapper() {
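For readers skimming the compiled diff above, the new finally block reduces to a recursive walk over each database's log directory, printing every file it finds. The following is a hedged, self-contained TypeScript sketch; the callback-based shape is an assumption made for illustration, since the real action inlines the printing.

// Hedged sketch of the log walk added in lib/analyze-action.js above.
import * as fs from "fs";
import * as path from "path";

function walkLogFiles(dir: string, onFile: (filePath: string) => void): void {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.resolve(dir, entry.name);
    if (entry.isFile()) {
      onFile(full);                 // the real action prints each log file in its own group
    } else if (entry.isDirectory()) {
      walkLogFiles(full, onFile);   // recurse into nested log directories
    }
  }
}

// e.g. walkLogFiles(path.join(databaseDirectory, "log"), (f) => process.stdout.write(fs.readFileSync(f)));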
@@ -1 +1 @@
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAImB;AACnB,iDAA2C;AAC3C,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAM/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,MAAM,GAAG,MAAM,wBAAS,CAC5B,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QACF,KAAK,GAAG,MAAM,oBAAU,CACtB,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,MAAM,WAAW,CAAC,YAAY,EAAE,EAChC,MAAM,WAAW,CAAC,MAAM,EAAE,EAC1B,MAAM,WAAW,CAAC,cAAc,EAAE,EAClC,WAAW,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAClD,WAAW,CAAC,gBAAgB,EAAE,EAC9B,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,UAAU,EACV,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EACjD,SAAS,EACT,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACpE,MAAM,EACN,MAAM,CACP,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;SAC1C;QAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAImB;AACnB,iDAAmD;AACnD,uCAA6C;AAC7C,yDAA2C;AAC3C,6CAA+B;AAU/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI,MAAM,GAAuB,SAAS,CAAC;IAC3C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,GAAG,MAAM,wBAAS,CAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;QACtE,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,YAAY,GAAG,MAAM,oBAAU,CACnC,SAAS,EACT,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACpE,MAAM,EACN,MAAM,CACP,CAAC;QAEF,IAAI,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EAAE;YACrD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,SAAS,EACT,MAAM,CAAC,aAAa,EACpB,UAAU,EACV,MAAM,CACP,CAAC;YACF,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;SAC7C;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;YACrC,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,CAAC;SAC7B;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;SAC1C;QAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;YAAS;QACR,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,MAAM,KAAK,SAAS,EAAE;YAC1C,IAAI,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;YAC7D,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,MAAM,iBAAiB,GAAG,IAAI,CAAC,qBAAqB,CAClD,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;gBACF,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;gBAE1D,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE;oBACnC,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;oBAC7D,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;wBAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;4BAClB,IAAI,CAAC,UAAU,CACb,uBAAuB,QAAQ,MAAM,KAAK,CAAC,IAAI,EAAE,CAClD,CAAC;4BACF,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAC/C,CAAC;4BACF,IAAI,CAAC,QAAQ,EAAE,CAAC;yBACjB;6BAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;4BAC9B,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;yBAC7C;qBACF;gBACH,CAAC,CAAC;gBACF,YAAY,CAAC,aAAa,CAAC,CAAC;aAC7B;SACF;KACF;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
8 lib/analyze.js (generated)
@@ -14,7 +14,6 @@ const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
-const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
class CodeQLAnalysisError extends Error {
constructor(queriesStatusReport, message) {
@@ -117,7 +116,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
return statusReport;
}
exports.runQueries = runQueries;
-async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, apiDetails, doUpload, mode, outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
+async function runAnalyze(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
fs.mkdirSync(outputDir, { recursive: true });
@@ -125,12 +124,7 @@ async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisNa
await finalizeDatabaseCreation(config, threadsFlag, logger);
logger.info("Analyzing database");
const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
-if (!doUpload) {
-logger.info("Not uploading results");
return { ...queriesStats };
-}
-const uploadStats = await upload_lib.upload(outputDir, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, apiDetails, mode, logger);
-return { ...queriesStats, ...uploadStats };
}
exports.runAnalyze = runAnalyze;
//# sourceMappingURL=analyze.js.map
@@ -1 +1 @@
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAE3D,gEAAkD;AAElD,qCAAqC;AAErC,2CAA0D;AAG1D,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAE/B,MAAa,mBAAoB,SAAQ,KAAK;IAG5C,YAAY,mBAAwC,EAAE,OAAe;QACnE,KAAK,CAAC,OAAO,CAAC,CAAC;QAEf,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC;QAClC,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;CACF;AATD,kDASC;AAmCD,KAAK,UAAU,oBAAoB,CAAC,MAAc;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAClD,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3D,+FAA+F;QAC/F,OAAO;KACR;IAED,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,OAAO,GAAG;QACd,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC;IAEF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ;QACE,IAAI;QACJ,8EAA8E;KAC/E,EACD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,kCAAkC,MAAM,EAAE,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAE/C,MAAM,GAAG,EAAE,CAAC;IACZ,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ,CAAC,IAAI,EAAE,wCAAwC,CAAC,EAChD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,qCAAqC,MAAM,EAAE,CAAC,CAAC;IAC3D,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,GAAG,MAAM,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;YAE5C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,MAAM,EAAE;gBAChC,MAAM,oBAAoB,CAAC,MAAM,CAAC,CAAC;aACpC;YAED,MAAM,MAAM,CAAC,sBAAsB,CACjC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,CACT,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,WAAmB,EACnB,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAC3B,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,WAAW,CACZ,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AACpD,KAAK,UAAU,UAAU,CAC9B,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,YAAY,GAAwB,EAAE,CAAC;IAE7C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,QAAQ,EAAE,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QACzC,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;YAC/D,MAAM,IAAI,KAAK,CACb,qBAAqB,QAAQ,gDAAgD,CAC9E,CAAC;SACH;QAED,IAAI;YACF,KAAK,MAAM,IAAI,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,EAAE;gBACxC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBAEvC,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAC7C,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;oBACF,uEAAuE;oBACvE,2EAA2E;oBAC3E,MAAM,cAAc,GAAG,GAAG,YAAY,YAAY,IAAI,MAAM,CAAC;oBAC7D,MAAM,kBAAkB,GAAG,OAAO,CAAC,IAAI,CAAC;yBACrC,GAAG,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC;yBACnC,IAAI,CAAC,IAAI,CAAC,CAAC;oBACd,EAAE,CAAC,aAAa,CAAC,cAAc,EAAE,kBAAkB,CAAC,CAAC;oBACrD,MAAM,CAAC,KAAK,CACV,wBAAwB,QAAQ,QAAQ,kBAAkB,EAAE,CAC7D,CAAC;oBAEF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,QAAQ,IAAI,IAAI,QAAQ,CAAC,CAAC;oBAEtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;oBAC3C,MAAM,MAAM,CAAC,eAAe,CAC1B,YAAY,EACZ,SAAS,EACT,cAAc,EACd,UAAU,EACV,eAAe,EACf,WAAW,CACZ,CAAC;oBAEF,MAAM,CAAC,KAAK,CACV,8BAA8B,QAAQ,gBAAgB,SAAS,GAAG,CACnE,CAAC;oBACF,MAAM,CAAC,QA
AQ,EAAE,CAAC;oBAElB,yBAAyB;oBACzB,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBACrC,YAAY,CAAC,WAAW,IAAI,YAAY,QAAQ,cAAc,CAAC;wBAC7D,OAAO,GAAG,SAAS,CAAC;iBACvB;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACf,YAAY,CAAC,wBAAwB,GAAG,QAAQ,CAAC;YACjD,MAAM,IAAI,mBAAmB,CAC3B,YAAY,EACZ,8BAA8B,QAAQ,KAAK,CAAC,EAAE,CAC/C,CAAC;SACH;KACF;IAED,OAAO,YAAY,CAAC;AACtB,CAAC;AA1ED,gCA0EC;AAEM,KAAK,UAAU,UAAU,CAC9B,aAA4B,EAC5B,SAAiB,EACjB,GAAW,EACX,WAA+B,EAC/B,YAAgC,EAChC,aAAiC,EACjC,YAAoB,EACpB,WAA+B,EAC/B,UAA4B,EAC5B,QAAiB,EACjB,IAAe,EACf,SAAiB,EACjB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;IAE5D,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CACnC,SAAS,EACT,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,MAAM,CACP,CAAC;IAEF,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACrC,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;KAC5B;IAED,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,SAAS,EACT,aAAa,EACb,SAAS,EACT,GAAG,EACH,WAAW,EACX,YAAY,EACZ,aAAa,EACb,YAAY,EACZ,WAAW,EACX,UAAU,EACV,IAAI,EACJ,MAAM,CACP,CAAC;IAEF,OAAO,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;AAC7C,CAAC;AA1DD,gCA0DC"}
|
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAE3D,gEAAkD;AAClD,qCAAqC;AAErC,2CAA0D;AAE1D,gEAAkD;AAClD,6CAA+B;AAE/B,MAAa,mBAAoB,SAAQ,KAAK;IAG5C,YAAY,mBAAwC,EAAE,OAAe;QACnE,KAAK,CAAC,OAAO,CAAC,CAAC;QAEf,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC;QAClC,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;CACF;AATD,kDASC;AA+BD,KAAK,UAAU,oBAAoB,CAAC,MAAc;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAClD,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3D,+FAA+F;QAC/F,OAAO;KACR;IAED,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,OAAO,GAAG;QACd,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC;IAEF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ;QACE,IAAI;QACJ,8EAA8E;KAC/E,EACD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,kCAAkC,MAAM,EAAE,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAE/C,MAAM,GAAG,EAAE,CAAC;IACZ,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ,CAAC,IAAI,EAAE,wCAAwC,CAAC,EAChD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,qCAAqC,MAAM,EAAE,CAAC,CAAC;IAC3D,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,GAAG,MAAM,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;YAE5C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,MAAM,EAAE;gBAChC,MAAM,oBAAoB,CAAC,MAAM,CAAC,CAAC;aACpC;YAED,MAAM,MAAM,CAAC,sBAAsB,CACjC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,CACT,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,WAAmB,EACnB,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAC3B,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,WAAW,CACZ,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AACpD,KAAK,UAAU,UAAU,CAC9B,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,YAAY,GAAwB,EAAE,CAAC;IAE7C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,QAAQ,EAAE,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QACzC,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;YAC/D,MAAM,IAAI,KAAK,CACb,qBAAqB,QAAQ,gDAAgD,CAC9E,CAAC;SACH;QAED,IAAI;YACF,KAAK,MAAM,IAAI,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,EAAE;gBACxC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBAEvC,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAC7C,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;oBACF,uEAAuE;oBACvE,2EAA2E;oBAC3E,MAAM,cAAc,GAAG,GAAG,YAAY,YAAY,IAAI,MAAM,CAAC;oBAC7D,MAAM,kBAAkB,GAAG,OAAO,CAAC,IAAI,CAAC;yBACrC,GAAG,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC;yBACnC,IAAI,CAAC,IAAI,CAAC,CAAC;oBACd,EAAE,CAAC,aAAa,CAAC,cAAc,EAAE,kBAAkB,CAAC,CAAC;oBACrD,MAAM,CAAC,KAAK,CACV,wBAAwB,QAAQ,QAAQ,kBAAkB,EAAE,CAC7D,CAAC;oBAEF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,QAAQ,IAAI,IAAI,QAAQ,CAAC,CAAC;oBAEtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;oBAC3C,MAAM,MAAM,CAAC,eAAe,CAC1B,YAAY,EACZ,SAAS,EACT,cAAc,EACd,UAAU,EACV,eAAe,EACf,WAAW,CACZ,CAAC;oBAEF,MAAM,CAAC,KAAK,CACV,8BAA8B,QAAQ,gBAAgB,SAAS,GAAG,CACnE,CAAC;oBACF,MAAM,CAAC,QAAQ,EAAE,CAAC;
oBAElB,yBAAyB;oBACzB,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBACrC,YAAY,CAAC,WAAW,IAAI,YAAY,QAAQ,cAAc,CAAC;wBAC7D,OAAO,GAAG,SAAS,CAAC;iBACvB;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACf,YAAY,CAAC,wBAAwB,GAAG,QAAQ,CAAC;YACjD,MAAM,IAAI,mBAAmB,CAC3B,YAAY,EACZ,8BAA8B,QAAQ,KAAK,CAAC,EAAE,CAC/C,CAAC;SACH;KACF;IAED,OAAO,YAAY,CAAC;AACtB,CAAC;AA1ED,gCA0EC;AAEM,KAAK,UAAU,UAAU,CAC9B,SAAiB,EACjB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;IAE5D,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CACnC,SAAS,EACT,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,MAAM,CACP,CAAC;IAEF,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;AAC7B,CAAC;AA3BD,gCA2BC"}
|
||||||
3
lib/analyze.test.js
generated
3
lib/analyze.test.js
generated
@@ -39,6 +39,9 @@ ava_1.default("status report fields", async (t) => {
 tempDir: tmpDir,
 toolCacheDir: tmpDir,
 codeQLCmd: "",
+gitHubVersion: {
+type: util.GitHubVariant.DOTCOM,
+},
 };
 fs.mkdirSync(util.getCodeQLDatabasePath(config.tempDir, language), {
 recursive: true,
lib/analyze.test.js.map (generated): source map diff omitted; the minified mappings are too long to display meaningfully.

lib/api-client.js (generated, 52 changed lines)
@@ -11,54 +11,20 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const path = __importStar(require("path"));
-const core_1 = require("@actions/core");
 const githubUtils = __importStar(require("@actions/github/lib/utils"));
-const retry = __importStar(require("@octokit/plugin-retry"));
 const console_log_level_1 = __importDefault(require("console-log-level"));
-const semver = __importStar(require("semver"));
 const actions_util_1 = require("./actions-util");
-const apiCompatibility = __importStar(require("./api-compatibility.json"));
-const logging_1 = require("./logging");
 const util_1 = require("./util");
 var DisallowedAPIVersionReason;
 (function (DisallowedAPIVersionReason) {
 DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
 DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
 })(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
-const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
-const CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR = "CODEQL_ACTION_WARNED_ABOUT_VERSION";
-let hasBeenWarnedAboutVersion = false;
-exports.getApiClient = function (apiDetails, mode, logger, allowLocalRun = false, possibleFailureExpected = false) {
+exports.getApiClient = function (apiDetails, allowLocalRun = false) {
 if (util_1.isLocalRun() && !allowLocalRun) {
 throw new Error("Invalid API call in local run");
 }
-const customOctokit = githubUtils.GitHub.plugin(retry.retry, (octokit, _) => {
-octokit.hook.after("request", (response, __) => {
-if (response.status < 400 && !possibleFailureExpected) {
-if (hasBeenWarnedAboutVersion) {
-return;
-}
-}
-if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined ||
-process.env[CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR] === undefined) {
-return;
-}
-const installedVersion = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER];
-const disallowedAPIVersionReason = apiVersionInRange(installedVersion, apiCompatibility.minimumVersion, apiCompatibility.maximumVersion);
-const toolName = mode === "actions" ? "Action" : "Runner";
-if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_OLD) {
-logger.warning(`The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${installedVersion}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`);
-}
-if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_NEW) {
-logger.warning(`GitHub Enterprise ${installedVersion} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`);
-}
-hasBeenWarnedAboutVersion = true;
-if (mode === "actions") {
-core_1.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true);
-}
-});
-});
-return new customOctokit(githubUtils.getOctokitOptions(apiDetails.auth, {
+return new githubUtils.GitHub(githubUtils.getOctokitOptions(apiDetails.auth, {
 baseUrl: getApiUrl(apiDetails.url),
 userAgent: "CodeQL Action",
 log: console_log_level_1.default({ level: "debug" }),
@@ -66,7 +32,7 @@ exports.getApiClient = function (apiDetails, mode, logger, allowLocalRun = false
 };
 function getApiUrl(githubUrl) {
 const url = new URL(githubUrl);
-// If we detect this is trying to be to github.com
+// If we detect this is trying to connect to github.com
 // then return with a fixed canonical URL.
 if (url.hostname === "github.com" || url.hostname === "api.github.com") {
 return "https://api.github.com";
@@ -83,17 +49,7 @@ function getActionsApiClient(allowLocalRun = false) {
 auth: actions_util_1.getRequiredInput("token"),
 url: actions_util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
 };
-return exports.getApiClient(apiDetails, "actions", logging_1.getActionsLogger(), allowLocalRun);
+return exports.getApiClient(apiDetails, allowLocalRun);
 }
 exports.getActionsApiClient = getActionsApiClient;
-function apiVersionInRange(version, minimumVersion, maximumVersion) {
-if (!semver.satisfies(version, `>=${minimumVersion}`)) {
-return DisallowedAPIVersionReason.ACTION_TOO_NEW;
-}
-if (!semver.satisfies(version, `<=${maximumVersion}`)) {
-return DisallowedAPIVersionReason.ACTION_TOO_OLD;
-}
-return undefined;
-}
-exports.apiVersionInRange = apiVersionInRange;
 //# sourceMappingURL=api-client.js.map
lib/api-client.js.map (generated): source map diff omitted; the minified mappings are too long to display meaningfully.

lib/api-client.test.js (generated, 17 changed lines)
@@ -1,17 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const ava_1 = __importDefault(require("ava"));
-const api_client_1 = require("./api-client");
-ava_1.default("allowed API versions", async (t) => {
-t.is(api_client_1.apiVersionInRange("1.33.0", "1.33", "2.0"), undefined);
-t.is(api_client_1.apiVersionInRange("1.33.1", "1.33", "2.0"), undefined);
-t.is(api_client_1.apiVersionInRange("1.34.0", "1.33", "2.0"), undefined);
-t.is(api_client_1.apiVersionInRange("2.0.0", "1.33", "2.0"), undefined);
-t.is(api_client_1.apiVersionInRange("2.0.1", "1.33", "2.0"), undefined);
-t.is(api_client_1.apiVersionInRange("1.32.0", "1.33", "2.0"), api_client_1.DisallowedAPIVersionReason.ACTION_TOO_NEW);
-t.is(api_client_1.apiVersionInRange("2.1.0", "1.33", "2.0"), api_client_1.DisallowedAPIVersionReason.ACTION_TOO_OLD);
-});
-//# sourceMappingURL=api-client.test.js.map
lib/api-client.test.js.map (generated): deleted; source map diff omitted.

lib/api-compatibility.json:
@@ -1 +1 @@
-{ "maximumVersion": "3.0", "minimumVersion": "2.22" }
+{ "maximumVersion": "3.1", "minimumVersion": "2.22" }
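As context for the range above: the apiVersionInRange helper removed from lib/api-client.js in the earlier hunk interprets this minimum/maximum pair with node-semver, and the deleted api-client.test.js exercised exactly that behaviour. A minimal TypeScript sketch of the check, mirroring the removed helper; the enum and function here are illustrative, not the action's current exports:

import * as semver from "semver";

// When the installed GHES version is below the minimum, this Action is "too new"
// for it; when it is above the maximum, the Action is "too old".
enum DisallowedAPIVersionReason { ACTION_TOO_OLD, ACTION_TOO_NEW }

function apiVersionInRange(version: string, minimumVersion: string, maximumVersion: string): DisallowedAPIVersionReason | undefined {
  if (!semver.satisfies(version, `>=${minimumVersion}`)) {
    return DisallowedAPIVersionReason.ACTION_TOO_NEW;
  }
  if (!semver.satisfies(version, `<=${maximumVersion}`)) {
    return DisallowedAPIVersionReason.ACTION_TOO_OLD;
  }
  return undefined;
}

// With the updated range: apiVersionInRange("3.1.0", "2.22", "3.1") === undefined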
lib/autobuild-action.js (generated, 2 changed lines)
@@ -34,7 +34,7 @@ async function run() {
 if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("autobuild", "starting", startedAt)))) {
 return;
 }
-const config = await config_utils.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
+const config = await config_utils.getConfig(actionsUtil.getTemporaryDirectory(), logger);
 if (config === undefined) {
 throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
 }
lib/autobuild-action.js.map (generated): source map diff omitted; the minified mappings are too long to display meaningfully.

lib/codeql.js (generated, 54 changed lines)
@@ -6,6 +6,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
 result["default"] = mod;
 return result;
 };
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
@@ -14,6 +17,8 @@ const globalutil = __importStar(require("util"));
 const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
 const http = __importStar(require("@actions/http-client"));
 const toolcache = __importStar(require("@actions/tool-cache"));
+const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
+const query_string_1 = __importDefault(require("query-string"));
 const semver = __importStar(require("semver"));
 const uuid_1 = require("uuid");
 const actions_util_1 = require("./actions-util");
@@ -49,6 +54,11 @@ function getCodeQLActionRepository(mode, logger) {
 if (mode !== "actions") {
 return CODEQL_DEFAULT_ACTION_REPOSITORY;
 }
+else {
+return getActionsCodeQLActionRepository(logger);
+}
+}
+function getActionsCodeQLActionRepository(logger) {
 if (process.env["GITHUB_ACTION_REPOSITORY"] !== undefined) {
 return process.env["GITHUB_ACTION_REPOSITORY"];
 }
@@ -64,7 +74,7 @@ function getCodeQLActionRepository(mode, logger) {
 const relativeScriptPathParts = actions_util_1.getRelativeScriptPath().split(path.sep);
 return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
 }
-async function getCodeQLBundleDownloadURL(apiDetails, mode, logger) {
+async function getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger) {
 const codeQLActionRepository = getCodeQLActionRepository(mode, logger);
 const potentialDownloadSources = [
 // This GitHub instance, and this Action.
@@ -76,8 +86,34 @@ async function getCodeQLBundleDownloadURL(apiDetails, mode, logger) {
 ];
 // We now filter out any duplicates.
 // Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
-const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
+const uniqueDownloadSources = potentialDownloadSources.filter((source, index, self) => {
+return !self.slice(0, index).some((other) => fast_deep_equal_1.default(source, other));
+});
 const codeQLBundleName = getCodeQLBundleName();
+if (variant === util.GitHubVariant.GHAE) {
+try {
+const release = await api
+.getApiClient(apiDetails)
+.request("GET /enterprise/code-scanning/codeql-bundle/find/{tag}", {
+tag: CODEQL_BUNDLE_VERSION,
+});
+const assetID = release.data.assets[codeQLBundleName];
+if (assetID !== undefined) {
+const download = await api
+.getApiClient(apiDetails)
+.request("GET /enterprise/code-scanning/codeql-bundle/download/{asset_id}", { asset_id: assetID });
+const downloadURL = download.data.url;
+logger.info(`Found CodeQL bundle at GitHub AE endpoint with URL ${downloadURL}.`);
+return downloadURL;
+}
+else {
+logger.info(`Attempted to fetch bundle from GitHub AE endpoint but the bundle ${codeQLBundleName} was not found in the assets ${JSON.stringify(release.data.assets)}.`);
+}
+}
+catch (e) {
+logger.info(`Attempted to fetch bundle from GitHub AE endpoint but got error ${e}.`);
+}
+}
 for (const downloadSource of uniqueDownloadSources) {
 const [apiURL, repository] = downloadSource;
 // If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
@@ -87,9 +123,7 @@ async function getCodeQLBundleDownloadURL(apiDetails, mode, logger) {
 }
 const [repositoryOwner, repositoryName] = repository.split("/");
 try {
-const release = await api
-.getApiClient(apiDetails, mode, logger, false, true)
-.repos.getReleaseByTag({
+const release = await api.getApiClient(apiDetails).repos.getReleaseByTag({
 owner: repositoryOwner,
 repo: repositoryName,
 tag: CODEQL_BUNDLE_VERSION,
@@ -122,7 +156,7 @@ async function toolcacheDownloadTool(url, headers, tempDir, logger) {
 await pipeline(response.message, fs.createWriteStream(dest));
 return dest;
 }
-async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logger) {
+async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, variant, logger) {
 // Setting these two env vars makes the toolcache code safe to use outside,
 // of actions but this is obviously not a great thing we're doing and it would
 // be better to write our own implementation to use outside of actions.
@@ -157,13 +191,17 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logge
 }
 else {
 if (!codeqlURL) {
-codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, mode, logger);
+codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger);
 }
+const parsedCodeQLURL = new URL(codeqlURL);
+const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
 const headers = { accept: "application/octet-stream" };
 // We only want to provide an authorization header if we are downloading
 // from the same GitHub instance the Action is running on.
 // This avoids leaking Enterprise tokens to dotcom.
-if (codeqlURL.startsWith(`${apiDetails.url}/`)) {
+// We also don't want to send an authorization header if there's already a token provided in the URL.
+if (codeqlURL.startsWith(`${apiDetails.url}/`) &&
+parsedQueryString["token"] === undefined) {
 logger.debug("Downloading CodeQL bundle with token.");
 headers.authorization = `token ${apiDetails.auth}`;
 }
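For readers skimming the hunk above: on GitHub AE the bundle is resolved in two steps, first mapping the bundle tag to an asset id, then mapping that asset id to a download URL, before falling back to the usual release download sources. A minimal TypeScript sketch of that flow, assuming an Octokit-style client whose request() method accepts these routes; the routes come from the diff, while the client setup, names, and response shapes (taken from the nock mocks in codeql.test.js further down) are illustrative:

import { Octokit } from "@octokit/core";

async function findGHAEBundleURL(client: Octokit, tag: string, bundleName: string): Promise<string | undefined> {
  // Step 1: look up the release for this bundle tag and read the asset id published for our platform's bundle name.
  const release = await client.request("GET /enterprise/code-scanning/codeql-bundle/find/{tag}", { tag });
  const assetID = release.data.assets[bundleName];
  if (assetID === undefined) {
    return undefined; // bundle not published on this instance; fall back to the other download sources
  }
  // Step 2: exchange the asset id for the actual download URL.
  const download = await client.request("GET /enterprise/code-scanning/codeql-bundle/download/{asset_id}", { asset_id: assetID });
  return download.data.url;
}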
File diff suppressed because one or more lines are too long
lib/codeql.test.js (generated, 49 changed lines)
@@ -24,6 +24,10 @@ const sampleApiDetails = {
 auth: "token",
 url: "https://github.com",
 };
+const sampleGHAEApiDetails = {
+auth: "token",
+url: "https://example.githubenterprise.com",
+};
 ava_1.default("download codeql bundle cache", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
 const versions = ["20200601", "20200610"];
@@ -32,7 +36,7 @@ ava_1.default("download codeql bundle cache", async (t) => {
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
 }
 const cachedVersions = toolcache.findAllVersions("CodeQL");
@@ -44,12 +48,12 @@ ava_1.default("download codeql bundle cache explicitly requested with pinned dif
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
 });
 });
@@ -58,9 +62,9 @@ ava_1.default("don't download codeql bundle cache with pinned different version
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
-await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 const cachedVersions = toolcache.findAllVersions("CodeQL");
 t.is(cachedVersions.length, 1);
 });
@@ -70,7 +74,7 @@ ava_1.default("download codeql bundle cache with different version cached (not p
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
 const platform = process.platform === "win32"
 ? "win64"
@@ -80,7 +84,7 @@ ava_1.default("download codeql bundle cache with different version cached (not p
 nock_1.default("https://github.com")
 .get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 const cachedVersions = toolcache.findAllVersions("CodeQL");
 t.is(cachedVersions.length, 2);
 });
@@ -90,7 +94,7 @@ ava_1.default('download codeql bundle cache with pinned different version cached
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
 const platform = process.platform === "win32"
 ? "win64"
@@ -100,11 +104,38 @@ ava_1.default('download codeql bundle cache with pinned different version cached
 nock_1.default("https://github.com")
 .get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 const cachedVersions = toolcache.findAllVersions("CodeQL");
 t.is(cachedVersions.length, 2);
 });
 });
+ava_1.default("download codeql bundle from github ae endpoint", async (t) => {
+await util.withTmpDir(async (tmpDir) => {
+const bundleAssetID = 10;
+const platform = process.platform === "win32"
+? "win64"
+: process.platform === "linux"
+? "linux64"
+: "osx64";
+const codeQLBundleName = `codeql-bundle-${platform}.tar.gz`;
+nock_1.default("https://example.githubenterprise.com")
+.get(`/api/v3/enterprise/code-scanning/codeql-bundle/find/${defaults.bundleVersion}`)
+.reply(200, {
+assets: { [codeQLBundleName]: bundleAssetID },
+});
+nock_1.default("https://example.githubenterprise.com")
+.get(`/api/v3/enterprise/code-scanning/codeql-bundle/download/${bundleAssetID}`)
+.reply(200, {
+url: `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`,
+});
+nock_1.default("https://example.githubenterprise.com")
+.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
+.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
+await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.GHAE, logging_1.getRunnerLogger(true));
+const cachedVersions = toolcache.findAllVersions("CodeQL");
+t.is(cachedVersions.length, 1);
+});
+});
 ava_1.default("parse codeql bundle url version", (t) => {
 t.deepEqual(codeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
 });
File diff suppressed because one or more lines are too long
lib/config-utils.js (generated, 50 changed lines)
@@ -128,7 +128,7 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath,
 /**
 * Retrieve the set of queries at the referenced remote repo and add them to resultMap.
 */
-async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile) {
+async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile) {
 let tok = queryUses.split("@");
 if (tok.length !== 2) {
 throw new Error(getQueryUsesInvalid(configFile, queryUses));
@@ -147,7 +147,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
 }
 const nwo = `${tok[0]}/${tok[1]}`;
 // Checkout the external repository
-const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, githubUrl, tempDir, logger);
+const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, apiDetails, tempDir, logger);
 const queryPath = tok.length > 2
 ? path.join(checkoutPath, tok.slice(2).join("/"))
 : checkoutPath;
@@ -161,7 +161,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
 * local paths starting with './', or references to remote repos, or
 * a finite set of hardcoded terms for builtin suites.
 */
-async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, githubUrl, logger, configFile) {
+async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, apiDetails, logger, configFile) {
 queryUses = queryUses.trim();
 if (queryUses === "") {
 throw new Error(getQueryUsesInvalid(configFile));
@@ -177,7 +177,7 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
 return;
 }
 // Otherwise, must be a reference to another repo
-await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile);
+await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
 }
 // Regex validating stars in paths or paths-ignore entries.
 // The intention is to only allow ** to appear when immediately
@@ -301,10 +301,10 @@ exports.getUnknownLanguagesError = getUnknownLanguagesError;
 /**
 * Gets the set of languages in the current repository
 */
-async function getLanguagesInRepo(repository, apiDetails, mode, logger) {
+async function getLanguagesInRepo(repository, apiDetails, logger) {
 logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
 const response = await api
-.getApiClient(apiDetails, mode, logger, true)
+.getApiClient(apiDetails, true)
 .repos.listLanguages({
 owner: repository.owner,
 repo: repository.repo,
@@ -333,7 +333,7 @@ async function getLanguagesInRepo(repository, apiDetails, mode, logger) {
 * If no languages could be detected from either the workflow or the repository
 * then throw an error.
 */
-async function getLanguages(languagesInput, repository, apiDetails, mode, logger) {
+async function getLanguages(languagesInput, repository, apiDetails, logger) {
 // Obtain from action input 'languages' if set
 let languages = (languagesInput || "")
 .split(",")
@@ -342,7 +342,7 @@ async function getLanguages(languagesInput, repository, apiDetails, mode, logger
 logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
 if (languages.length === 0) {
 // Obtain languages as all languages in the repo that can be analysed
-languages = await getLanguagesInRepo(repository, apiDetails, mode, logger);
+languages = await getLanguagesInRepo(repository, apiDetails, logger);
 logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
 }
 // If the languages parameter was not given and no languages were
@@ -367,12 +367,12 @@ async function getLanguages(languagesInput, repository, apiDetails, mode, logger
 }
 return parsedLanguages;
 }
-async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, githubUrl, logger) {
+async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, apiDetails, logger) {
 queriesInput = queriesInput.trim();
 // "+" means "don't override config file" - see shouldAddConfigFileQueries
 queriesInput = queriesInput.replace(/^\+/, "");
 for (const query of queriesInput.split(",")) {
-await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, githubUrl, logger);
+await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, apiDetails, logger);
 }
 }
 // Returns true if either no queries were provided in the workflow.
@@ -388,12 +388,12 @@ function shouldAddConfigFileQueries(queriesInput) {
 /**
 * Get the default config for when the user has not supplied one.
 */
-async function getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger) {
-const languages = await getLanguages(languagesInput, repository, apiDetails, mode, logger);
+async function getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
+const languages = await getLanguages(languagesInput, repository, apiDetails, logger);
 const queries = {};
 await addDefaultQueries(codeQL, languages, queries);
 if (queriesInput) {
-await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails.url, logger);
+await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
 }
 return {
 languages,
@@ -404,13 +404,14 @@ async function getDefaultConfig(languagesInput, queriesInput, repository, tempDi
 tempDir,
 toolCacheDir,
 codeQLCmd: codeQL.getPath(),
+gitHubVersion,
 };
 }
 exports.getDefaultConfig = getDefaultConfig;
 /**
 * Load the config from the given file.
 */
-async function loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger) {
+async function loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
 let parsedYAML;
 if (isLocal(configFile)) {
 // Treat the config file as relative to the workspace
@@ -418,7 +419,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
 parsedYAML = getLocalConfig(configFile, checkoutPath);
 }
 else {
-parsedYAML = await getRemoteConfig(configFile, apiDetails, mode, logger);
+parsedYAML = await getRemoteConfig(configFile, apiDetails);
 }
 // Validate that the 'name' property is syntactically correct,
 // even though we don't use the value yet.
@@ -430,7 +431,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
 throw new Error(getNameInvalid(configFile));
 }
 }
-const languages = await getLanguages(languagesInput, repository, apiDetails, mode, logger);
+const languages = await getLanguages(languagesInput, repository, apiDetails, logger);
 const queries = {};
 const pathsIgnore = [];
 const paths = [];
@@ -449,7 +450,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
 // unless they're prefixed with "+", in which case they supplement those
 // in the config file.
 if (queriesInput) {
-await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails.url, logger);
+await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
 }
 if (shouldAddConfigFileQueries(queriesInput) &&
 QUERIES_PROPERTY in parsedYAML) {
@@ -461,7 +462,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
 typeof query[QUERIES_USES_PROPERTY] !== "string") {
 throw new Error(getQueryUsesInvalid(configFile));
 }
-await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, apiDetails.url, logger, configFile);
+await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, apiDetails, logger, configFile);
 }
 }
 if (PATHS_IGNORE_PROPERTY in parsedYAML) {
@@ -505,6 +506,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
 tempDir,
 toolCacheDir,
|
toolCacheDir,
|
||||||
codeQLCmd: codeQL.getPath(),
|
codeQLCmd: codeQL.getPath(),
|
||||||
|
gitHubVersion,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
@@ -513,15 +515,15 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
|
|||||||
* This will parse the config from the user input if present, or generate
|
* This will parse the config from the user input if present, or generate
|
||||||
* a default config. The parsed config is then stored to a known location.
|
* a default config. The parsed config is then stored to a known location.
|
||||||
*/
|
*/
|
||||||
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger) {
|
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
|
||||||
let config;
|
let config;
|
||||||
// If no config file was provided create an empty one
|
// If no config file was provided create an empty one
|
||||||
if (!configFile) {
|
if (!configFile) {
|
||||||
logger.debug("No configuration file was provided");
|
logger.debug("No configuration file was provided");
|
||||||
config = await getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger);
|
config = await getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
config = await loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger);
|
config = await loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
|
||||||
}
|
}
|
||||||
// Save the config so we can easily access it again in the future
|
// Save the config so we can easily access it again in the future
|
||||||
await saveConfig(config, logger);
|
await saveConfig(config, logger);
|
||||||
@@ -546,7 +548,7 @@ function getLocalConfig(configFile, checkoutPath) {
|
|||||||
}
|
}
|
||||||
return yaml.safeLoad(fs.readFileSync(configFile, "utf8"));
|
return yaml.safeLoad(fs.readFileSync(configFile, "utf8"));
|
||||||
}
|
}
|
||||||
async function getRemoteConfig(configFile, apiDetails, mode, logger) {
|
async function getRemoteConfig(configFile, apiDetails) {
|
||||||
// retrieve the various parts of the config location, and ensure they're present
|
// retrieve the various parts of the config location, and ensure they're present
|
||||||
const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)");
|
const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)");
|
||||||
const pieces = format.exec(configFile);
|
const pieces = format.exec(configFile);
|
||||||
@@ -554,9 +556,7 @@ async function getRemoteConfig(configFile, apiDetails, mode, logger) {
|
|||||||
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
|
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
|
||||||
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
|
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
|
||||||
}
|
}
|
||||||
const response = await api
|
const response = await api.getApiClient(apiDetails, true).repos.getContent({
|
||||||
.getApiClient(apiDetails, mode, logger, true)
|
|
||||||
.repos.getContent({
|
|
||||||
owner: pieces.groups.owner,
|
owner: pieces.groups.owner,
|
||||||
repo: pieces.groups.repo,
|
repo: pieces.groups.repo,
|
||||||
path: pieces.groups.path,
|
path: pieces.groups.path,
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
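A minimal sketch of the call shape implied by the new signatures above, assuming an api-details object with the auth, externalRepoAuth and url fields used by the tests below; every value here is a placeholder:

// Sketch only - the argument order mirrors the new initConfig signature above.
const apiDetails = {
    auth: token,                          // API token
    externalRepoAuth: externalRepoToken,  // optional token used when cloning external query repos
    url: "https://github.example.com",    // GitHub or GHES server URL
};
const gitHubVersion = { type: util.GitHubVariant.DOTCOM };
const config = await configUtils.initConfig(
    languagesInput, queriesInput, configFile,
    { owner: "github", repo: "example" },
    tempDir, toolCacheDir, codeQL, checkoutPath,
    gitHubVersion,   // new argument, inserted before apiDetails
    apiDetails,      // now also carries externalRepoAuth; the separate mode string is gone
    logger);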
45 lib/config-utils.test.js (generated)
@@ -25,8 +25,10 @@ const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
const sampleApiDetails = {
auth: "token",
+ externalRepoAuth: "token",
url: "https://github.example.com",
};
+ const gitHubVersion = { type: util.GitHubVariant.DOTCOM };
// Returns the filepath of the newly-created file
function createConfigFile(inputFileContents, tmpDir) {
const configFilePath = path.join(tmpDir, "input");
@@ -70,8 +72,8 @@ ava_1.default("load empty config", async (t) => {
};
},
});
- const config = await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logger);
+ const config = await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
- t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logger));
+ t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
});
});
ava_1.default("loading config saves config", async (t) => {
@@ -90,7 +92,7 @@ ava_1.default("loading config saves config", async (t) => {
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig returns undefined before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
- const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logger);
+ const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
@@ -101,7 +103,7 @@ ava_1.default("loading config saves config", async (t) => {
ava_1.default("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
try {
- await configUtils.initConfig(undefined, undefined, "../input", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(undefined, undefined, "../input", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -114,7 +116,7 @@ ava_1.default("load non-local input with invalid repo syntax", async (t) => {
// no filename given, just a repo
const configFile = "octo-org/codeql-config@main";
try {
- await configUtils.initConfig(undefined, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(undefined, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -128,7 +130,7 @@ ava_1.default("load non-existent input", async (t) => {
const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile)));
try {
- await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -185,10 +187,11 @@ ava_1.default("load non-empty input", async (t) => {
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: codeQL.getPath(),
+ gitHubVersion,
};
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
- const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
@@ -224,7 +227,7 @@ ava_1.default("Default queries are used", async (t) => {
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
- await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, [
@@ -267,7 +270,7 @@ ava_1.default("Queries can be specified in config file", async (t) => {
},
});
const languages = "javascript";
- const config = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ const config = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
@@ -300,7 +303,7 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
},
});
const languages = "javascript";
- const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file.
@@ -332,7 +335,7 @@ ava_1.default("Queries in workflow file can be used in tandem with the 'disable
},
});
const languages = "javascript";
- const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled
@@ -358,7 +361,7 @@ ava_1.default("Multiple queries can be specified in workflow file, no config fil
},
});
const languages = "javascript";
- const config = await configUtils.initConfig(languages, testQueries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ const config = await configUtils.initConfig(languages, testQueries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
@@ -397,7 +400,7 @@ ava_1.default("Queries in workflow file can be added to the set of queries witho
},
});
const languages = "javascript";
- const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries,
// once for each of additional1 and additional2,
@@ -425,7 +428,7 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
// This function just needs to be type-correct; it doesn't need to do anything,
// since we're deliberately passing in invalid data
const codeQL = codeql_1.setCodeQL({
- async resolveQueries(_queries, _extraSearchPath) {
+ async resolveQueries() {
return {
byLanguage: {
javascript: {},
@@ -436,7 +439,7 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
},
});
try {
- await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
t.fail("initConfig did not throw error");
}
catch (err) {
@@ -479,7 +482,7 @@ ava_1.default("API client used when reading remote config", async (t) => {
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript";
- await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
t.assert(spyGetContents.called);
});
});
@@ -489,7 +492,7 @@ ava_1.default("Remote config handles the case where a directory is provided", as
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
- await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -505,7 +508,7 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
- await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -517,7 +520,7 @@ ava_1.default("No detected languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
mockListLanguages([]);
try {
- await configUtils.initConfig(undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -529,7 +532,7 @@ ava_1.default("Unknown languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const languages = "ruby,english";
try {
- await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -554,7 +557,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, "utf8");
try {
- await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, sampleApiDetails, "runner", logging_1.getRunnerLogger(true));
+ await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
File diff suppressed because one or more lines are too long
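The expected Config object in these tests now includes the detected gitHubVersion alongside tempDir, toolCacheDir and codeQLCmd, so anything that reads the saved config back can see which GitHub variant the analysis was initialised against. A minimal sketch using only helpers that appear in this diff; tmpDir and logger are placeholders:

// Sketch only: reads the config saved by initConfig and inspects the new field.
const config = await configUtils.getConfig(tmpDir, logger);
if (config !== undefined) {
    logger.info(`Initialised against GitHub variant ${config.gitHubVersion.type}`);
}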
@@ -1,3 +1,3 @@
{
- "bundleVersion": "codeql-bundle-20201106"
+ "bundleVersion": "codeql-bundle-20210308"
}
19 lib/external-queries.js (generated)
@@ -14,7 +14,7 @@ const safeWhich = __importStar(require("@chrisgavin/safe-which"));
/**
* Check out repository at the given ref, and return the directory of the checkout.
*/
- async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, logger) {
+ async function checkoutExternalRepository(repository, ref, apiDetails, tempDir, logger) {
logger.info(`Checking out ${repository}`);
const checkoutLocation = path.join(tempDir, repository, ref);
if (!checkoutLocation.startsWith(tempDir)) {
@@ -22,10 +22,10 @@ async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, l
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
}
if (!fs.existsSync(checkoutLocation)) {
- const repoURL = `${githubUrl}/${repository}`;
+ const repoCloneURL = buildCheckoutURL(repository, apiDetails);
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
"clone",
- repoURL,
+ repoCloneURL,
checkoutLocation,
]).exec();
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
@@ -38,4 +38,17 @@ async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, l
return checkoutLocation;
}
exports.checkoutExternalRepository = checkoutExternalRepository;
+ function buildCheckoutURL(repository, apiDetails) {
+ const repoCloneURL = new URL(apiDetails.url);
+ if (apiDetails.externalRepoAuth !== undefined) {
+ repoCloneURL.username = "x-access-token";
+ repoCloneURL.password = apiDetails.externalRepoAuth;
+ }
+ if (!repoCloneURL.pathname.endsWith("/")) {
+ repoCloneURL.pathname += "/";
+ }
+ repoCloneURL.pathname += `${repository}`;
+ return repoCloneURL.toString();
+ }
+ exports.buildCheckoutURL = buildCheckoutURL;
//# sourceMappingURL=external-queries.js.map
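The new buildCheckoutURL helper above derives the git clone URL from apiDetails.url and, when externalRepoAuth is set, embeds x-access-token credentials into it. A minimal usage sketch; the expected outputs are the same ones asserted by the buildCheckoutURL tests further down:

const externalQueries = require("./external-queries");

// Without a token the repository path is simply appended to the server URL.
externalQueries.buildCheckoutURL("foo/bar", { url: "https://github.com", externalRepoAuth: undefined });
// -> "https://github.com/foo/bar"

// With a token git receives an authenticated URL using the x-access-token username.
externalQueries.buildCheckoutURL("foo/bar", { url: "https://github.example.com/", externalRepoAuth: "abc" });
// -> "https://x-access-token:abc@github.example.com/foo/bar"

Note that the guard is externalRepoAuth !== undefined, so passing an empty string still takes the credential branch.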
@@ -1 +1 @@
(generated source map for lib/external-queries.js: the single-line "mappings" data was regenerated for the changes above)
22 lib/external-queries.test.js (generated)
@@ -81,17 +81,35 @@ ava_1.default("checkoutExternalQueries", async (t) => {
const commit2Sha = await runGit(["rev-parse", "HEAD"]);
// Checkout the first commit, which should contain 'a' and 'b'
t.false(fs.existsSync(path.join(tmpDir, repoName)));
- await externalQueries.checkoutExternalRepository(repoName, commit1Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
+ await externalQueries.checkoutExternalRepository(repoName, commit1Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
// Checkout the second commit as well, which should only contain 'a'
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
- await externalQueries.checkoutExternalRepository(repoName, commit2Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
+ await externalQueries.checkoutExternalRepository(repoName, commit2Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
});
});
+ ava_1.default("buildCheckoutURL", (t) => {
+ t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
+ url: "https://github.com",
+ externalRepoAuth: undefined,
+ }), "https://github.com/foo/bar");
+ t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
+ url: "https://github.example.com/",
+ externalRepoAuth: undefined,
+ }), "https://github.example.com/foo/bar");
+ t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
+ url: "https://github.com",
+ externalRepoAuth: "abc",
+ }), "https://x-access-token:abc@github.com/foo/bar");
+ t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
+ url: "https://github.example.com/",
+ externalRepoAuth: "abc",
+ }), "https://x-access-token:abc@github.example.com/foo/bar");
+ });
//# sourceMappingURL=external-queries.test.js.map
@@ -1 +1 @@
(generated source map for lib/external-queries.test.js: the single-line "mappings" data was regenerated for the changes above)
23 lib/init-action.js (generated)
@@ -13,6 +13,7 @@ const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
+ const util_1 = require("./util");
async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
var _a;
const statusReportBase = await actionsUtil.createStatusReportBase("init", "success", startedAt);
@@ -53,23 +54,23 @@ async function run() {
let config;
let codeql;
let toolsVersion;
- try {
- actionsUtil.prepareLocalRunEnvironment();
- const workflowErrors = await actionsUtil.getWorkflowErrors();
- if (workflowErrors.length > 0) {
- core.warning(actionsUtil.formatWorkflowErrors(workflowErrors));
- }
- if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, actionsUtil.formatWorkflowCause(workflowErrors))))) {
- return;
- }
const apiDetails = {
auth: actionsUtil.getRequiredInput("token"),
+ externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
- const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", logger);
+ const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
+ util_1.checkGitHubVersionInRange(gitHubVersion, "actions", logger);
+ try {
+ actionsUtil.prepareLocalRunEnvironment();
+ const workflowErrors = await actionsUtil.validateWorkflow();
+ if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, workflowErrors)))) {
+ return;
+ }
+ const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getTemporaryDirectory(), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", gitHubVersion.type, logger);
codeql = initCodeQLResult.codeql;
toolsVersion = initCodeQLResult.toolsVersion;
- config = await init_1.initConfig(actionsUtil.getOptionalInput("languages"), actionsUtil.getOptionalInput("queries"), actionsUtil.getOptionalInput("config-file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, actionsUtil.getRequiredEnvParam("GITHUB_WORKSPACE"), apiDetails, "actions", logger);
+ config = await init_1.initConfig(actionsUtil.getOptionalInput("languages"), actionsUtil.getOptionalInput("queries"), actionsUtil.getOptionalInput("config-file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), actionsUtil.getTemporaryDirectory(), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, actionsUtil.getRequiredEnvParam("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
if (config.languages.includes(languages_1.Language.python) &&
actionsUtil.getRequiredInput("setup-python-dependencies") === "true") {
try {
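In init-action.js the API details (now including the optional external-repository-token input) are assembled before the main try block, the GitHub server version is fetched and validated, and its variant is threaded into initCodeQL and initConfig. A condensed sketch of that startup order, using only calls that appear in the diff above; error handling and status reporting are left out:

const apiDetails = {
    auth: actionsUtil.getRequiredInput("token"),
    externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
    url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);    // determine the server variant
util_1.checkGitHubVersionInRange(gitHubVersion, "actions", logger); // validate it for the "actions" mode
const initCodeQLResult = await init_1.initCodeQL(
    actionsUtil.getOptionalInput("tools"), apiDetails,
    actionsUtil.getTemporaryDirectory(),
    actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"),
    "actions", gitHubVersion.type, logger);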
@@ -1 +1 @@
(generated source map for lib/init-action.js: the single-line "mappings" data was regenerated for the changes above)
lib/init.js (generated, 12 changed lines)
@@ -16,17 +16,17 @@ const codeql_1 = require("./codeql");
 const configUtils = __importStar(require("./config-utils"));
 const tracer_config_1 = require("./tracer-config");
 const util = __importStar(require("./util"));
-async function initCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logger) {
+async function initCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, variant, logger) {
 logger.startGroup("Setup CodeQL tools");
-const { codeql, toolsVersion } = await codeql_1.setupCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logger);
+const { codeql, toolsVersion } = await codeql_1.setupCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, variant, logger);
 await codeql.printVersion();
 logger.endGroup();
 return { codeql, toolsVersion };
 }
 exports.initCodeQL = initCodeQL;
-async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger) {
+async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
 logger.startGroup("Load language configuration");
-const config = await configUtils.initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, apiDetails, mode, logger);
+const config = await configUtils.initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
 analysisPaths.printPathFiltersWarning(config, logger);
 logger.endGroup();
 return config;
@@ -60,6 +60,7 @@ async function injectWindowsTracer(processName, processLevel, config, codeql, tr
 
 $id = $PID
 while ($true) {
+Write-Host "Looking for process with id : $id"
 $p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
 Write-Host "Found process: $p"
 if ($p -eq $null) {
@@ -72,7 +73,8 @@ async function injectWindowsTracer(processName, processLevel, config, codeql, tr
 }
 }
 Write-Host "Final process: $p"
+Write-Host "Final pid: $id"
+Write-Host "Running: &$tracer --inject=$id"
 Invoke-Expression "&$tracer --inject=$id"`;
 }
 else {

lib/init.js.map (generated): @@ -1 +1 @@ regenerated source map.
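Note on the lib/init.js hunks above: initCodeQL now also receives the GitHub variant and forwards it to setupCodeQL, while initConfig takes the resolved gitHubVersion in place of the mode argument. The variant values come from the GitHubVariant enum added to lib/util.js later in this diff; the sketch below is a simplified TypeScript rendering of the shape being threaded, for orientation only (the union type is inferred from how the version objects are constructed in util.js, not quoted from the sources).

// Simplified sketch of the value threaded through initCodeQL / initConfig.
// The enum is defined in lib/util.js below; the union shape is an assumption.
enum GitHubVariant {
  DOTCOM = 0,
  GHES = 1,
  GHAE = 2,
}

type GitHubVersion =
  | { type: GitHubVariant.DOTCOM }
  | { type: GitHubVariant.GHAE }
  | { type: GitHubVariant.GHES; version: string };

// As in the runner hunks below, the version is resolved once and its variant
// is handed to the tools setup:
//   const gitHubVersion = await getGitHubVersion(apiDetails);
//   initCodeQL(undefined, apiDetails, tempDir, toolsDir, "runner", gitHubVersion.type, logger);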
lib/runner.js (generated, 35 changed lines)
@@ -82,7 +82,8 @@ program
 .description("Initializes CodeQL")
 .requiredOption("--repository <repository>", "Repository name. (Required)")
 .requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
-.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
+.option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
+.option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
 .option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.")
 .option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.")
 .option("--config-file <file>", "Path to config file.")
@@ -103,18 +104,22 @@ program
 logger.info(`Cleaning temp directory ${tempDir}`);
 fs.rmdirSync(tempDir, { recursive: true });
 fs.mkdirSync(tempDir, { recursive: true });
+const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
 const apiDetails = {
-auth: cmd.githubAuth,
+auth,
+externalRepoAuth: auth,
 url: util_1.parseGithubUrl(cmd.githubUrl),
 };
+const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
+util_1.checkGitHubVersionInRange(gitHubVersion, "runner", logger);
 let codeql;
 if (cmd.codeqlPath !== undefined) {
 codeql = codeql_1.getCodeQL(cmd.codeqlPath);
 }
 else {
-codeql = (await init_1.initCodeQL(undefined, apiDetails, tempDir, toolsDir, "runner", logger)).codeql;
+codeql = (await init_1.initCodeQL(undefined, apiDetails, tempDir, toolsDir, "runner", gitHubVersion.type, logger)).codeql;
 }
-const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), apiDetails, "runner", logger);
+const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
 const tracerConfig = await init_1.runInit(codeql, config);
 if (tracerConfig === undefined) {
 return;
@@ -203,7 +208,8 @@ program
 .requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
 .requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
 .requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
-.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
+.option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
+.option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
 .option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
 .option("--no-upload", "Do not upload results after analysis.")
 .option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
@@ -223,11 +229,17 @@ program
 throw new Error("Config file could not be found at expected location. " +
 "Was the 'init' command run with the same '--temp-dir' argument as this command.");
 }
+const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
 const apiDetails = {
-auth: cmd.githubAuth,
+auth,
 url: util_1.parseGithubUrl(cmd.githubUrl),
 };
-await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, apiDetails, cmd.upload, "runner", outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
+await analyze_1.runAnalyze(outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
+if (!cmd.upload) {
+logger.info("Not uploading results");
+return;
+}
+await upload_lib.uploadFromRunner(outputDir, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.checkoutPath || process.cwd(), config.gitHubVersion, apiDetails, logger);
 }
 catch (e) {
 logger.error("Analyze failed");
@@ -243,17 +255,20 @@ program
 .requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
 .requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
 .requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
-.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
+.option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
+.option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
 .option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
 .option("--debug", "Print more verbose output", false)
 .action(async (cmd) => {
 const logger = logging_1.getRunnerLogger(cmd.debug);
+const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
 const apiDetails = {
-auth: cmd.githubAuth,
+auth,
 url: util_1.parseGithubUrl(cmd.githubUrl),
 };
 try {
-await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, apiDetails, "runner", logger);
+const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
+await upload_lib.uploadFromRunner(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
 }
 catch (e) {
 logger.error("Upload failed");
File diff suppressed because one or more lines are too long
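Note on the runner changes above: every subcommand now takes `--github-auth-stdin` and keeps `--github-auth <auth>` only as a deprecated, insecure fallback, with the token resolved through a single util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin) call. The TypeScript sketch below shows one way such a resolver could read the token from stdin; the helper name and exact behaviour are illustrative assumptions, not the repository's implementation.

import * as readline from "readline";

// Hypothetical sketch: prefer a token piped on stdin, fall back to the
// deprecated --github-auth flag value, and warn when the flag is used.
async function resolveGitHubAuth(
  flagValue: string | undefined,
  fromStdin: boolean,
  warn: (msg: string) => void
): Promise<string> {
  if (fromStdin) {
    const lines: string[] = [];
    const rl = readline.createInterface({ input: process.stdin });
    for await (const line of rl) {
      lines.push(line);
    }
    const token = lines.join("\n").trim();
    if (token === "") {
      throw new Error("No token received on stdin");
    }
    return token;
  }
  if (flagValue !== undefined) {
    warn("--github-auth is insecure (the token is visible in the process list); prefer --github-auth-stdin");
    return flagValue;
  }
  throw new Error("No GitHub authentication provided");
}

// Usage idea: echo "$TOKEN" | <runner binary> init --github-auth-stdin ...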
lib/tracer-config.js (generated, 6 changed lines)
@@ -141,6 +141,12 @@ async function getCombinedTracerConfig(config, codeql) {
 else if (process.platform !== "win32") {
 mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
 }
+// On macos it's necessary to prefix the build command with the runner exectuable
+// on order to trace when System Integrity Protection is enabled.
+// The exectuable also exists and works for other platforms so we output this env
+// var with a path to the runner regardless so it's always available.
+const runnerExeName = process.platform === "win32" ? "runner.exe" : "runner";
+mainTracerConfig.env["CODEQL_RUNNER"] = path.join(mainTracerConfig.env["CODEQL_DIST"], "tools", mainTracerConfig.env["CODEQL_PLATFORM"], runnerExeName);
 return mainTracerConfig;
 }
 exports.getCombinedTracerConfig = getCombinedTracerConfig;
File diff suppressed because one or more lines are too long
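Note on the tracer-config change above: getCombinedTracerConfig now always exports CODEQL_RUNNER, pointing at the platform-specific runner executable inside the CodeQL distribution, built from the CODEQL_DIST and CODEQL_PLATFORM values already present in the tracer environment. A minimal sketch of that path construction, assuming those two env keys are set:

import * as path from "path";

// Sketch: derive the runner executable path from the distribution root and
// platform reported by the tracer environment, as the hunk above does.
function codeqlRunnerPath(env: { [key: string]: string }): string {
  const exeName = process.platform === "win32" ? "runner.exe" : "runner";
  return path.join(env["CODEQL_DIST"], "tools", env["CODEQL_PLATFORM"], exeName);
}

// codeqlRunnerPath({ CODEQL_DIST: "/opt/codeql", CODEQL_PLATFORM: "linux64" })
// returns "/opt/codeql/tools/linux64/runner"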
lib/tracer-config.test.js (generated, 22 changed lines)
@@ -29,6 +29,7 @@ function getTestConfig(tmpDir) {
 tempDir: tmpDir,
 toolCacheDir: tmpDir,
 codeQLCmd: "",
+gitHubVersion: { type: util.GitHubVariant.DOTCOM },
 };
 }
 // A very minimal setup
@@ -237,6 +238,7 @@ ava_1.default("getCombinedTracerConfig - return undefined when no languages are
 async getTracerEnv() {
 return {
 ODASA_TRACER_CONFIGURATION: "abc",
+CODEQL_DIST: "/",
 foo: "bar",
 };
 },
@@ -249,17 +251,28 @@ ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
 const config = getTestConfig(tmpDir);
 const spec = path.join(tmpDir, "spec");
 fs.writeFileSync(spec, "foo.log\n2\nabc\ndef");
+const bundlePath = path.join(tmpDir, "bundle");
+const codeqlPlatform = process.platform === "win32"
+? "win64"
+: process.platform === "darwin"
+? "osx64"
+: "linux64";
 const codeQL = codeql_1.setCodeQL({
 async getTracerEnv() {
 return {
 ODASA_TRACER_CONFIGURATION: spec,
+CODEQL_DIST: bundlePath,
+CODEQL_PLATFORM: codeqlPlatform,
 foo: "bar",
 };
 },
 });
 const result = await tracer_config_1.getCombinedTracerConfig(config, codeQL);
+t.notDeepEqual(result, undefined);
 const expectedEnv = {
 foo: "bar",
+CODEQL_DIST: bundlePath,
+CODEQL_PLATFORM: codeqlPlatform,
 ODASA_TRACER_CONFIGURATION: result.spec,
 };
 if (process.platform === "darwin") {
@@ -268,6 +281,15 @@ ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
 else if (process.platform !== "win32") {
 expectedEnv["LD_PRELOAD"] = path.join(path.dirname(codeQL.getPath()), "tools", "linux64", "${LIB}trace.so");
 }
+if (process.platform === "win32") {
+expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/win64/runner.exe");
+}
+else if (process.platform === "darwin") {
+expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/osx64/runner");
+}
+else {
+expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/linux64/runner");
+}
 t.deepEqual(result, {
 spec: path.join(tmpDir, "compound-spec"),
 env: expectedEnv,
File diff suppressed because one or more lines are too long
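The updated test derives the expected CODEQL_RUNNER location from the same platform naming the bundle layout uses: Node's win32 maps to win64, darwin to osx64 and everything else to linux64, with the .exe suffix only on Windows. A compact sketch of that mapping, assuming these three bundle platforms are the only ones involved, as in the test:

import * as path from "path";

// Sketch: map process.platform onto the bundle's platform directory and
// build the runner path the test expects.
function expectedRunnerPath(bundlePath: string, platform: NodeJS.Platform): string {
  const bundlePlatform =
    platform === "win32" ? "win64" : platform === "darwin" ? "osx64" : "linux64";
  const exeName = platform === "win32" ? "runner.exe" : "runner";
  return path.join(bundlePath, "tools", bundlePlatform, exeName);
}

// expectedRunnerPath("/tmp/bundle", "linux") returns "/tmp/bundle/tools/linux64/runner"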
lib/upload-lib.js (generated, 124 changed lines)
@@ -16,8 +16,11 @@ const zlib_1 = __importDefault(require("zlib"));
 const core = __importStar(require("@actions/core"));
 const file_url_1 = __importDefault(require("file-url"));
 const jsonschema = __importStar(require("jsonschema"));
+const semver = __importStar(require("semver"));
+const actionsUtil = __importStar(require("./actions-util"));
 const api = __importStar(require("./api-client"));
 const fingerprints = __importStar(require("./fingerprints"));
+const repository_1 = require("./repository");
 const sharedEnv = __importStar(require("./shared-environment"));
 const util = __importStar(require("./util"));
 // Takes a list of paths to sarif files and combines them together,
@@ -50,7 +53,7 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, mode, logger) {
 if (testMode) {
 return;
 }
-const client = api.getApiClient(apiDetails, mode, logger);
+const client = api.getApiClient(apiDetails);
 const reqURL = mode === "actions"
 ? "PUT /repos/:owner/:repo/code-scanning/analysis"
 : "POST /repos/:owner/:repo/code-scanning/sarifs";
@@ -62,32 +65,55 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, mode, logger) {
 logger.debug(`response status: ${response.status}`);
 logger.info("Successfully uploaded results");
 }
+// Recursively walks a directory and returns all SARIF files it finds.
+// Does not follow symlinks.
+function findSarifFilesInDir(sarifPath) {
+const sarifFiles = [];
+const walkSarifFiles = (dir) => {
+const entries = fs.readdirSync(dir, { withFileTypes: true });
+for (const entry of entries) {
+if (entry.isFile() && entry.name.endsWith(".sarif")) {
+sarifFiles.push(path.resolve(dir, entry.name));
+}
+else if (entry.isDirectory()) {
+walkSarifFiles(path.resolve(dir, entry.name));
+}
+}
+};
+walkSarifFiles(sarifPath);
+return sarifFiles;
+}
+exports.findSarifFilesInDir = findSarifFilesInDir;
 // Uploads a single sarif file or a directory of sarif files
 // depending on what the path happens to refer to.
 // Returns true iff the upload occurred and succeeded
-async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, apiDetails, mode, logger) {
+async function uploadFromActions(sarifPath, gitHubVersion, apiDetails, logger) {
-const sarifFiles = [];
+return await uploadFiles(getSarifFilePaths(sarifPath), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, "actions", logger);
+}
+exports.uploadFromActions = uploadFromActions;
+// Uploads a single sarif file or a directory of sarif files
+// depending on what the path happens to refer to.
+// Returns true iff the upload occurred and succeeded
+async function uploadFromRunner(sarifPath, repositoryNwo, commitOid, ref, checkoutPath, gitHubVersion, apiDetails, logger) {
+return await uploadFiles(getSarifFilePaths(sarifPath), repositoryNwo, commitOid, ref, undefined, undefined, undefined, checkoutPath, undefined, gitHubVersion, apiDetails, "runner", logger);
+}
+exports.uploadFromRunner = uploadFromRunner;
+function getSarifFilePaths(sarifPath) {
 if (!fs.existsSync(sarifPath)) {
 throw new Error(`Path does not exist: ${sarifPath}`);
 }
+let sarifFiles;
 if (fs.lstatSync(sarifPath).isDirectory()) {
-const paths = fs
+sarifFiles = findSarifFilesInDir(sarifPath);
-.readdirSync(sarifPath)
-.filter((f) => f.endsWith(".sarif"))
-.map((f) => path.resolve(sarifPath, f));
-for (const filepath of paths) {
-sarifFiles.push(filepath);
-}
 if (sarifFiles.length === 0) {
 throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
 }
 }
 else {
-sarifFiles.push(sarifPath);
+sarifFiles = [sarifPath];
 }
-return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, apiDetails, mode, logger);
+return sarifFiles;
 }
-exports.upload = upload;
 // Counts the number of results in the given SARIF file
 function countResultsInSarif(sarif) {
 let numResults = 0;
@@ -117,9 +143,50 @@ function validateSarifFileSchema(sarifFilePath, logger) {
 }
 }
 exports.validateSarifFileSchema = validateSarifFileSchema;
+// buildPayload constructs a map ready to be uploaded to the API from the given
+// parameters, respecting the current mode and target GitHub instance version.
+function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, mode) {
+if (mode === "actions") {
+const payloadObj = {
+commit_oid: commitOid,
+ref,
+analysis_key: analysisKey,
+analysis_name: analysisName,
+sarif: zippedSarif,
+workflow_run_id: workflowRunID,
+checkout_uri: checkoutURI,
+environment,
+started_at: process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
+tool_names: toolNames,
+base_ref: undefined,
+base_sha: undefined,
+};
+// This behaviour can be made the default when support for GHES 3.0 is discontinued.
+if (gitHubVersion.type !== util.GitHubVariant.GHES ||
+semver.satisfies(gitHubVersion.version, `>=3.1`)) {
+if (process.env.GITHUB_EVENT_NAME === "pull_request" &&
+process.env.GITHUB_EVENT_PATH) {
+const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
+payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
+payloadObj.base_sha = githubEvent.pull_request.base.sha;
+}
+}
+return payloadObj;
+}
+else {
+return {
+commit_sha: commitOid,
+ref,
+sarif: zippedSarif,
+checkout_uri: checkoutURI,
+tool_name: toolNames[0],
+};
+}
+}
+exports.buildPayload = buildPayload;
 // Uploads the given set of sarif files.
 // Returns true iff the upload occurred and succeeded
-async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, apiDetails, mode, logger) {
+async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, gitHubVersion, apiDetails, mode, logger) {
 logger.info(`Uploading sarif files: ${JSON.stringify(sarifFiles)}`);
 if (mode === "actions") {
 // This check only works on actions as env vars don't persist between calls to the runner
@@ -135,37 +202,14 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
 }
 let sarifPayload = combineSarifFiles(sarifFiles);
 sarifPayload = fingerprints.addFingerprints(sarifPayload, checkoutPath, logger);
-const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
+const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
 const checkoutURI = file_url_1.default(checkoutPath);
 const toolNames = util.getToolNames(sarifPayload);
-let payload;
+const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, mode);
-if (mode === "actions") {
-payload = JSON.stringify({
-commit_oid: commitOid,
-ref,
-analysis_key: analysisKey,
-analysis_name: analysisName,
-sarif: zipped_sarif,
-workflow_run_id: workflowRunID,
-checkout_uri: checkoutURI,
-environment,
-started_at: process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
-tool_names: toolNames,
-});
-}
-else {
-payload = JSON.stringify({
-commit_sha: commitOid,
-ref,
-sarif: zipped_sarif,
-checkout_uri: checkoutURI,
-tool_name: toolNames[0],
-});
-}
 // Log some useful debug info about the info
 const rawUploadSizeBytes = sarifPayload.length;
 logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-const zippedUploadSizeBytes = zipped_sarif.length;
+const zippedUploadSizeBytes = zippedSarif.length;
 logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
 const numResultInSarif = countResultsInSarif(sarifPayload);
 logger.debug(`Number of results in upload: ${numResultInSarif}`);
File diff suppressed because one or more lines are too long
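Note on buildPayload above: base_ref and base_sha are filled in from the pull_request event only when the target is github.com, GHAE, or GHES 3.1 and later; GHES 3.0 and older do not expect those fields, which is what the semver.satisfies check encodes. A small sketch of that gate, using a simplified version shape (assumption: only the GHES branch carries a version string, as in the diff):

import * as semver from "semver";

// Simplified stand-in for the GitHubVersion value used in the diff.
type GitHubVersionSketch = { type: "DOTCOM" | "GHAE" } | { type: "GHES"; version: string };

// Sketch: may the Actions upload payload carry base_ref / base_sha?
function supportsBaseRefFields(v: GitHubVersionSketch): boolean {
  return v.type !== "GHES" || semver.satisfies(v.version, ">=3.1");
}

// supportsBaseRefFields({ type: "GHES", version: "3.0.0" }) === false
// supportsBaseRefFields({ type: "GHES", version: "3.1.0" }) === true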
lib/upload-lib.test.js (generated, 65 changed lines)
@@ -1,7 +1,4 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
@@ -9,11 +6,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
 result["default"] = mod;
 return result;
 };
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
 const ava_1 = __importDefault(require("ava"));
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const uploadLib = __importStar(require("./upload-lib"));
+const util_1 = require("./util");
 testing_utils_1.setupTests(ava_1.default);
 ava_1.default("validateSarifFileSchema - valid", (t) => {
 const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
@@ -23,4 +26,60 @@ ava_1.default("validateSarifFileSchema - invalid", (t) => {
 const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`;
 t.throws(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getRunnerLogger(true)));
 });
+ava_1.default("validate correct payload used per version", async (t) => {
+const newVersions = [
+{ type: util_1.GitHubVariant.DOTCOM },
+{ type: util_1.GitHubVariant.GHES, version: "3.1.0" },
+];
+const oldVersions = [
+{ type: util_1.GitHubVariant.GHES, version: "2.22.1" },
+{ type: util_1.GitHubVariant.GHES, version: "3.0.0" },
+];
+const allVersions = newVersions.concat(oldVersions);
+process.env["GITHUB_EVENT_NAME"] = "push";
+for (const version of allVersions) {
+const payload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions");
+// Not triggered by a pull request
+t.falsy(payload.base_ref);
+t.falsy(payload.base_sha);
+}
+process.env["GITHUB_EVENT_NAME"] = "pull_request";
+process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`;
+for (const version of newVersions) {
+const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions");
+t.deepEqual(payload.base_ref, "refs/heads/master");
+t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
+}
+for (const version of oldVersions) {
+const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions");
+// These older versions won't expect these values
+t.falsy(payload.base_ref);
+t.falsy(payload.base_sha);
+}
+});
+ava_1.default("finding SARIF files", async (t) => {
+await util_1.withTmpDir(async (tmpDir) => {
+// include a couple of sarif files
+fs.writeFileSync(path.join(tmpDir, "a.sarif"), "");
+fs.writeFileSync(path.join(tmpDir, "b.sarif"), "");
+// other random files shouldn't be returned
+fs.writeFileSync(path.join(tmpDir, "c.foo"), "");
+// we should recursively look in subdirectories
+fs.mkdirSync(path.join(tmpDir, "dir1"));
+fs.writeFileSync(path.join(tmpDir, "dir1", "d.sarif"), "");
+fs.mkdirSync(path.join(tmpDir, "dir1", "dir2"));
+fs.writeFileSync(path.join(tmpDir, "dir1", "dir2", "e.sarif"), "");
+// we should ignore symlinks
+fs.mkdirSync(path.join(tmpDir, "dir3"));
+fs.symlinkSync(tmpDir, path.join(tmpDir, "dir3", "symlink1"), "dir");
+fs.symlinkSync(path.join(tmpDir, "a.sarif"), path.join(tmpDir, "dir3", "symlink2.sarif"), "file");
+const sarifFiles = uploadLib.findSarifFilesInDir(tmpDir);
+t.deepEqual(sarifFiles, [
+path.join(tmpDir, "a.sarif"),
+path.join(tmpDir, "b.sarif"),
+path.join(tmpDir, "dir1", "d.sarif"),
+path.join(tmpDir, "dir1", "dir2", "e.sarif"),
+]);
+});
+});
 //# sourceMappingURL=upload-lib.test.js.map

lib/upload-lib.test.js.map (generated): @@ -1 +1 @@ regenerated source map.
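The new "finding SARIF files" test above pins down findSarifFilesInDir: .sarif files are collected recursively, other extensions are skipped, and symlinked files and directories are ignored because the walk only descends into real directories. An illustrative use of the exported helper (the directory layout is hypothetical):

import * as uploadLib from "./upload-lib";

// Hypothetical layout:
//   results/
//     a.sarif               collected
//     notes.txt             skipped (wrong extension)
//     nested/b.sarif        collected via recursion
//     link -> /elsewhere    skipped (symlinks are not followed)
const sarifFiles = uploadLib.findSarifFilesInDir("results");
// sarifFiles ends up holding absolute paths to a.sarif and nested/b.sarif only.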
lib/upload-sarif-action.js (generated, 5 changed lines)
@@ -10,8 +10,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
 const actionsUtil = __importStar(require("./actions-util"));
 const logging_1 = require("./logging");
-const repository_1 = require("./repository");
 const upload_lib = __importStar(require("./upload-lib"));
+const util_1 = require("./util");
 async function sendSuccessStatusReport(startedAt, uploadStats) {
 const statusReportBase = await actionsUtil.createStatusReportBase("upload-sarif", "success", startedAt);
 const statusReport = {
@@ -30,7 +30,8 @@ async function run() {
 auth: actionsUtil.getRequiredInput("token"),
 url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
 };
-const uploadStats = await upload_lib.upload(actionsUtil.getRequiredInput("sarif_file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), apiDetails, "actions", logging_1.getActionsLogger());
+const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
+const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, logging_1.getActionsLogger());
 await sendSuccessStatusReport(startedAt, uploadStats);
 }
 catch (error) {

lib/upload-sarif-action.js.map (generated): @@ -1 +1 @@ regenerated source map.
136
lib/util.js
generated
136
lib/util.js
generated
@@ -10,6 +10,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const fs = __importStar(require("fs"));
 const os = __importStar(require("os"));
 const path = __importStar(require("path"));
+const core = __importStar(require("@actions/core"));
+const semver = __importStar(require("semver"));
+const api_client_1 = require("./api-client");
+const apiCompatibility = __importStar(require("./api-compatibility.json"));
 /**
  * The URL for github.com.
  */
@@ -68,9 +72,21 @@ async function withTmpDir(body) {
     return result;
 }
 exports.withTmpDir = withTmpDir;
+/**
+ * Gets an OS-specific amount of memory (in MB) to reserve for OS processes
+ * when the user doesn't explicitly specify a memory setting.
+ * This is a heuristic to avoid OOM errors (exit code 137 / SIGKILL)
+ * from committing too much of the available memory to CodeQL.
+ * @returns number
+ */
+function getSystemReservedMemoryMegaBytes() {
+    // Windows needs more memory for OS processes.
+    return 1024 * (process.platform === "win32" ? 1.5 : 1);
+}
 /**
  * Get the codeql `--ram` flag as configured by the `ram` input. If no value was
- * specified, the total available memory will be used minus 256 MB.
+ * specified, the total available memory will be used minus a threshold
+ * reserved for the OS.
  *
  * @returns string
  */
@@ -85,8 +101,8 @@ function getMemoryFlag(userInput) {
     else {
         const totalMemoryBytes = os.totalmem();
         const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
-        const systemReservedMemoryMegaBytes = 256;
-        memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
+        const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes();
+        memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
    }
     return `--ram=${Math.floor(memoryToUseMegaBytes)}`;
 }
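The hunks above replace the fixed 256 MB reservation with a platform-dependent threshold. As a rough illustration of the resulting `--ram` value (not part of this diff; the 16 GB machine size is hypothetical):

```js
// Illustration only: how getMemoryFlag() resolves when no `ram` input is given.
const os = require("os");

function getSystemReservedMemoryMegaBytes() {
  // Windows needs more memory for OS processes (1.5 GB instead of 1 GB).
  return 1024 * (process.platform === "win32" ? 1.5 : 1);
}

const totalMemoryMegaBytes = os.totalmem() / (1024 * 1024); // e.g. 16384 on a 16 GB runner
const ram = Math.floor(totalMemoryMegaBytes - getSystemReservedMemoryMegaBytes());
console.log(`--ram=${ram}`); // e.g. --ram=15360 on Linux/macOS, --ram=14848 on Windows
```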
@@ -190,4 +206,118 @@ function parseGithubUrl(inputUrl) {
     return url.toString();
 }
 exports.parseGithubUrl = parseGithubUrl;
+const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
+const CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR = "CODEQL_ACTION_WARNED_ABOUT_VERSION";
+let hasBeenWarnedAboutVersion = false;
+var GitHubVariant;
+(function (GitHubVariant) {
+    GitHubVariant[GitHubVariant["DOTCOM"] = 0] = "DOTCOM";
+    GitHubVariant[GitHubVariant["GHES"] = 1] = "GHES";
+    GitHubVariant[GitHubVariant["GHAE"] = 2] = "GHAE";
+})(GitHubVariant = exports.GitHubVariant || (exports.GitHubVariant = {}));
+async function getGitHubVersion(apiDetails) {
+    // We can avoid making an API request in the standard dotcom case
+    if (parseGithubUrl(apiDetails.url) === exports.GITHUB_DOTCOM_URL) {
+        return { type: GitHubVariant.DOTCOM };
+    }
+    // Doesn't strictly have to be the meta endpoint as we're only
+    // using the response headers which are available on every request.
+    const apiClient = api_client_1.getApiClient(apiDetails);
+    const response = await apiClient.meta.get();
+    // This happens on dotcom, although we expect to have already returned in that
+    // case. This can also serve as a fallback in cases we haven't foreseen.
+    if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined) {
+        return { type: GitHubVariant.DOTCOM };
+    }
+    if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "GitHub AE") {
+        return { type: GitHubVariant.GHAE };
+    }
+    const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER];
+    return { type: GitHubVariant.GHES, version };
+}
+exports.getGitHubVersion = getGitHubVersion;
+function checkGitHubVersionInRange(version, mode, logger) {
+    if (hasBeenWarnedAboutVersion || version.type !== GitHubVariant.GHES) {
+        return;
+    }
+    const disallowedAPIVersionReason = apiVersionInRange(version.version, apiCompatibility.minimumVersion, apiCompatibility.maximumVersion);
+    const toolName = mode === "actions" ? "Action" : "Runner";
+    if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_OLD) {
+        logger.warning(`The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${version.version}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`);
+    }
+    if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_NEW) {
+        logger.warning(`GitHub Enterprise ${version.version} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`);
+    }
+    hasBeenWarnedAboutVersion = true;
+    if (mode === "actions") {
+        core.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true);
+    }
+}
+exports.checkGitHubVersionInRange = checkGitHubVersionInRange;
+var DisallowedAPIVersionReason;
+(function (DisallowedAPIVersionReason) {
+    DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
+    DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
+})(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
+function apiVersionInRange(version, minimumVersion, maximumVersion) {
+    if (!semver.satisfies(version, `>=${minimumVersion}`)) {
+        return DisallowedAPIVersionReason.ACTION_TOO_NEW;
+    }
+    if (!semver.satisfies(version, `<=${maximumVersion}`)) {
+        return DisallowedAPIVersionReason.ACTION_TOO_OLD;
+    }
+    return undefined;
+}
+exports.apiVersionInRange = apiVersionInRange;
+/**
+ * Retrieves the github auth token for use with the runner. There are
+ * three possible locations for the token:
+ *
+ * 1. from the cli (considered insecure)
+ * 2. from stdin
+ * 3. from the GITHUB_TOKEN environment variable
+ *
+ * If both 1 & 2 are specified, then an error is thrown.
+ * If 1 & 3 or 2 & 3 are specified, then the environment variable is ignored.
+ *
+ * @param githubAuth a github app token or PAT
+ * @param fromStdIn read the github app token or PAT from stdin up to, but excluding the first whitespace
+ * @param readable the readable stream to use for getting the token (defaults to stdin)
+ *
+ * @return a promise resolving to the auth token.
+ */
+async function getGitHubAuth(logger, githubAuth, fromStdIn, readable = process.stdin) {
+    if (githubAuth && fromStdIn) {
+        throw new Error("Cannot specify both `--github-auth` and `--github-auth-stdin`. Please use `--github-auth-stdin`, which is more secure.");
+    }
+    if (githubAuth) {
+        logger.warning("Using `--github-auth` via the CLI is insecure. Use `--github-auth-stdin` instead.");
+        return githubAuth;
+    }
+    if (fromStdIn) {
+        return new Promise((resolve, reject) => {
+            let token = "";
+            readable.on("data", (data) => {
+                token += data.toString("utf8");
+            });
+            readable.on("end", () => {
+                token = token.split(/\s+/)[0].trim();
+                if (token) {
+                    resolve(token);
+                }
+                else {
+                    reject(new Error("Standard input is empty"));
+                }
+            });
+            readable.on("error", (err) => {
+                reject(err);
+            });
+        });
+    }
+    if (process.env.GITHUB_TOKEN) {
+        return process.env.GITHUB_TOKEN;
+    }
+    throw new Error("No GitHub authentication token was specified. Please provide a token via the GITHUB_TOKEN environment variable, or by adding the `--github-auth-stdin` flag and passing the token via standard input.");
+}
+exports.getGitHubAuth = getGitHubAuth;
 //# sourceMappingURL=util.js.map
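The new `getGitHubAuth` helper prefers `--github-auth-stdin` over passing a token on the command line, and falls back to `GITHUB_TOKEN`. A minimal sketch of feeding it a token through a readable stream, mirroring the unit tests in lib/util.test.js below (illustration only; the token value is hypothetical):

```js
// Illustration only: supply a token via a stream instead of the real process.stdin.
const stream = require("stream");
const util = require("./util");

const mockLogger = { warning: (msg) => console.warn(msg) };
const stdin = stream.Readable.from(["ghp_example\n"]); // hypothetical token

util.getGitHubAuth(mockLogger, undefined, true, stdin)
    .then((token) => console.log(`token read from stdin: ${token}`));
```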
File diff suppressed because one or more lines are too long
87 lib/util.test.js generated
@@ -12,7 +12,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 const fs = __importStar(require("fs"));
 const os = __importStar(require("os"));
+const stream = __importStar(require("stream"));
+const github = __importStar(require("@actions/github"));
 const ava_1 = __importDefault(require("ava"));
+const sinon_1 = __importDefault(require("sinon"));
+const api = __importStar(require("./api-client"));
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
@@ -24,9 +28,10 @@ ava_1.default("getToolNames", (t) => {
 });
 ava_1.default("getMemoryFlag() should return the correct --ram flag", (t) => {
     const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
+    const expectedThreshold = process.platform === "win32" ? 1536 : 1024;
     const tests = [
-        [undefined, `--ram=${totalMem - 256}`],
-        ["", `--ram=${totalMem - 256}`],
+        [undefined, `--ram=${totalMem - expectedThreshold}`],
+        ["", `--ram=${totalMem - expectedThreshold}`],
         ["512", "--ram=512"],
     ];
     for (const [input, expectedFlag] of tests) {
@@ -121,4 +126,82 @@ ava_1.default("parseGithubUrl", (t) => {
         message: '"http:///::::433" is not a valid URL',
     });
 });
+ava_1.default("allowed API versions", async (t) => {
+    t.is(util.apiVersionInRange("1.33.0", "1.33", "2.0"), undefined);
+    t.is(util.apiVersionInRange("1.33.1", "1.33", "2.0"), undefined);
+    t.is(util.apiVersionInRange("1.34.0", "1.33", "2.0"), undefined);
+    t.is(util.apiVersionInRange("2.0.0", "1.33", "2.0"), undefined);
+    t.is(util.apiVersionInRange("2.0.1", "1.33", "2.0"), undefined);
+    t.is(util.apiVersionInRange("1.32.0", "1.33", "2.0"), util.DisallowedAPIVersionReason.ACTION_TOO_NEW);
+    t.is(util.apiVersionInRange("2.1.0", "1.33", "2.0"), util.DisallowedAPIVersionReason.ACTION_TOO_OLD);
+});
+function mockGetMetaVersionHeader(versionHeader) {
+    // Passing an auth token is required, so we just use a dummy value
+    const client = github.getOctokit("123");
+    const response = {
+        headers: {
+            "x-github-enterprise-version": versionHeader,
+        },
+    };
+    const spyGetContents = sinon_1.default
+        .stub(client.meta, "get")
+        .resolves(response);
+    sinon_1.default.stub(api, "getApiClient").value(() => client);
+    return spyGetContents;
+}
+ava_1.default("getGitHubVersion", async (t) => {
+    const v = await util.getGitHubVersion({
+        auth: "",
+        url: "https://github.com",
+    });
+    t.deepEqual(util.GitHubVariant.DOTCOM, v.type);
+    mockGetMetaVersionHeader("2.0");
+    const v2 = await util.getGitHubVersion({
+        auth: "",
+        url: "https://ghe.example.com",
+    });
+    t.deepEqual({ type: util.GitHubVariant.GHES, version: "2.0" }, v2);
+    mockGetMetaVersionHeader("GitHub AE");
+    const ghae = await util.getGitHubVersion({
+        auth: "",
+        url: "https://example.githubenterprise.com",
+    });
+    t.deepEqual({ type: util.GitHubVariant.GHAE }, ghae);
+    mockGetMetaVersionHeader(undefined);
+    const v3 = await util.getGitHubVersion({
+        auth: "",
+        url: "https://ghe.example.com",
+    });
+    t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
+});
+ava_1.default("getGitHubAuth", async (t) => {
+    const msgs = [];
+    const mockLogger = {
+        warning: (msg) => msgs.push(msg),
+    };
+    // eslint-disable-next-line @typescript-eslint/no-floating-promises
+    t.throwsAsync(async () => util.getGitHubAuth(mockLogger, "abc", true));
+    process.env.GITHUB_TOKEN = "123";
+    t.is("123", await util.getGitHubAuth(mockLogger, undefined, undefined));
+    t.is(msgs.length, 0);
+    t.is("abc", await util.getGitHubAuth(mockLogger, "abc", undefined));
+    t.is(msgs.length, 1); // warning expected
+    msgs.length = 0;
+    await mockStdInForAuth(t, mockLogger, "def", "def");
+    await mockStdInForAuth(t, mockLogger, "def", "", "def");
+    await mockStdInForAuth(t, mockLogger, "def", "def\n some extra garbage", "ghi");
+    await mockStdInForAuth(t, mockLogger, "defghi", "def", "ghi\n123");
+    await mockStdInForAuthExpectError(t, mockLogger, "");
+    await mockStdInForAuthExpectError(t, mockLogger, "", " ", "abc");
+    await mockStdInForAuthExpectError(t, mockLogger, " def\n some extra garbage", "ghi");
+    t.is(msgs.length, 0);
+});
+async function mockStdInForAuth(t, mockLogger, expected, ...text) {
+    const stdin = stream.Readable.from(text);
+    t.is(expected, await util.getGitHubAuth(mockLogger, undefined, true, stdin));
+}
+async function mockStdInForAuthExpectError(t, mockLogger, ...text) {
+    const stdin = stream.Readable.from(text);
+    await t.throwsAsync(async () => util.getGitHubAuth(mockLogger, undefined, true, stdin));
+}
 //# sourceMappingURL=util.test.js.map
File diff suppressed because one or more lines are too long
5555 node_modules/.package-lock.json generated vendored Normal file
File diff suppressed because it is too large
94 node_modules/decode-uri-component/index.js generated vendored Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
'use strict';
|
||||||
|
var token = '%[a-f0-9]{2}';
|
||||||
|
var singleMatcher = new RegExp(token, 'gi');
|
||||||
|
var multiMatcher = new RegExp('(' + token + ')+', 'gi');
|
||||||
|
|
||||||
|
function decodeComponents(components, split) {
|
||||||
|
try {
|
||||||
|
// Try to decode the entire string first
|
||||||
|
return decodeURIComponent(components.join(''));
|
||||||
|
} catch (err) {
|
||||||
|
// Do nothing
|
||||||
|
}
|
||||||
|
|
||||||
|
if (components.length === 1) {
|
||||||
|
return components;
|
||||||
|
}
|
||||||
|
|
||||||
|
split = split || 1;
|
||||||
|
|
||||||
|
// Split the array in 2 parts
|
||||||
|
var left = components.slice(0, split);
|
||||||
|
var right = components.slice(split);
|
||||||
|
|
||||||
|
return Array.prototype.concat.call([], decodeComponents(left), decodeComponents(right));
|
||||||
|
}
|
||||||
|
|
||||||
|
function decode(input) {
|
||||||
|
try {
|
||||||
|
return decodeURIComponent(input);
|
||||||
|
} catch (err) {
|
||||||
|
var tokens = input.match(singleMatcher);
|
||||||
|
|
||||||
|
for (var i = 1; i < tokens.length; i++) {
|
||||||
|
input = decodeComponents(tokens, i).join('');
|
||||||
|
|
||||||
|
tokens = input.match(singleMatcher);
|
||||||
|
}
|
||||||
|
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function customDecodeURIComponent(input) {
|
||||||
|
// Keep track of all the replacements and prefill the map with the `BOM`
|
||||||
|
var replaceMap = {
|
||||||
|
'%FE%FF': '\uFFFD\uFFFD',
|
||||||
|
'%FF%FE': '\uFFFD\uFFFD'
|
||||||
|
};
|
||||||
|
|
||||||
|
var match = multiMatcher.exec(input);
|
||||||
|
while (match) {
|
||||||
|
try {
|
||||||
|
// Decode as big chunks as possible
|
||||||
|
replaceMap[match[0]] = decodeURIComponent(match[0]);
|
||||||
|
} catch (err) {
|
||||||
|
var result = decode(match[0]);
|
||||||
|
|
||||||
|
if (result !== match[0]) {
|
||||||
|
replaceMap[match[0]] = result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match = multiMatcher.exec(input);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add `%C2` at the end of the map to make sure it does not replace the combinator before everything else
|
||||||
|
replaceMap['%C2'] = '\uFFFD';
|
||||||
|
|
||||||
|
var entries = Object.keys(replaceMap);
|
||||||
|
|
||||||
|
for (var i = 0; i < entries.length; i++) {
|
||||||
|
// Replace all decoded components
|
||||||
|
var key = entries[i];
|
||||||
|
input = input.replace(new RegExp(key, 'g'), replaceMap[key]);
|
||||||
|
}
|
||||||
|
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = function (encodedURI) {
|
||||||
|
if (typeof encodedURI !== 'string') {
|
||||||
|
throw new TypeError('Expected `encodedURI` to be of type `string`, got `' + typeof encodedURI + '`');
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
encodedURI = encodedURI.replace(/\+/g, ' ');
|
||||||
|
|
||||||
|
// Try the built in decoder first
|
||||||
|
return decodeURIComponent(encodedURI);
|
||||||
|
} catch (err) {
|
||||||
|
// Fallback to a more advanced decoder
|
||||||
|
return customDecodeURIComponent(encodedURI);
|
||||||
|
}
|
||||||
|
};
|
||||||
21 node_modules/decode-uri-component/license generated vendored Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) Sam Verschueren <sam.verschueren@gmail.com> (github.com/SamVerschueren)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
37 node_modules/decode-uri-component/package.json generated vendored Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
{
|
||||||
|
"name": "decode-uri-component",
|
||||||
|
"version": "0.2.0",
|
||||||
|
"description": "A better decodeURIComponent",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "SamVerschueren/decode-uri-component",
|
||||||
|
"author": {
|
||||||
|
"name": "Sam Verschueren",
|
||||||
|
"email": "sam.verschueren@gmail.com",
|
||||||
|
"url": "github.com/SamVerschueren"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "xo && nyc ava",
|
||||||
|
"coveralls": "nyc report --reporter=text-lcov | coveralls"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"keywords": [
|
||||||
|
"decode",
|
||||||
|
"uri",
|
||||||
|
"component",
|
||||||
|
"decodeuricomponent",
|
||||||
|
"components",
|
||||||
|
"decoder",
|
||||||
|
"url"
|
||||||
|
],
|
||||||
|
"devDependencies": {
|
||||||
|
"ava": "^0.17.0",
|
||||||
|
"coveralls": "^2.13.1",
|
||||||
|
"nyc": "^10.3.2",
|
||||||
|
"xo": "^0.16.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
70 node_modules/decode-uri-component/readme.md generated vendored Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
# decode-uri-component
|
||||||
|
|
||||||
|
[](https://travis-ci.org/SamVerschueren/decode-uri-component) [](https://coveralls.io/github/SamVerschueren/decode-uri-component?branch=master)
|
||||||
|
|
||||||
|
> A better [decodeURIComponent](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent)
|
||||||
|
|
||||||
|
|
||||||
|
## Why?
|
||||||
|
|
||||||
|
- Decodes `+` to a space.
|
||||||
|
- Converts the [BOM](https://en.wikipedia.org/wiki/Byte_order_mark) to a [replacement character](https://en.wikipedia.org/wiki/Specials_(Unicode_block)#Replacement_character) `<60>`.
|
||||||
|
- Does not throw with invalid encoded input.
|
||||||
|
- Decodes as much of the string as possible.
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install --save decode-uri-component
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
const decodeUriComponent = require('decode-uri-component');
|
||||||
|
|
||||||
|
decodeUriComponent('%25');
|
||||||
|
//=> '%'
|
||||||
|
|
||||||
|
decodeUriComponent('%');
|
||||||
|
//=> '%'
|
||||||
|
|
||||||
|
decodeUriComponent('st%C3%A5le');
|
||||||
|
//=> 'ståle'
|
||||||
|
|
||||||
|
decodeUriComponent('%st%C3%A5le%');
|
||||||
|
//=> '%ståle%'
|
||||||
|
|
||||||
|
decodeUriComponent('%%7Bst%C3%A5le%7D%');
|
||||||
|
//=> '%{ståle}%'
|
||||||
|
|
||||||
|
decodeUriComponent('%7B%ab%%7C%de%%7D');
|
||||||
|
//=> '{%ab%|%de%}'
|
||||||
|
|
||||||
|
decodeUriComponent('%FE%FF');
|
||||||
|
//=> '\uFFFD\uFFFD'
|
||||||
|
|
||||||
|
decodeUriComponent('%C2');
|
||||||
|
//=> '\uFFFD'
|
||||||
|
|
||||||
|
decodeUriComponent('%C2%B5');
|
||||||
|
//=> 'µ'
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
### decodeUriComponent(encodedURI)
|
||||||
|
|
||||||
|
#### encodedURI
|
||||||
|
|
||||||
|
Type: `string`
|
||||||
|
|
||||||
|
An encoded component of a Uniform Resource Identifier.
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © [Sam Verschueren](https://github.com/SamVerschueren)
|
||||||
17 node_modules/filter-obj/index.js generated vendored Normal file
@@ -0,0 +1,17 @@
+'use strict';
+module.exports = function (obj, predicate) {
+	var ret = {};
+	var keys = Object.keys(obj);
+	var isArr = Array.isArray(predicate);
+
+	for (var i = 0; i < keys.length; i++) {
+		var key = keys[i];
+		var val = obj[key];
+
+		if (isArr ? predicate.indexOf(key) !== -1 : predicate(key, val, obj)) {
+			ret[key] = val;
+		}
+	}
+
+	return ret;
+};
21 node_modules/filter-obj/license generated vendored Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
37 node_modules/filter-obj/package.json generated vendored Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
{
|
||||||
|
"name": "filter-obj",
|
||||||
|
"version": "1.1.0",
|
||||||
|
"description": "Filter object keys and values into a new object",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "sindresorhus/filter-obj",
|
||||||
|
"author": {
|
||||||
|
"name": "Sindre Sorhus",
|
||||||
|
"email": "sindresorhus@gmail.com",
|
||||||
|
"url": "sindresorhus.com"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "xo && node test.js"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"keywords": [
|
||||||
|
"filter",
|
||||||
|
"obj",
|
||||||
|
"object",
|
||||||
|
"key",
|
||||||
|
"keys",
|
||||||
|
"value",
|
||||||
|
"values",
|
||||||
|
"val",
|
||||||
|
"iterate",
|
||||||
|
"iterator"
|
||||||
|
],
|
||||||
|
"devDependencies": {
|
||||||
|
"ava": "0.0.4",
|
||||||
|
"xo": "*"
|
||||||
|
}
|
||||||
|
}
|
||||||
41 node_modules/filter-obj/readme.md generated vendored Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# filter-obj [](https://travis-ci.org/sindresorhus/filter-obj)
|
||||||
|
|
||||||
|
> Filter object keys and values into a new object
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install --save filter-obj
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
var filterObj = require('filter-obj');
|
||||||
|
|
||||||
|
var obj = {
|
||||||
|
foo: true,
|
||||||
|
bar: false
|
||||||
|
};
|
||||||
|
|
||||||
|
var newObject = filterObj(obj, function (key, value, object) {
|
||||||
|
return value === true;
|
||||||
|
});
|
||||||
|
//=> {foo: true}
|
||||||
|
|
||||||
|
var newObject2 = filterObj(obj, ['bar']);
|
||||||
|
//=> {bar: true}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Related
|
||||||
|
|
||||||
|
- [map-obj](https://github.com/sindresorhus/map-obj) - Map object keys and values into a new object
|
||||||
|
- [object-assign](https://github.com/sindresorhus/object-assign) - Copy enumerable own properties from one or more source objects to a target object
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
||||||
80 node_modules/ini/ini.js generated vendored
@@ -15,7 +15,7 @@ function encode (obj, opt) {
   if (typeof opt === 'string') {
     opt = {
       section: opt,
-      whitespace: false
+      whitespace: false,
     }
   } else {
     opt = opt || {}
@@ -30,27 +30,25 @@ function encode (obj, opt) {
       val.forEach(function (item) {
         out += safe(k + '[]') + separator + safe(item) + '\n'
       })
-    } else if (val && typeof val === 'object') {
+    } else if (val && typeof val === 'object')
       children.push(k)
-    } else {
+    else
       out += safe(k) + separator + safe(val) + eol
-    }
   })
 
-  if (opt.section && out.length) {
+  if (opt.section && out.length)
     out = '[' + safe(opt.section) + ']' + eol + out
-  }
 
   children.forEach(function (k, _, __) {
     var nk = dotSplit(k).join('\\.')
     var section = (opt.section ? opt.section + '.' : '') + nk
     var child = encode(obj[k], {
       section: section,
-      whitespace: opt.whitespace
+      whitespace: opt.whitespace,
     })
-    if (out.length && child.length) {
+    if (out.length && child.length)
       out += eol
-    }
     out += child
   })
 
@@ -75,15 +73,25 @@ function decode (str) {
   var lines = str.split(/[\r\n]+/g)
 
   lines.forEach(function (line, _, __) {
-    if (!line || line.match(/^\s*[;#]/)) return
+    if (!line || line.match(/^\s*[;#]/))
+      return
     var match = line.match(re)
-    if (!match) return
+    if (!match)
+      return
     if (match[1] !== undefined) {
       section = unsafe(match[1])
+      if (section === '__proto__') {
+        // not allowed
+        // keep parsing the section, but don't attach it.
+        p = {}
+        return
+      }
       p = out[section] = out[section] || {}
       return
     }
     var key = unsafe(match[2])
+    if (key === '__proto__')
+      return
     var value = match[3] ? unsafe(match[4]) : true
     switch (value) {
       case 'true':
@@ -94,20 +102,20 @@ function decode (str) {
     // Convert keys with '[]' suffix to an array
     if (key.length > 2 && key.slice(-2) === '[]') {
       key = key.substring(0, key.length - 2)
-      if (!p[key]) {
+      if (key === '__proto__')
+        return
+      if (!p[key])
        p[key] = []
-      } else if (!Array.isArray(p[key])) {
+      else if (!Array.isArray(p[key]))
        p[key] = [p[key]]
-      }
     }
 
     // safeguard against resetting a previously defined
     // array by accidentally forgetting the brackets
-    if (Array.isArray(p[key])) {
+    if (Array.isArray(p[key]))
       p[key].push(value)
-    } else {
+    else
       p[key] = value
-    }
   })
 
   // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}}
@@ -115,9 +123,9 @@ function decode (str) {
   Object.keys(out).filter(function (k, _, __) {
     if (!out[k] ||
       typeof out[k] !== 'object' ||
-      Array.isArray(out[k])) {
+      Array.isArray(out[k]))
       return false
-    }
     // see if the parent section is also an object.
     // if so, add it to that, and mark this one for deletion
     var parts = dotSplit(k)
@@ -125,12 +133,15 @@ function decode (str) {
     var l = parts.pop()
     var nl = l.replace(/\\\./g, '.')
     parts.forEach(function (part, _, __) {
-      if (!p[part] || typeof p[part] !== 'object') p[part] = {}
+      if (part === '__proto__')
+        return
+      if (!p[part] || typeof p[part] !== 'object')
+        p[part] = {}
       p = p[part]
     })
-    if (p === out && nl === l) {
+    if (p === out && nl === l)
       return false
-    }
     p[nl] = out[k]
     return true
   }).forEach(function (del, _, __) {
@@ -160,10 +171,12 @@ function unsafe (val, doUnesc) {
   val = (val || '').trim()
   if (isQuoted(val)) {
     // remove the single quotes before calling JSON.parse
-    if (val.charAt(0) === "'") {
+    if (val.charAt(0) === "'")
       val = val.substr(1, val.length - 2)
-    }
-    try { val = JSON.parse(val) } catch (_) {}
+
+    try {
+      val = JSON.parse(val)
+    } catch (_) {}
   } else {
     // walk the val to find the first not-escaped ; character
     var esc = false
@@ -171,23 +184,22 @@ function unsafe (val, doUnesc) {
     for (var i = 0, l = val.length; i < l; i++) {
       var c = val.charAt(i)
       if (esc) {
-        if ('\\;#'.indexOf(c) !== -1) {
+        if ('\\;#'.indexOf(c) !== -1)
           unesc += c
-        } else {
+        else
           unesc += '\\' + c
-        }
         esc = false
-      } else if (';#'.indexOf(c) !== -1) {
+      } else if (';#'.indexOf(c) !== -1)
         break
-      } else if (c === '\\') {
+      else if (c === '\\')
         esc = true
-      } else {
+      else
         unesc += c
-      }
     }
-    if (esc) {
+
+    if (esc)
       unesc += '\\'
-    }
+
     return unesc.trim()
   }
   return val
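The vendored ini module is bumped from 1.3.5 to 1.3.8 (see the package.json diff below). The guards added above skip `__proto__` sections and keys while parsing, which closes the prototype-pollution hole fixed in ini 1.3.6+. A minimal sketch of the behaviour the guard prevents (illustration only, not part of this diff):

```js
// Illustration only: with the __proto__ guard, parsing untrusted ini input
// no longer pollutes Object.prototype.
const ini = require('ini')

const malicious = '[__proto__]\npolluted = true\n'
const parsed = ini.parse(malicious)

console.log(parsed.polluted) // undefined — the __proto__ section was dropped
console.log({}.polluted)     // undefined — Object.prototype is untouched
```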
23 node_modules/ini/package.json generated vendored
@@ -2,26 +2,29 @@
   "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
   "name": "ini",
   "description": "An ini encoder/decoder for node",
-  "version": "1.3.5",
+  "version": "1.3.8",
   "repository": {
     "type": "git",
     "url": "git://github.com/isaacs/ini.git"
   },
   "main": "ini.js",
   "scripts": {
-    "pretest": "standard ini.js",
-    "test": "tap test/*.js --100 -J",
+    "eslint": "eslint",
+    "lint": "npm run eslint -- ini.js test/*.js",
+    "lintfix": "npm run lint -- --fix",
+    "test": "tap",
+    "posttest": "npm run lint",
     "preversion": "npm test",
     "postversion": "npm publish",
-    "postpublish": "git push origin --all; git push origin --tags"
+    "prepublishOnly": "git push origin --follow-tags"
   },
-  "engines": {
-    "node": "*"
-  },
-  "dependencies": {},
   "devDependencies": {
-    "standard": "^10.0.3",
-    "tap": "^10.7.3 || 11"
+    "eslint": "^7.9.0",
+    "eslint-plugin-import": "^2.22.0",
+    "eslint-plugin-node": "^11.1.0",
+    "eslint-plugin-promise": "^4.2.1",
+    "eslint-plugin-standard": "^4.0.1",
+    "tap": "14"
   },
   "license": "ISC",
   "files": [
489 node_modules/query-string/index.d.ts generated vendored Normal file
@@ -0,0 +1,489 @@
|
|||||||
|
export interface ParseOptions {
|
||||||
|
/**
|
||||||
|
Decode the keys and values. URI components are decoded with [`decode-uri-component`](https://github.com/SamVerschueren/decode-uri-component).
|
||||||
|
|
||||||
|
@default true
|
||||||
|
*/
|
||||||
|
readonly decode?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
@default 'none'
|
||||||
|
|
||||||
|
- `bracket`: Parse arrays with bracket representation:
|
||||||
|
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo[]=1&foo[]=2&foo[]=3', {arrayFormat: 'bracket'});
|
||||||
|
//=> {foo: ['1', '2', '3']}
|
||||||
|
```
|
||||||
|
|
||||||
|
- `index`: Parse arrays with index representation:
|
||||||
|
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo[0]=1&foo[1]=2&foo[3]=3', {arrayFormat: 'index'});
|
||||||
|
//=> {foo: ['1', '2', '3']}
|
||||||
|
```
|
||||||
|
|
||||||
|
- `comma`: Parse arrays with elements separated by comma:
|
||||||
|
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo=1,2,3', {arrayFormat: 'comma'});
|
||||||
|
//=> {foo: ['1', '2', '3']}
|
||||||
|
```
|
||||||
|
|
||||||
|
- `separator`: Parse arrays with elements separated by a custom character:
|
||||||
|
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo=1|2|3', {arrayFormat: 'separator', arrayFormatSeparator: '|'});
|
||||||
|
//=> {foo: ['1', '2', '3']}
|
||||||
|
```
|
||||||
|
|
||||||
|
- `none`: Parse arrays with elements using duplicate keys:
|
||||||
|
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo=1&foo=2&foo=3');
|
||||||
|
//=> {foo: ['1', '2', '3']}
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
readonly arrayFormat?: 'bracket' | 'index' | 'comma' | 'separator' | 'none';
|
||||||
|
|
||||||
|
/**
|
||||||
|
The character used to separate array elements when using `{arrayFormat: 'separator'}`.
|
||||||
|
|
||||||
|
@default ,
|
||||||
|
*/
|
||||||
|
readonly arrayFormatSeparator?: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Supports both `Function` as a custom sorting function or `false` to disable sorting.
|
||||||
|
|
||||||
|
If omitted, keys are sorted using `Array#sort`, which means, converting them to strings and comparing strings in Unicode code point order.
|
||||||
|
|
||||||
|
@default true
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
const order = ['c', 'a', 'b'];
|
||||||
|
|
||||||
|
queryString.parse('?a=one&b=two&c=three', {
|
||||||
|
sort: (itemLeft, itemRight) => order.indexOf(itemLeft) - order.indexOf(itemRight)
|
||||||
|
});
|
||||||
|
//=> {c: 'three', a: 'one', b: 'two'}
|
||||||
|
```
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('?a=one&c=three&b=two', {sort: false});
|
||||||
|
//=> {a: 'one', c: 'three', b: 'two'}
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
readonly sort?: ((itemLeft: string, itemRight: string) => number) | false;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Parse the value as a number type instead of string type if it's a number.
|
||||||
|
|
||||||
|
@default false
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo=1', {parseNumbers: true});
|
||||||
|
//=> {foo: 1}
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
readonly parseNumbers?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Parse the value as a boolean type instead of string type if it's a boolean.
|
||||||
|
|
||||||
|
@default false
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo=true', {parseBooleans: true});
|
||||||
|
//=> {foo: true}
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
readonly parseBooleans?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Parse the fragment identifier from the URL and add it to result object.
|
||||||
|
|
||||||
|
@default false
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parseUrl('https://foo.bar?foo=bar#xyz', {parseFragmentIdentifier: true});
|
||||||
|
//=> {url: 'https://foo.bar', query: {foo: 'bar'}, fragmentIdentifier: 'xyz'}
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
readonly parseFragmentIdentifier?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ParsedQuery<T = string> {
|
||||||
|
[key: string]: T | T[] | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
Parse a query string into an object. Leading `?` or `#` are ignored, so you can pass `location.search` or `location.hash` directly.
|
||||||
|
|
||||||
|
The returned object is created with [`Object.create(null)`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/create) and thus does not have a `prototype`.
|
||||||
|
|
||||||
|
@param query - The query string to parse.
|
||||||
|
*/
|
||||||
|
export function parse(query: string, options: {parseBooleans: true, parseNumbers: true} & ParseOptions): ParsedQuery<string | boolean | number>;
|
||||||
|
export function parse(query: string, options: {parseBooleans: true} & ParseOptions): ParsedQuery<string | boolean>;
|
||||||
|
export function parse(query: string, options: {parseNumbers: true} & ParseOptions): ParsedQuery<string | number>;
|
||||||
|
export function parse(query: string, options?: ParseOptions): ParsedQuery;
|
||||||
|
|
||||||
|
export interface ParsedUrl {
|
||||||
|
readonly url: string;
|
||||||
|
readonly query: ParsedQuery;
|
||||||
|
|
||||||
|
/**
|
||||||
|
The fragment identifier of the URL.
|
||||||
|
|
||||||
|
Present when the `parseFragmentIdentifier` option is `true`.
|
||||||
|
*/
|
||||||
|
readonly fragmentIdentifier?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
Extract the URL and the query string as an object.
|
||||||
|
|
||||||
|
If the `parseFragmentIdentifier` option is `true`, the object will also contain a `fragmentIdentifier` property.
|
||||||
|
|
||||||
|
@param url - The URL to parse.
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parseUrl('https://foo.bar?foo=bar');
|
||||||
|
//=> {url: 'https://foo.bar', query: {foo: 'bar'}}
|
||||||
|
|
||||||
|
queryString.parseUrl('https://foo.bar?foo=bar#xyz', {parseFragmentIdentifier: true});
|
||||||
|
//=> {url: 'https://foo.bar', query: {foo: 'bar'}, fragmentIdentifier: 'xyz'}
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
export function parseUrl(url: string, options?: ParseOptions): ParsedUrl;
|
||||||
|
|
||||||
|
export interface StringifyOptions {
|
||||||
|
/**
|
||||||
|
Strictly encode URI components with [`strict-uri-encode`](https://github.com/kevva/strict-uri-encode). It uses [`encodeURIComponent`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) if set to `false`. You probably [don't care](https://github.com/sindresorhus/query-string/issues/42) about this option.
|
||||||
|
|
||||||
|
@default true
|
||||||
|
*/
|
||||||
|
readonly strict?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
[URL encode](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) the keys and values.
|
||||||
|
|
||||||
|
@default true
|
||||||
|
*/
|
||||||
|
readonly encode?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
@default 'none'
|
||||||
|
|
||||||
|
- `bracket`: Serialize arrays using bracket representation:
|
||||||
|
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'bracket'});
|
||||||
|
//=> 'foo[]=1&foo[]=2&foo[]=3'
|
||||||
|
```
|
||||||
|
|
||||||
|
- `index`: Serialize arrays using index representation:
|
||||||
|
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'index'});
|
||||||
|
//=> 'foo[0]=1&foo[1]=2&foo[2]=3'
|
||||||
|
```
|
||||||
|
|
||||||
|
- `comma`: Serialize arrays by separating elements with comma:
|
||||||
|
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'comma'});
|
||||||
|
//=> 'foo=1,2,3'
|
||||||
|
```
|
||||||
|
|
||||||
|
- `separator`: Serialize arrays by separating elements with character:
|
||||||
|
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'separator', arrayFormatSeparator: '|'});
|
||||||
|
//=> 'foo=1|2|3'
|
||||||
|
```
|
||||||
|
|
||||||
|
- `none`: Serialize arrays by using duplicate keys:
|
||||||
|
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({foo: [1, 2, 3]});
|
||||||
|
//=> 'foo=1&foo=2&foo=3'
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
readonly arrayFormat?: 'bracket' | 'index' | 'comma' | 'separator' | 'none';
|
||||||
|
|
||||||
|
/**
|
||||||
|
The character used to separate array elements when using `{arrayFormat: 'separator'}`.
|
||||||
|
|
||||||
|
@default ,
|
||||||
|
*/
|
||||||
|
readonly arrayFormatSeparator?: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Supports both `Function` as a custom sorting function or `false` to disable sorting.
|
||||||
|
|
||||||
|
If omitted, keys are sorted using `Array#sort`, which means, converting them to strings and comparing strings in Unicode code point order.
|
||||||
|
|
||||||
|
@default true
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
const order = ['c', 'a', 'b'];
|
||||||
|
|
||||||
|
queryString.stringify({a: 1, b: 2, c: 3}, {
|
||||||
|
sort: (itemLeft, itemRight) => order.indexOf(itemLeft) - order.indexOf(itemRight)
|
||||||
|
});
|
||||||
|
//=> 'c=3&a=1&b=2'
|
||||||
|
```
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({b: 1, c: 2, a: 3}, {sort: false});
|
||||||
|
//=> 'b=1&c=2&a=3'
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
readonly sort?: ((itemLeft: string, itemRight: string) => number) | false;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Skip keys with `null` as the value.
|
||||||
|
|
||||||
|
Note that keys with `undefined` as the value are always skipped.
|
||||||
|
|
||||||
|
@default false
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({a: 1, b: undefined, c: null, d: 4}, {
|
||||||
|
skipNull: true
|
||||||
|
});
|
||||||
|
//=> 'a=1&d=4'
|
||||||
|
|
||||||
|
queryString.stringify({a: undefined, b: null}, {
|
||||||
|
skipNull: true
|
||||||
|
});
|
||||||
|
//=> ''
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
readonly skipNull?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Skip keys with an empty string as the value.
|
||||||
|
|
||||||
|
@default false
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({a: 1, b: '', c: '', d: 4}, {
|
||||||
|
skipEmptyString: true
|
||||||
|
});
|
||||||
|
//=> 'a=1&d=4'
|
||||||
|
```
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({a: '', b: ''}, {
|
||||||
|
skipEmptyString: true
|
||||||
|
});
|
||||||
|
//=> ''
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
readonly skipEmptyString?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Stringifiable = string | boolean | number | null | undefined;
|
||||||
|
|
||||||
|
export type StringifiableRecord = Record<
|
||||||
|
string,
|
||||||
|
Stringifiable | readonly Stringifiable[]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Stringify an object into a query string and sort the keys.
|
||||||
|
*/
|
||||||
|
export function stringify(
|
||||||
|
// TODO: Use the below instead when the following TS issues are fixed:
|
||||||
|
// - https://github.com/microsoft/TypeScript/issues/15300
|
||||||
|
// - https://github.com/microsoft/TypeScript/issues/42021
|
||||||
|
// Context: https://github.com/sindresorhus/query-string/issues/298
|
||||||
|
// object: StringifiableRecord,
|
||||||
|
object: Record<string, any>,
|
||||||
|
options?: StringifyOptions
|
||||||
|
): string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Extract a query string from a URL that can be passed into `.parse()`.
|
||||||
|
|
||||||
|
Note: This behaviour can be changed with the `skipNull` option.
|
||||||
|
*/
|
||||||
|
export function extract(url: string): string;
|
||||||
|
|
||||||
|
export interface UrlObject {
|
||||||
|
readonly url: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Overrides queries in the `url` property.
|
||||||
|
*/
|
||||||
|
readonly query: StringifiableRecord;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Overrides the fragment identifier in the `url` property.
|
||||||
|
*/
|
||||||
|
readonly fragmentIdentifier?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
Stringify an object into a URL with a query string and sorting the keys. The inverse of [`.parseUrl()`](https://github.com/sindresorhus/query-string#parseurlstring-options)
|
||||||
|
|
||||||
|
Query items in the `query` property overrides queries in the `url` property.
|
||||||
|
|
||||||
|
The `fragmentIdentifier` property overrides the fragment identifier in the `url` property.
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
queryString.stringifyUrl({url: 'https://foo.bar', query: {foo: 'bar'}});
|
||||||
|
//=> 'https://foo.bar?foo=bar'
|
||||||
|
|
||||||
|
queryString.stringifyUrl({url: 'https://foo.bar?foo=baz', query: {foo: 'bar'}});
|
||||||
|
//=> 'https://foo.bar?foo=bar'
|
||||||
|
|
||||||
|
queryString.stringifyUrl({
|
||||||
|
url: 'https://foo.bar',
|
||||||
|
query: {
|
||||||
|
top: 'foo'
|
||||||
|
},
|
||||||
|
fragmentIdentifier: 'bar'
|
||||||
|
});
|
||||||
|
//=> 'https://foo.bar?top=foo#bar'
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
export function stringifyUrl(
|
||||||
|
object: UrlObject,
|
||||||
|
options?: StringifyOptions
|
||||||
|
): string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
Pick query parameters from a URL.
|
||||||
|
|
||||||
|
@param url - The URL containing the query parameters to pick.
|
||||||
|
@param keys - The names of the query parameters to keep. All other query parameters will be removed from the URL.
|
||||||
|
@param filter - A filter predicate that will be provided the name of each query parameter and its value. The `parseNumbers` and `parseBooleans` options also affect `value`.
|
||||||
|
|
||||||
|
@returns The URL with the picked query parameters.
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
queryString.pick('https://foo.bar?foo=1&bar=2#hello', ['foo']);
|
||||||
|
//=> 'https://foo.bar?foo=1#hello'
|
||||||
|
|
||||||
|
queryString.pick('https://foo.bar?foo=1&bar=2#hello', (name, value) => value === 2, {parseNumbers: true});
|
||||||
|
//=> 'https://foo.bar?bar=2#hello'
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
export function pick(
|
||||||
|
url: string,
|
||||||
|
keys: readonly string[],
|
||||||
|
options?: ParseOptions & StringifyOptions
|
||||||
|
): string
|
||||||
|
export function pick(
|
||||||
|
url: string,
|
||||||
|
filter: (key: string, value: string | boolean | number) => boolean,
|
||||||
|
options?: {parseBooleans: true, parseNumbers: true} & ParseOptions & StringifyOptions
|
||||||
|
): string
|
||||||
|
export function pick(
|
||||||
|
url: string,
|
||||||
|
filter: (key: string, value: string | boolean) => boolean,
|
||||||
|
options?: {parseBooleans: true} & ParseOptions & StringifyOptions
|
||||||
|
): string
|
||||||
|
export function pick(
|
||||||
|
url: string,
|
||||||
|
filter: (key: string, value: string | number) => boolean,
|
||||||
|
options?: {parseNumbers: true} & ParseOptions & StringifyOptions
|
||||||
|
): string
|
||||||
|
|
||||||
|
/**
|
||||||
|
Exclude query parameters from a URL. Like `.pick()` but reversed.
|
||||||
|
|
||||||
|
@param url - The URL containing the query parameters to exclude.
|
||||||
|
@param keys - The names of the query parameters to remove. All other query parameters will remain in the URL.
|
||||||
|
@param filter - A filter predicate that will be provided the name of each query parameter and its value. The `parseNumbers` and `parseBooleans` options also affect `value`.
|
||||||
|
|
||||||
|
@returns The URL without the excluded the query parameters.
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
queryString.exclude('https://foo.bar?foo=1&bar=2#hello', ['foo']);
|
||||||
|
//=> 'https://foo.bar?bar=2#hello'
|
||||||
|
|
||||||
|
queryString.exclude('https://foo.bar?foo=1&bar=2#hello', (name, value) => value === 2, {parseNumbers: true});
|
||||||
|
//=> 'https://foo.bar?foo=1#hello'
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
export function exclude(
|
||||||
|
url: string,
|
||||||
|
keys: readonly string[],
|
||||||
|
options?: ParseOptions & StringifyOptions
|
||||||
|
): string
|
||||||
|
export function exclude(
|
||||||
|
url: string,
|
||||||
|
filter: (key: string, value: string | boolean | number) => boolean,
|
||||||
|
options?: {parseBooleans: true, parseNumbers: true} & ParseOptions & StringifyOptions
|
||||||
|
): string
|
||||||
|
export function exclude(
|
||||||
|
url: string,
|
||||||
|
filter: (key: string, value: string | boolean) => boolean,
|
||||||
|
options?: {parseBooleans: true} & ParseOptions & StringifyOptions
|
||||||
|
): string
|
||||||
|
export function exclude(
|
||||||
|
url: string,
|
||||||
|
filter: (key: string, value: string | number) => boolean,
|
||||||
|
options?: {parseNumbers: true} & ParseOptions & StringifyOptions
|
||||||
|
): string
|
||||||
404 node_modules/query-string/index.js generated vendored Normal file
@@ -0,0 +1,404 @@
|
|||||||
|
'use strict';
|
||||||
|
const strictUriEncode = require('strict-uri-encode');
|
||||||
|
const decodeComponent = require('decode-uri-component');
|
||||||
|
const splitOnFirst = require('split-on-first');
|
||||||
|
const filterObject = require('filter-obj');
|
||||||
|
|
||||||
|
const isNullOrUndefined = value => value === null || value === undefined;
|
||||||
|
|
||||||
|
function encoderForArrayFormat(options) {
|
||||||
|
switch (options.arrayFormat) {
|
||||||
|
case 'index':
|
||||||
|
return key => (result, value) => {
|
||||||
|
const index = result.length;
|
||||||
|
|
||||||
|
if (
|
||||||
|
value === undefined ||
|
||||||
|
(options.skipNull && value === null) ||
|
||||||
|
(options.skipEmptyString && value === '')
|
||||||
|
) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (value === null) {
|
||||||
|
return [...result, [encode(key, options), '[', index, ']'].join('')];
|
||||||
|
}
|
||||||
|
|
||||||
|
return [
|
||||||
|
...result,
|
||||||
|
[encode(key, options), '[', encode(index, options), ']=', encode(value, options)].join('')
|
||||||
|
];
|
||||||
|
};
|
||||||
|
|
||||||
|
case 'bracket':
|
||||||
|
return key => (result, value) => {
|
||||||
|
if (
|
||||||
|
value === undefined ||
|
||||||
|
(options.skipNull && value === null) ||
|
||||||
|
(options.skipEmptyString && value === '')
|
||||||
|
) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (value === null) {
|
||||||
|
return [...result, [encode(key, options), '[]'].join('')];
|
||||||
|
}
|
||||||
|
|
||||||
|
return [...result, [encode(key, options), '[]=', encode(value, options)].join('')];
|
||||||
|
};
|
||||||
|
|
||||||
|
case 'comma':
|
||||||
|
case 'separator':
|
||||||
|
return key => (result, value) => {
|
||||||
|
if (value === null || value === undefined || value.length === 0) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.length === 0) {
|
||||||
|
return [[encode(key, options), '=', encode(value, options)].join('')];
|
||||||
|
}
|
||||||
|
|
||||||
|
return [[result, encode(value, options)].join(options.arrayFormatSeparator)];
|
||||||
|
};
|
||||||
|
|
||||||
|
default:
|
||||||
|
return key => (result, value) => {
|
||||||
|
if (
|
||||||
|
value === undefined ||
|
||||||
|
(options.skipNull && value === null) ||
|
||||||
|
(options.skipEmptyString && value === '')
|
||||||
|
) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (value === null) {
|
||||||
|
return [...result, encode(key, options)];
|
||||||
|
}
|
||||||
|
|
||||||
|
return [...result, [encode(key, options), '=', encode(value, options)].join('')];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
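// Illustrative note: with {arrayFormat: 'bracket'}, the encoder returned above
// reduces {foo: [1, 2]} to ['foo[]=1', 'foo[]=2'], which stringify() then joins
// with '&' into 'foo[]=1&foo[]=2' (matching the readme's 'bracket' example).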
|
|
||||||
|
function parserForArrayFormat(options) {
|
||||||
|
let result;
|
||||||
|
|
||||||
|
switch (options.arrayFormat) {
|
||||||
|
case 'index':
|
||||||
|
return (key, value, accumulator) => {
|
||||||
|
result = /\[(\d*)\]$/.exec(key);
|
||||||
|
|
||||||
|
key = key.replace(/\[\d*\]$/, '');
|
||||||
|
|
||||||
|
if (!result) {
|
||||||
|
accumulator[key] = value;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (accumulator[key] === undefined) {
|
||||||
|
accumulator[key] = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
accumulator[key][result[1]] = value;
|
||||||
|
};
|
||||||
|
|
||||||
|
case 'bracket':
|
||||||
|
return (key, value, accumulator) => {
|
||||||
|
result = /(\[\])$/.exec(key);
|
||||||
|
key = key.replace(/\[\]$/, '');
|
||||||
|
|
||||||
|
if (!result) {
|
||||||
|
accumulator[key] = value;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (accumulator[key] === undefined) {
|
||||||
|
accumulator[key] = [value];
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
accumulator[key] = [].concat(accumulator[key], value);
|
||||||
|
};
|
||||||
|
|
||||||
|
case 'comma':
|
||||||
|
case 'separator':
|
||||||
|
return (key, value, accumulator) => {
|
||||||
|
const isArray = typeof value === 'string' && value.includes(options.arrayFormatSeparator);
|
||||||
|
const isEncodedArray = (typeof value === 'string' && !isArray && decode(value, options).includes(options.arrayFormatSeparator));
|
||||||
|
value = isEncodedArray ? decode(value, options) : value;
|
||||||
|
const newValue = isArray || isEncodedArray ? value.split(options.arrayFormatSeparator).map(item => decode(item, options)) : value === null ? value : decode(value, options);
|
||||||
|
accumulator[key] = newValue;
|
||||||
|
};
|
||||||
|
|
||||||
|
default:
|
||||||
|
return (key, value, accumulator) => {
|
||||||
|
if (accumulator[key] === undefined) {
|
||||||
|
accumulator[key] = value;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
accumulator[key] = [].concat(accumulator[key], value);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateArrayFormatSeparator(value) {
|
||||||
|
if (typeof value !== 'string' || value.length !== 1) {
|
||||||
|
throw new TypeError('arrayFormatSeparator must be single character string');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function encode(value, options) {
|
||||||
|
if (options.encode) {
|
||||||
|
return options.strict ? strictUriEncode(value) : encodeURIComponent(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
function decode(value, options) {
|
||||||
|
if (options.decode) {
|
||||||
|
return decodeComponent(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
function keysSorter(input) {
|
||||||
|
if (Array.isArray(input)) {
|
||||||
|
return input.sort();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof input === 'object') {
|
||||||
|
return keysSorter(Object.keys(input))
|
||||||
|
.sort((a, b) => Number(a) - Number(b))
|
||||||
|
.map(key => input[key]);
|
||||||
|
}
|
||||||
|
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
function removeHash(input) {
|
||||||
|
const hashStart = input.indexOf('#');
|
||||||
|
if (hashStart !== -1) {
|
||||||
|
input = input.slice(0, hashStart);
|
||||||
|
}
|
||||||
|
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getHash(url) {
|
||||||
|
let hash = '';
|
||||||
|
const hashStart = url.indexOf('#');
|
||||||
|
if (hashStart !== -1) {
|
||||||
|
hash = url.slice(hashStart);
|
||||||
|
}
|
||||||
|
|
||||||
|
return hash;
|
||||||
|
}
|
||||||
|
|
||||||
|
function extract(input) {
|
||||||
|
input = removeHash(input);
|
||||||
|
const queryStart = input.indexOf('?');
|
||||||
|
if (queryStart === -1) {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
|
||||||
|
return input.slice(queryStart + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseValue(value, options) {
|
||||||
|
if (options.parseNumbers && !Number.isNaN(Number(value)) && (typeof value === 'string' && value.trim() !== '')) {
|
||||||
|
value = Number(value);
|
||||||
|
} else if (options.parseBooleans && value !== null && (value.toLowerCase() === 'true' || value.toLowerCase() === 'false')) {
|
||||||
|
value = value.toLowerCase() === 'true';
|
||||||
|
}
|
||||||
|
|
||||||
|
return value;
|
||||||
|
}
|
||||||
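// Illustrative note: with {parseNumbers: true}, parseValue('2', options) returns
// the number 2, and with {parseBooleans: true}, parseValue('true', options)
// returns the boolean true; any other value is returned unchanged.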
|
|
||||||
|
function parse(query, options) {
|
||||||
|
options = Object.assign({
|
||||||
|
decode: true,
|
||||||
|
sort: true,
|
||||||
|
arrayFormat: 'none',
|
||||||
|
arrayFormatSeparator: ',',
|
||||||
|
parseNumbers: false,
|
||||||
|
parseBooleans: false
|
||||||
|
}, options);
|
||||||
|
|
||||||
|
validateArrayFormatSeparator(options.arrayFormatSeparator);
|
||||||
|
|
||||||
|
const formatter = parserForArrayFormat(options);
|
||||||
|
|
||||||
|
// Create an object with no prototype
|
||||||
|
const ret = Object.create(null);
|
||||||
|
|
||||||
|
if (typeof query !== 'string') {
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
query = query.trim().replace(/^[?#&]/, '');
|
||||||
|
|
||||||
|
if (!query) {
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const param of query.split('&')) {
|
||||||
|
if (param === '') {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let [key, value] = splitOnFirst(options.decode ? param.replace(/\+/g, ' ') : param, '=');
|
||||||
|
|
||||||
|
// Missing `=` should be `null`:
|
||||||
|
// http://w3.org/TR/2012/WD-url-20120524/#collect-url-parameters
|
||||||
|
value = value === undefined ? null : ['comma', 'separator'].includes(options.arrayFormat) ? value : decode(value, options);
|
||||||
|
formatter(decode(key, options), value, ret);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const key of Object.keys(ret)) {
|
||||||
|
const value = ret[key];
|
||||||
|
if (typeof value === 'object' && value !== null) {
|
||||||
|
for (const k of Object.keys(value)) {
|
||||||
|
value[k] = parseValue(value[k], options);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
ret[key] = parseValue(value, options);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options.sort === false) {
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (options.sort === true ? Object.keys(ret).sort() : Object.keys(ret).sort(options.sort)).reduce((result, key) => {
|
||||||
|
const value = ret[key];
|
||||||
|
if (Boolean(value) && typeof value === 'object' && !Array.isArray(value)) {
|
||||||
|
// Sort object keys, not values
|
||||||
|
result[key] = keysSorter(value);
|
||||||
|
} else {
|
||||||
|
result[key] = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}, Object.create(null));
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.extract = extract;
|
||||||
|
exports.parse = parse;
|
||||||
|
|
||||||
|
exports.stringify = (object, options) => {
|
||||||
|
if (!object) {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
|
||||||
|
options = Object.assign({
|
||||||
|
encode: true,
|
||||||
|
strict: true,
|
||||||
|
arrayFormat: 'none',
|
||||||
|
arrayFormatSeparator: ','
|
||||||
|
}, options);
|
||||||
|
|
||||||
|
validateArrayFormatSeparator(options.arrayFormatSeparator);
|
||||||
|
|
||||||
|
const shouldFilter = key => (
|
||||||
|
(options.skipNull && isNullOrUndefined(object[key])) ||
|
||||||
|
(options.skipEmptyString && object[key] === '')
|
||||||
|
);
|
||||||
|
|
||||||
|
const formatter = encoderForArrayFormat(options);
|
||||||
|
|
||||||
|
const objectCopy = {};
|
||||||
|
|
||||||
|
for (const key of Object.keys(object)) {
|
||||||
|
if (!shouldFilter(key)) {
|
||||||
|
objectCopy[key] = object[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const keys = Object.keys(objectCopy);
|
||||||
|
|
||||||
|
if (options.sort !== false) {
|
||||||
|
keys.sort(options.sort);
|
||||||
|
}
|
||||||
|
|
||||||
|
return keys.map(key => {
|
||||||
|
const value = object[key];
|
||||||
|
|
||||||
|
if (value === undefined) {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (value === null) {
|
||||||
|
return encode(key, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
return value
|
||||||
|
.reduce(formatter(key), [])
|
||||||
|
.join('&');
|
||||||
|
}
|
||||||
|
|
||||||
|
return encode(key, options) + '=' + encode(value, options);
|
||||||
|
}).filter(x => x.length > 0).join('&');
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.parseUrl = (url, options) => {
|
||||||
|
options = Object.assign({
|
||||||
|
decode: true
|
||||||
|
}, options);
|
||||||
|
|
||||||
|
const [url_, hash] = splitOnFirst(url, '#');
|
||||||
|
|
||||||
|
return Object.assign(
|
||||||
|
{
|
||||||
|
url: url_.split('?')[0] || '',
|
||||||
|
query: parse(extract(url), options)
|
||||||
|
},
|
||||||
|
options && options.parseFragmentIdentifier && hash ? {fragmentIdentifier: decode(hash, options)} : {}
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.stringifyUrl = (object, options) => {
|
||||||
|
options = Object.assign({
|
||||||
|
encode: true,
|
||||||
|
strict: true
|
||||||
|
}, options);
|
||||||
|
|
||||||
|
const url = removeHash(object.url).split('?')[0] || '';
|
||||||
|
const queryFromUrl = exports.extract(object.url);
|
||||||
|
const parsedQueryFromUrl = exports.parse(queryFromUrl, {sort: false});
|
||||||
|
|
||||||
|
const query = Object.assign(parsedQueryFromUrl, object.query);
|
||||||
|
let queryString = exports.stringify(query, options);
|
||||||
|
if (queryString) {
|
||||||
|
queryString = `?${queryString}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
let hash = getHash(object.url);
|
||||||
|
if (object.fragmentIdentifier) {
|
||||||
|
hash = `#${encode(object.fragmentIdentifier, options)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return `${url}${queryString}${hash}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.pick = (input, filter, options) => {
|
||||||
|
options = Object.assign({
|
||||||
|
parseFragmentIdentifier: true
|
||||||
|
}, options);
|
||||||
|
|
||||||
|
const {url, query, fragmentIdentifier} = exports.parseUrl(input, options);
|
||||||
|
return exports.stringifyUrl({
|
||||||
|
url,
|
||||||
|
query: filterObject(query, filter),
|
||||||
|
fragmentIdentifier
|
||||||
|
}, options);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.exclude = (input, filter, options) => {
|
||||||
|
const exclusionFilter = Array.isArray(filter) ? key => !filter.includes(key) : (key, value) => !filter(key, value);
|
||||||
|
|
||||||
|
return exports.pick(input, exclusionFilter, options);
|
||||||
|
};
|
||||||
9 node_modules/query-string/license generated vendored Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (http://sindresorhus.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
54 node_modules/query-string/package.json generated vendored Normal file
@@ -0,0 +1,54 @@
{
	"name": "query-string",
	"version": "6.14.0",
	"description": "Parse and stringify URL query strings",
	"license": "MIT",
	"repository": "sindresorhus/query-string",
	"funding": "https://github.com/sponsors/sindresorhus",
	"author": {
		"name": "Sindre Sorhus",
		"email": "sindresorhus@gmail.com",
		"url": "https://sindresorhus.com"
	},
	"engines": {
		"node": ">=6"
	},
	"scripts": {
		"benchmark": "node benchmark.js",
		"test": "xo && ava && tsd"
	},
	"files": [
		"index.js",
		"index.d.ts"
	],
	"keywords": [
		"browser",
		"querystring",
		"query",
		"string",
		"qs",
		"param",
		"parameter",
		"url",
		"parse",
		"stringify",
		"encode",
		"decode",
		"searchparams",
		"filter"
	],
	"dependencies": {
		"decode-uri-component": "^0.2.0",
		"filter-obj": "^1.1.0",
		"split-on-first": "^1.0.0",
		"strict-uri-encode": "^2.0.0"
	},
	"devDependencies": {
		"ava": "^1.4.1",
		"benchmark": "^2.1.4",
		"deep-equal": "^1.0.1",
		"fast-check": "^1.5.0",
		"tsd": "^0.7.3",
		"xo": "^0.24.0"
	}
}
527 node_modules/query-string/readme.md generated vendored Normal file
@@ -0,0 +1,527 @@
|
|||||||
|
# query-string
|
||||||
|
|
||||||
|
> Parse and stringify URL [query strings](https://en.wikipedia.org/wiki/Query_string)
|
||||||
|
|
||||||
|
<br>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
<div align="center">
|
||||||
|
<p>
|
||||||
|
<p>
|
||||||
|
<sup>
|
||||||
|
<a href="https://github.com/sponsors/sindresorhus">My open source work is supported by the community</a>
|
||||||
|
</sup>
|
||||||
|
</p>
|
||||||
|
<sup>Special thanks to:</sup>
|
||||||
|
<br>
|
||||||
|
<br>
|
||||||
|
<a href="https://standardresume.co/tech">
|
||||||
|
<img src="https://sindresorhus.com/assets/thanks/standard-resume-logo.svg" width="200"/>
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
<br>
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install query-string
|
||||||
|
```
|
||||||
|
|
||||||
|
This module targets Node.js 6 or later and the latest version of Chrome, Firefox, and Safari. If you want support for older browsers, or if your project uses create-react-app v1, use version 5: `npm install query-string@5`.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
console.log(location.search);
|
||||||
|
//=> '?foo=bar'
|
||||||
|
|
||||||
|
const parsed = queryString.parse(location.search);
|
||||||
|
console.log(parsed);
|
||||||
|
//=> {foo: 'bar'}
|
||||||
|
|
||||||
|
console.log(location.hash);
|
||||||
|
//=> '#token=bada55cafe'
|
||||||
|
|
||||||
|
const parsedHash = queryString.parse(location.hash);
|
||||||
|
console.log(parsedHash);
|
||||||
|
//=> {token: 'bada55cafe'}
|
||||||
|
|
||||||
|
parsed.foo = 'unicorn';
|
||||||
|
parsed.ilike = 'pizza';
|
||||||
|
|
||||||
|
const stringified = queryString.stringify(parsed);
|
||||||
|
//=> 'foo=unicorn&ilike=pizza'
|
||||||
|
|
||||||
|
location.search = stringified;
|
||||||
|
// note that `location.search` automatically prepends a question mark
|
||||||
|
console.log(location.search);
|
||||||
|
//=> '?foo=unicorn&ilike=pizza'
|
||||||
|
```
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
### .parse(string, options?)
|
||||||
|
|
||||||
|
Parse a query string into an object. Leading `?` or `#` are ignored, so you can pass `location.search` or `location.hash` directly.
|
||||||
|
|
||||||
|
The returned object is created with [`Object.create(null)`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/create) and thus does not have a `prototype`.
|
||||||
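A minimal sketch of what that means in practice:

```js
const queryString = require('query-string');

const parsed = queryString.parse('foo=bar');

Object.getPrototypeOf(parsed);
//=> null
```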
|
|
||||||
|
#### options
|
||||||
|
|
||||||
|
Type: `object`
|
||||||
|
|
||||||
|
##### decode
|
||||||
|
|
||||||
|
Type: `boolean`\
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
Decode the keys and values. URL components are decoded with [`decode-uri-component`](https://github.com/SamVerschueren/decode-uri-component).
|
||||||
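For example, a minimal sketch of the behaviour:

```js
const queryString = require('query-string');

queryString.parse('foo=bar%20baz');
//=> {foo: 'bar baz'}

queryString.parse('foo=bar%20baz', {decode: false});
//=> {foo: 'bar%20baz'}
```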
|
|
||||||
|
##### arrayFormat
|
||||||
|
|
||||||
|
Type: `string`\
|
||||||
|
Default: `'none'`
|
||||||
|
|
||||||
|
- `'bracket'`: Parse arrays with bracket representation:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo[]=1&foo[]=2&foo[]=3', {arrayFormat: 'bracket'});
|
||||||
|
//=> {foo: ['1', '2', '3']}
|
||||||
|
```
|
||||||
|
|
||||||
|
- `'index'`: Parse arrays with index representation:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo[0]=1&foo[1]=2&foo[3]=3', {arrayFormat: 'index'});
|
||||||
|
//=> {foo: ['1', '2', '3']}
|
||||||
|
```
|
||||||
|
|
||||||
|
- `'comma'`: Parse arrays with elements separated by comma:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo=1,2,3', {arrayFormat: 'comma'});
|
||||||
|
//=> {foo: ['1', '2', '3']}
|
||||||
|
```
|
||||||
|
|
||||||
|
- `'separator'`: Parse arrays with elements separated by a custom character:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo=1|2|3', {arrayFormat: 'separator', arrayFormatSeparator: '|'});
|
||||||
|
//=> {foo: ['1', '2', '3']}
|
||||||
|
```
|
||||||
|
|
||||||
|
- `'none'`: Parse arrays with elements using duplicate keys:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo=1&foo=2&foo=3');
|
||||||
|
//=> {foo: ['1', '2', '3']}
|
||||||
|
```
|
||||||
|
|
||||||
|
##### arrayFormatSeparator
|
||||||
|
|
||||||
|
Type: `string`\
|
||||||
|
Default: `','`
|
||||||
|
|
||||||
|
The character used to separate array elements when using `{arrayFormat: 'separator'}`.
|
||||||
|
|
||||||
|
##### sort
|
||||||
|
|
||||||
|
Type: `Function | boolean`\
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
Supports a `Function` as a custom sorting function, or `false` to disable sorting.
|
||||||
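For example, a minimal sketch (the object key order shown reflects the sorted, respectively insertion, order):

```js
const queryString = require('query-string');

queryString.parse('b=2&a=1&c=3');
//=> {a: '1', b: '2', c: '3'}

queryString.parse('b=2&a=1&c=3', {sort: false});
//=> {b: '2', a: '1', c: '3'}
```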
|
|
||||||
|
##### parseNumbers
|
||||||
|
|
||||||
|
Type: `boolean`\
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo=1', {parseNumbers: true});
|
||||||
|
//=> {foo: 1}
|
||||||
|
```
|
||||||
|
|
||||||
|
Parse the value as a number type instead of string type if it's a number.
|
||||||
|
|
||||||
|
##### parseBooleans
|
||||||
|
|
||||||
|
Type: `boolean`\
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('foo=true', {parseBooleans: true});
|
||||||
|
//=> {foo: true}
|
||||||
|
```
|
||||||
|
|
||||||
|
Parse the value as a boolean type instead of string type if it's a boolean.
|
||||||
|
|
||||||
|
### .stringify(object, options?)
|
||||||
|
|
||||||
|
Stringify an object into a query string, sorting the keys.
|
||||||
|
|
||||||
|
#### options
|
||||||
|
|
||||||
|
Type: `object`
|
||||||
|
|
||||||
|
##### strict
|
||||||
|
|
||||||
|
Type: `boolean`\
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
Strictly encode URI components with [strict-uri-encode](https://github.com/kevva/strict-uri-encode). It uses [encodeURIComponent](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) if set to false. You probably [don't care](https://github.com/sindresorhus/query-string/issues/42) about this option.
|
||||||
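For example, a minimal sketch of the difference:

```js
const queryString = require('query-string');

queryString.stringify({foo: "it's"});
//=> 'foo=it%27s'

queryString.stringify({foo: "it's"}, {strict: false});
//=> "foo=it's"
```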
|
|
||||||
|
##### encode
|
||||||
|
|
||||||
|
Type: `boolean`\
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
[URL encode](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) the keys and values.
|
||||||
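For example, a minimal sketch:

```js
const queryString = require('query-string');

queryString.stringify({'foo bar': 'baz qux'});
//=> 'foo%20bar=baz%20qux'

queryString.stringify({'foo bar': 'baz qux'}, {encode: false});
//=> 'foo bar=baz qux'
```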
|
|
||||||
|
##### arrayFormat
|
||||||
|
|
||||||
|
Type: `string`\
|
||||||
|
Default: `'none'`
|
||||||
|
|
||||||
|
- `'bracket'`: Serialize arrays using bracket representation:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'bracket'});
|
||||||
|
//=> 'foo[]=1&foo[]=2&foo[]=3'
|
||||||
|
```
|
||||||
|
|
||||||
|
- `'index'`: Serialize arrays using index representation:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'index'});
|
||||||
|
//=> 'foo[0]=1&foo[1]=2&foo[2]=3'
|
||||||
|
```
|
||||||
|
|
||||||
|
- `'comma'`: Serialize arrays by separating elements with comma:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'comma'});
|
||||||
|
//=> 'foo=1,2,3'
|
||||||
|
```
|
||||||
|
|
||||||
|
- `'none'`: Serialize arrays by using duplicate keys:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({foo: [1, 2, 3]});
|
||||||
|
//=> 'foo=1&foo=2&foo=3'
|
||||||
|
```
|
||||||
|
|
||||||
|
##### arrayFormatSeparator
|
||||||
|
|
||||||
|
Type: `string`\
|
||||||
|
Default: `','`
|
||||||
|
|
||||||
|
The character used to separate array elements when using `{arrayFormat: 'separator'}`.
|
||||||
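For example, mirroring the parse example above:

```js
const queryString = require('query-string');

queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'separator', arrayFormatSeparator: '|'});
//=> 'foo=1|2|3'
```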
|
|
||||||
|
##### sort
|
||||||
|
|
||||||
|
Type: `Function | boolean`
|
||||||
|
|
||||||
|
Supports a `Function` as a custom sorting function, or `false` to disable sorting.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
const order = ['c', 'a', 'b'];
|
||||||
|
|
||||||
|
queryString.stringify({a: 1, b: 2, c: 3}, {
|
||||||
|
sort: (a, b) => order.indexOf(a) - order.indexOf(b)
|
||||||
|
});
|
||||||
|
//=> 'c=3&a=1&b=2'
|
||||||
|
```
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({b: 1, c: 2, a: 3}, {sort: false});
|
||||||
|
//=> 'b=1&c=2&a=3'
|
||||||
|
```
|
||||||
|
|
||||||
|
If omitted, keys are sorted using `Array#sort()`, which means converting them to strings and comparing them in Unicode code point order.
|
||||||
|
|
||||||
|
##### skipNull
|
||||||
|
|
||||||
|
Skip keys with `null` as the value.
|
||||||
|
|
||||||
|
Note that keys with `undefined` as the value are always skipped.
|
||||||
|
|
||||||
|
Type: `boolean`\
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({a: 1, b: undefined, c: null, d: 4}, {
|
||||||
|
skipNull: true
|
||||||
|
});
|
||||||
|
//=> 'a=1&d=4'
|
||||||
|
```
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({a: undefined, b: null}, {
|
||||||
|
skipNull: true
|
||||||
|
});
|
||||||
|
//=> ''
|
||||||
|
```
|
||||||
|
|
||||||
|
##### skipEmptyString
|
||||||
|
|
||||||
|
Skip keys with an empty string as the value.
|
||||||
|
|
||||||
|
Type: `boolean`\
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({a: 1, b: '', c: '', d: 4}, {
|
||||||
|
skipEmptyString: true
|
||||||
|
});
|
||||||
|
//=> 'a=1&d=4'
|
||||||
|
```
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({a: '', b: ''}, {
|
||||||
|
skipEmptyString: true
|
||||||
|
});
|
||||||
|
//=> ''
|
||||||
|
```
|
||||||
|
|
||||||
|
### .extract(string)
|
||||||
|
|
||||||
|
Extract a query string from a URL that can be passed into `.parse()`.
|
||||||
|
|
||||||
|
Note: This behaviour can be changed with the `skipNull` option.
|
||||||
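For example, a minimal sketch:

```js
const queryString = require('query-string');

queryString.extract('https://foo.bar?foo=bar&baz=qux#top');
//=> 'foo=bar&baz=qux'
```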
|
|
||||||
|
### .parseUrl(string, options?)
|
||||||
|
|
||||||
|
Extract the URL and the query string as an object.
|
||||||
|
|
||||||
|
Returns an object with a `url` and `query` property.
|
||||||
|
|
||||||
|
If the `parseFragmentIdentifier` option is `true`, the object will also contain a `fragmentIdentifier` property.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parseUrl('https://foo.bar?foo=bar');
|
||||||
|
//=> {url: 'https://foo.bar', query: {foo: 'bar'}}
|
||||||
|
|
||||||
|
queryString.parseUrl('https://foo.bar?foo=bar#xyz', {parseFragmentIdentifier: true});
|
||||||
|
//=> {url: 'https://foo.bar', query: {foo: 'bar'}, fragmentIdentifier: 'xyz'}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### options
|
||||||
|
|
||||||
|
Type: `object`
|
||||||
|
|
||||||
|
The options are the same as for `.parse()`.
|
||||||
|
|
||||||
|
Extra options are listed below.
|
||||||
|
|
||||||
|
##### parseFragmentIdentifier
|
||||||
|
|
||||||
|
Parse the fragment identifier from the URL.
|
||||||
|
|
||||||
|
Type: `boolean`\
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parseUrl('https://foo.bar?foo=bar#xyz', {parseFragmentIdentifier: true});
|
||||||
|
//=> {url: 'https://foo.bar', query: {foo: 'bar'}, fragmentIdentifier: 'xyz'}
|
||||||
|
```
|
||||||
|
|
||||||
|
### .stringifyUrl(object, options?)
|
||||||
|
|
||||||
|
Stringify an object into a URL with a query string, sorting the keys. The inverse of [`.parseUrl()`](https://github.com/sindresorhus/query-string#parseurlstring-options).
|
||||||
|
|
||||||
|
The `options` are the same as for `.stringify()`.
|
||||||
|
|
||||||
|
Returns a string with the URL and a query string.
|
||||||
|
|
||||||
|
Query items in the `query` property override queries in the `url` property.
|
||||||
|
|
||||||
|
The `fragmentIdentifier` property overrides the fragment identifier in the `url` property.
|
||||||
|
|
||||||
|
```js
|
||||||
|
queryString.stringifyUrl({url: 'https://foo.bar', query: {foo: 'bar'}});
|
||||||
|
//=> 'https://foo.bar?foo=bar'
|
||||||
|
|
||||||
|
queryString.stringifyUrl({url: 'https://foo.bar?foo=baz', query: {foo: 'bar'}});
|
||||||
|
//=> 'https://foo.bar?foo=bar'
|
||||||
|
|
||||||
|
queryString.stringifyUrl({
|
||||||
|
url: 'https://foo.bar',
|
||||||
|
query: {
|
||||||
|
top: 'foo'
|
||||||
|
},
|
||||||
|
fragmentIdentifier: 'bar'
|
||||||
|
});
|
||||||
|
//=> 'https://foo.bar?top=foo#bar'
|
||||||
|
```
|
||||||
|
|
||||||
|
#### object
|
||||||
|
|
||||||
|
Type: `object`
|
||||||
|
|
||||||
|
##### url
|
||||||
|
|
||||||
|
Type: `string`
|
||||||
|
|
||||||
|
The URL to stringify.
|
||||||
|
|
||||||
|
##### query
|
||||||
|
|
||||||
|
Type: `object`
|
||||||
|
|
||||||
|
Query items to add to the URL.
|
||||||
|
|
||||||
|
### .pick(url, keys, options?)
|
||||||
|
### .pick(url, filter, options?)
|
||||||
|
|
||||||
|
Pick query parameters from a URL.
|
||||||
|
|
||||||
|
Returns a string with the new URL.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.pick('https://foo.bar?foo=1&bar=2#hello', ['foo']);
|
||||||
|
//=> 'https://foo.bar?foo=1#hello'
|
||||||
|
|
||||||
|
queryString.pick('https://foo.bar?foo=1&bar=2#hello', (name, value) => value === 2, {parseNumbers: true});
|
||||||
|
//=> 'https://foo.bar?bar=2#hello'
|
||||||
|
```
|
||||||
|
|
||||||
|
### .exclude(url, keys, options?)
|
||||||
|
### .exclude(url, filter, options?)
|
||||||
|
|
||||||
|
Exclude query parameters from a URL.
|
||||||
|
|
||||||
|
Returns a string with the new URL.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.exclude('https://foo.bar?foo=1&bar=2#hello', ['foo']);
|
||||||
|
//=> 'https://foo.bar?bar=2#hello'
|
||||||
|
|
||||||
|
queryString.exclude('https://foo.bar?foo=1&bar=2#hello', (name, value) => value === 2, {parseNumbers: true});
|
||||||
|
//=> 'https://foo.bar?foo=1#hello'
|
||||||
|
```
|
||||||
|
|
||||||
|
#### url
|
||||||
|
|
||||||
|
Type: `string`
|
||||||
|
|
||||||
|
The URL containing the query parameters to filter.
|
||||||
|
|
||||||
|
#### keys
|
||||||
|
|
||||||
|
Type: `string[]`
|
||||||
|
|
||||||
|
The names of the query parameters to keep (for `.pick()`) or remove (for `.exclude()`).
|
||||||
|
|
||||||
|
#### filter
|
||||||
|
|
||||||
|
Type: `(key, value) => boolean`
|
||||||
|
|
||||||
|
A filter predicate that will be provided the name of each query parameter and its value. The `parseNumbers` and `parseBooleans` options also affect `value`.
|
||||||
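For example, a minimal sketch (with `parseBooleans`, the predicate receives real booleans):

```js
const queryString = require('query-string');

queryString.exclude('https://foo.bar?foo=true&bar=false', (name, value) => value === true, {parseBooleans: true});
//=> 'https://foo.bar?bar=false'
```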
|
|
||||||
|
#### options
|
||||||
|
|
||||||
|
Type: `object`
|
||||||
|
|
||||||
|
[Parse options](#options) and [stringify options](#options-1).
|
||||||
|
|
||||||
|
## Nesting
|
||||||
|
|
||||||
|
This module intentionally doesn't support nesting as it's not spec'd and varies between implementations, which causes a lot of [edge cases](https://github.com/visionmedia/node-querystring/issues).
|
||||||
|
|
||||||
|
You're much better off just converting the object to a JSON string:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({
|
||||||
|
foo: 'bar',
|
||||||
|
nested: JSON.stringify({
|
||||||
|
unicorn: 'cake'
|
||||||
|
})
|
||||||
|
});
|
||||||
|
//=> 'foo=bar&nested=%7B%22unicorn%22%3A%22cake%22%7D'
|
||||||
|
```
|
||||||
|
|
||||||
|
However, there is support for multiple instances of the same key:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.parse('likes=cake&name=bob&likes=icecream');
|
||||||
|
//=> {likes: ['cake', 'icecream'], name: 'bob'}
|
||||||
|
|
||||||
|
queryString.stringify({color: ['taupe', 'chartreuse'], id: '515'});
|
||||||
|
//=> 'color=taupe&color=chartreuse&id=515'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Falsy values
|
||||||
|
|
||||||
|
Sometimes you want to unset a key, or maybe just make it present without assigning a value to it. Here is how falsy values are stringified:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryString = require('query-string');
|
||||||
|
|
||||||
|
queryString.stringify({foo: false});
|
||||||
|
//=> 'foo=false'
|
||||||
|
|
||||||
|
queryString.stringify({foo: null});
|
||||||
|
//=> 'foo'
|
||||||
|
|
||||||
|
queryString.stringify({foo: undefined});
|
||||||
|
//=> ''
|
||||||
|
```
|
||||||
|
|
||||||
|
## query-string for enterprise
|
||||||
|
|
||||||
|
Available as part of the Tidelift Subscription.
|
||||||
|
|
||||||
|
The maintainers of query-string and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-query-string?utm_source=npm-query-string&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)
|
||||||
29 node_modules/split-on-first/index.d.ts generated vendored Normal file
@@ -0,0 +1,29 @@
/**
Split a string on the first occurrence of a given separator.

@param string - The string to split.
@param separator - The separator to split on.

@example
```
import splitOnFirst = require('split-on-first');

splitOnFirst('a-b-c', '-');
//=> ['a', 'b-c']

splitOnFirst('key:value:value2', ':');
//=> ['key', 'value:value2']

splitOnFirst('a---b---c', '---');
//=> ['a', 'b---c']

splitOnFirst('a-b-c', '+');
//=> ['a-b-c']
```
*/
declare function splitOnFirst(
	string: string,
	separator: string
): [string, string?];

export = splitOnFirst;
22 node_modules/split-on-first/index.js generated vendored Normal file
@@ -0,0 +1,22 @@
'use strict';

module.exports = (string, separator) => {
	if (!(typeof string === 'string' && typeof separator === 'string')) {
		throw new TypeError('Expected the arguments to be of type `string`');
	}

	if (separator === '') {
		return [string];
	}

	const separatorIndex = string.indexOf(separator);

	if (separatorIndex === -1) {
		return [string];
	}

	return [
		string.slice(0, separatorIndex),
		string.slice(separatorIndex + separator.length)
	];
};
9 node_modules/split-on-first/license generated vendored Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
36 node_modules/split-on-first/package.json generated vendored Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
{
|
||||||
|
"name": "split-on-first",
|
||||||
|
"version": "1.1.0",
|
||||||
|
"description": "Split a string on the first occurance of a given separator",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "sindresorhus/split-on-first",
|
||||||
|
"author": {
|
||||||
|
"name": "Sindre Sorhus",
|
||||||
|
"email": "sindresorhus@gmail.com",
|
||||||
|
"url": "sindresorhus.com"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "xo && ava && tsd"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js",
|
||||||
|
"index.d.ts"
|
||||||
|
],
|
||||||
|
"keywords": [
|
||||||
|
"split",
|
||||||
|
"string",
|
||||||
|
"first",
|
||||||
|
"occurrence",
|
||||||
|
"separator",
|
||||||
|
"delimiter",
|
||||||
|
"text"
|
||||||
|
],
|
||||||
|
"devDependencies": {
|
||||||
|
"ava": "^1.4.1",
|
||||||
|
"tsd": "^0.7.2",
|
||||||
|
"xo": "^0.24.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
58 node_modules/split-on-first/readme.md generated vendored Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
# split-on-first [](https://travis-ci.com/sindresorhus/split-on-first)
|
||||||
|
|
||||||
|
> Split a string on the first occurrence of a given separator
|
||||||
|
|
||||||
|
This is similar to [`String#split()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/split), but that one splits on all the occurrences, not just the first one.
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install split-on-first
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
const splitOnFirst = require('split-on-first');
|
||||||
|
|
||||||
|
splitOnFirst('a-b-c', '-');
|
||||||
|
//=> ['a', 'b-c']
|
||||||
|
|
||||||
|
splitOnFirst('key:value:value2', ':');
|
||||||
|
//=> ['key', 'value:value2']
|
||||||
|
|
||||||
|
splitOnFirst('a---b---c', '---');
|
||||||
|
//=> ['a', 'b---c']
|
||||||
|
|
||||||
|
splitOnFirst('a-b-c', '+');
|
||||||
|
//=> ['a-b-c']
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
### splitOnFirst(string, separator)
|
||||||
|
|
||||||
|
#### string
|
||||||
|
|
||||||
|
Type: `string`
|
||||||
|
|
||||||
|
The string to split.
|
||||||
|
|
||||||
|
#### separator
|
||||||
|
|
||||||
|
Type: `string`
|
||||||
|
|
||||||
|
The separator to split on.
|
||||||
|
|
||||||
|
|
||||||
|
## Related
|
||||||
|
|
||||||
|
- [split-at](https://github.com/sindresorhus/split-at) - Split a string at one or more indices
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||||
2 node_modules/strict-uri-encode/index.js generated vendored Normal file
@@ -0,0 +1,2 @@
'use strict';
module.exports = str => encodeURIComponent(str).replace(/[!'()*]/g, x => `%${x.charCodeAt(0).toString(16).toUpperCase()}`);
21 node_modules/strict-uri-encode/license generated vendored Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) Kevin Martensson <kevinmartensson@gmail.com> (github.com/kevva)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
31 node_modules/strict-uri-encode/package.json generated vendored Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
{
|
||||||
|
"name": "strict-uri-encode",
|
||||||
|
"version": "2.0.0",
|
||||||
|
"description": "A stricter URI encode adhering to RFC 3986",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "kevva/strict-uri-encode",
|
||||||
|
"author": {
|
||||||
|
"name": "Kevin Mårtensson",
|
||||||
|
"email": "kevinmartensson@gmail.com",
|
||||||
|
"url": "github.com/kevva"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=4"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "xo && ava"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"keywords": [
|
||||||
|
"component",
|
||||||
|
"encode",
|
||||||
|
"RFC3986",
|
||||||
|
"uri"
|
||||||
|
],
|
||||||
|
"devDependencies": {
|
||||||
|
"ava": "*",
|
||||||
|
"xo": "*"
|
||||||
|
}
|
||||||
|
}
|
||||||
39 node_modules/strict-uri-encode/readme.md generated vendored Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
# strict-uri-encode [](https://travis-ci.org/kevva/strict-uri-encode)
|
||||||
|
|
||||||
|
> A stricter URI encode adhering to [RFC 3986](http://tools.ietf.org/html/rfc3986)
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install --save strict-uri-encode
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
const strictUriEncode = require('strict-uri-encode');
|
||||||
|
|
||||||
|
strictUriEncode('unicorn!foobar');
|
||||||
|
//=> 'unicorn%21foobar'
|
||||||
|
|
||||||
|
strictUriEncode('unicorn*foobar');
|
||||||
|
//=> 'unicorn%2Afoobar'
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
### strictUriEncode(string)
|
||||||
|
|
||||||
|
#### string
|
||||||
|
|
||||||
|
Type: `string`, `number`
|
||||||
|
|
||||||
|
String to URI encode.
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © [Kevin Mårtensson](http://github.com/kevva)
|
||||||
40 package-lock.json generated
@@ -1348,6 +1348,11 @@
|
|||||||
"integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=",
|
"integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"decode-uri-component": {
|
||||||
|
"version": "0.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz",
|
||||||
|
"integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU="
|
||||||
|
},
|
||||||
"decompress-response": {
|
"decompress-response": {
|
||||||
"version": "3.3.0",
|
"version": "3.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz",
|
||||||
@@ -2077,8 +2082,7 @@
|
|||||||
"fast-deep-equal": {
|
"fast-deep-equal": {
|
||||||
"version": "3.1.3",
|
"version": "3.1.3",
|
||||||
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
|
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
|
||||||
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
|
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"fast-diff": {
|
"fast-diff": {
|
||||||
"version": "1.2.0",
|
"version": "1.2.0",
|
||||||
@@ -2153,6 +2157,11 @@
|
|||||||
"to-regex-range": "^5.0.1"
|
"to-regex-range": "^5.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"filter-obj": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz",
|
||||||
|
"integrity": "sha1-mzERErxsYSehbgFsbF1/GeCAXFs="
|
||||||
|
},
|
||||||
"find-up": {
|
"find-up": {
|
||||||
"version": "4.1.0",
|
"version": "4.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
|
||||||
@@ -2435,9 +2444,9 @@
|
|||||||
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
|
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
|
||||||
},
|
},
|
||||||
"ini": {
|
"ini": {
|
||||||
"version": "1.3.5",
|
"version": "1.3.8",
|
||||||
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
|
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
|
||||||
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==",
|
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"irregular-plurals": {
|
"irregular-plurals": {
|
||||||
@@ -3518,6 +3527,17 @@
|
|||||||
"escape-goat": "^2.0.0"
|
"escape-goat": "^2.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"query-string": {
|
||||||
|
"version": "6.14.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/query-string/-/query-string-6.14.0.tgz",
|
||||||
|
"integrity": "sha512-In3o+lUxlgejoVJgwEdYtdxrmlL0cQWJXj0+kkI7RWVo7hg5AhFtybeKlC9Dpgbr8eOC4ydpEh8017WwyfzqVQ==",
|
||||||
|
"requires": {
|
||||||
|
"decode-uri-component": "^0.2.0",
|
||||||
|
"filter-obj": "^1.1.0",
|
||||||
|
"split-on-first": "^1.0.0",
|
||||||
|
"strict-uri-encode": "^2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"rc": {
|
"rc": {
|
||||||
"version": "1.2.8",
|
"version": "1.2.8",
|
||||||
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
|
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
|
||||||
@@ -3957,11 +3977,21 @@
|
|||||||
"integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==",
|
"integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"split-on-first": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw=="
|
||||||
|
},
|
||||||
"sprintf-js": {
|
"sprintf-js": {
|
||||||
"version": "1.0.3",
|
"version": "1.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
|
||||||
"integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw="
|
"integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw="
|
||||||
},
|
},
|
||||||
|
"strict-uri-encode": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz",
|
||||||
|
"integrity": "sha1-ucczDHBChi9rFC3CdLvMWGbONUY="
|
||||||
|
},
|
||||||
"string-width": {
|
"string-width": {
|
||||||
"version": "4.2.0",
|
"version": "4.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz",
|
||||||
|
|||||||
@@ -29,6 +29,7 @@
|
|||||||
"@octokit/types": "^5.5.0",
|
"@octokit/types": "^5.5.0",
|
||||||
"commander": "^6.0.0",
|
"commander": "^6.0.0",
|
||||||
"console-log-level": "^1.4.1",
|
"console-log-level": "^1.4.1",
|
||||||
|
"fast-deep-equal": "^3.1.3",
|
||||||
"file-url": "^3.0.0",
|
"file-url": "^3.0.0",
|
||||||
"fs": "0.0.1-security",
|
"fs": "0.0.1-security",
|
||||||
"js-yaml": "^3.13.1",
|
"js-yaml": "^3.13.1",
|
||||||
@@ -36,6 +37,7 @@
|
|||||||
"long": "^4.0.0",
|
"long": "^4.0.0",
|
||||||
"md5": "^2.2.1",
|
"md5": "^2.2.1",
|
||||||
"path": "^0.12.7",
|
"path": "^0.12.7",
|
||||||
|
"query-string": "^6.14.0",
|
||||||
"semver": "^7.3.2",
|
"semver": "^7.3.2",
|
||||||
"uuid": "^8.3.0",
|
"uuid": "^8.3.0",
|
||||||
"zlib": "^1.0.5"
|
"zlib": "^1.0.5"
|
||||||
|
|||||||
@@ -10,16 +10,11 @@ set -e
|
|||||||
# subsequent actions in the current job, and not the current action.
|
# subsequent actions in the current job, and not the current action.
|
||||||
export PATH="$HOME/.local/bin:$PATH"
|
export PATH="$HOME/.local/bin:$PATH"
|
||||||
|
|
||||||
# The Ubuntu 20.04 GHA environment does not come with a Python 2 pip
|
# Setup Python 3 dependency installation tools.
|
||||||
curl https://bootstrap.pypa.io/get-pip.py --output get-pip.py
|
|
||||||
python2 get-pip.py
|
|
||||||
|
|
||||||
python2 -m pip install --user --upgrade pip setuptools wheel
|
|
||||||
python3 -m pip install --user --upgrade pip setuptools wheel
|
python3 -m pip install --user --upgrade pip setuptools wheel
|
||||||
|
|
||||||
# virtualenv is a bit nicer for setting up virtual environment, since it will provide up-to-date versions of
|
# virtualenv is a bit nicer for setting up virtual environment, since it will provide up-to-date versions of
|
||||||
# pip/setuptools/wheel which basic `python3 -m venv venv` won't
|
# pip/setuptools/wheel which basic `python3 -m venv venv` won't
|
||||||
python2 -m pip install --user virtualenv
|
|
||||||
python3 -m pip install --user virtualenv
|
python3 -m pip install --user virtualenv
|
||||||
|
|
||||||
# We install poetry with pip instead of the recommended way, since the recommended way
|
# We install poetry with pip instead of the recommended way, since the recommended way
|
||||||
@@ -32,3 +27,13 @@ python3 -m pip install --user virtualenv
|
|||||||
# poetry 1.0.10 has error (https://github.com/python-poetry/poetry/issues/2711)
|
# poetry 1.0.10 has error (https://github.com/python-poetry/poetry/issues/2711)
|
||||||
python3 -m pip install --user poetry!=1.0.10
|
python3 -m pip install --user poetry!=1.0.10
|
||||||
python3 -m pip install --user pipenv
|
python3 -m pip install --user pipenv
|
||||||
|
|
||||||
|
if command -v python2 &> /dev/null; then
|
||||||
|
# Setup Python 2 dependency installation tools.
|
||||||
|
# The Ubuntu 20.04 GHA environment does not come with a Python 2 pip
|
||||||
|
curl --location --fail https://bootstrap.pypa.io/pip/2.7/get-pip.py | python2
|
||||||
|
|
||||||
|
python2 -m pip install --user --upgrade pip setuptools wheel
|
||||||
|
|
||||||
|
python2 -m pip install --user virtualenv
|
||||||
|
fi
|
||||||
|
|||||||
30 runner/package-lock.json generated
@@ -1194,24 +1194,24 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"elliptic": {
|
"elliptic": {
|
||||||
"version": "6.5.3",
|
"version": "6.5.4",
|
||||||
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.3.tgz",
|
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz",
|
||||||
"integrity": "sha512-IMqzv5wNQf+E6aHeIqATs0tOLeOTwj1QKbRcS3jBbYkl5oLAserA8yJTT7/VyHUYG91PRmPyeQDObKLPpeS4dw==",
|
"integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"requires": {
|
"requires": {
|
||||||
"bn.js": "^4.4.0",
|
"bn.js": "^4.11.9",
|
||||||
"brorand": "^1.0.1",
|
"brorand": "^1.1.0",
|
||||||
"hash.js": "^1.0.0",
|
"hash.js": "^1.0.0",
|
||||||
"hmac-drbg": "^1.0.0",
|
"hmac-drbg": "^1.0.1",
|
||||||
"inherits": "^2.0.1",
|
"inherits": "^2.0.4",
|
||||||
"minimalistic-assert": "^1.0.0",
|
"minimalistic-assert": "^1.0.1",
|
||||||
"minimalistic-crypto-utils": "^1.0.0"
|
"minimalistic-crypto-utils": "^1.0.1"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"bn.js": {
|
"bn.js": {
|
||||||
"version": "4.11.9",
|
"version": "4.12.0",
|
||||||
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz",
|
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
|
||||||
"integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==",
|
"integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==",
|
||||||
"dev": true
|
"dev": true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -2072,9 +2072,9 @@
|
|||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"ini": {
|
"ini": {
|
||||||
"version": "1.3.5",
|
"version": "1.3.8",
|
||||||
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
|
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
|
||||||
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==",
|
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"interpret": {
|
"interpret": {
|
||||||
|
|||||||
@@ -1,9 +1,17 @@
|
|||||||
import test from "ava";
|
import test from "ava";
|
||||||
|
import * as yaml from "js-yaml";
|
||||||
import sinon from "sinon";
|
import sinon from "sinon";
|
||||||
|
|
||||||
import * as actionsutil from "./actions-util";
|
import * as actionsutil from "./actions-util";
|
||||||
import { setupTests } from "./testing-utils";
|
import { setupTests } from "./testing-utils";
|
||||||
|
|
||||||
|
function errorCodes(
|
||||||
|
actual: actionsutil.CodedError[],
|
||||||
|
expected: actionsutil.CodedError[]
|
||||||
|
): [string[], string[]] {
|
||||||
|
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||||
|
}
|
||||||
|
|
||||||
setupTests(test);
|
setupTests(test);
|
||||||

 test("getRef() throws on the empty string", async (t) => {
@@ -82,128 +90,302 @@ test("prepareEnvironment() when a local run", (t) => {
   t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
 });

-test("validateWorkflow() when on is missing", (t) => {
-  const errors = actionsutil.validateWorkflow({});
+test("getWorkflowErrors() when on is empty", (t) => {
+  const errors = actionsutil.getWorkflowErrors({ on: {} });

-  t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
+  t.deepEqual(...errorCodes(errors, []));
 });

-test("validateWorkflow() when on.push is missing", (t) => {
-  const errors = actionsutil.validateWorkflow({ on: {} });
+test("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
+  const errors = actionsutil.getWorkflowErrors({ on: ["push"] });

-  console.log(errors);
-
-  t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
+  t.deepEqual(...errorCodes(errors, []));
 });

-test("validateWorkflow() when on.push is an array missing pull_request", (t) => {
-  const errors = actionsutil.validateWorkflow({ on: ["push"] });
+test("getWorkflowErrors() when on.push is an array missing push", (t) => {
+  const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });

-  t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPullRequestHook]);
+  t.deepEqual(
+    ...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook])
+  );
 });

-test("validateWorkflow() when on.push is an array missing push", (t) => {
-  const errors = actionsutil.validateWorkflow({ on: ["pull_request"] });
-
-  t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPushHook]);
-});
-
-test("validateWorkflow() when on.push is valid", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.push is valid", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: ["push", "pull_request"],
   });

-  t.deepEqual(errors.length, 0);
+  t.deepEqual(...errorCodes(errors, []));
 });

-test("validateWorkflow() when on.push is a valid superset", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.push is a valid superset", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: ["push", "pull_request", "schedule"],
   });

-  t.deepEqual(errors.length, 0);
+  t.deepEqual(...errorCodes(errors, []));
 });

-test("validateWorkflow() when on.push should not have a path", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.push should not have a path", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: {
       push: { branches: ["main"], paths: ["test/*"] },
       pull_request: { branches: ["main"] },
     },
   });

-  t.deepEqual(errors, [actionsutil.WorkflowErrors.PathsSpecified]);
+  t.deepEqual(
+    ...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified])
+  );
 });

-test("validateWorkflow() when on.push is a correct object", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.push is a correct object", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
   });

-  t.deepEqual(errors.length, 0);
+  t.deepEqual(...errorCodes(errors, []));
 });

-test("validateWorkflow() when on.push is correct with empty objects", (t) => {
-  const errors = actionsutil.validateWorkflow({
-    on: { push: undefined, pull_request: undefined },
+test("getWorkflowErrors() when on.pull_requests is a string", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
+    on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
   });

-  console.log(errors);
-
-  t.deepEqual(errors.length, 0);
+  t.deepEqual(
+    ...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
+  );
 });

-test("validateWorkflow() when on.push is mismatched", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
+    on: { push: { branches: "*" }, pull_request: { branches: "*" } },
+  });
+
+  t.deepEqual(...errorCodes(errors, []));
+});

test("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||||
|
const errors = actionsutil.getWorkflowErrors(
|
||||||
|
yaml.safeLoad(`
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
pull_request:
|
||||||
|
`)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||||
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["main"] },
|
push: { branches: ["main"] },
|
||||||
pull_request: { branches: ["feature"] },
|
pull_request: { branches: ["feature"] },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
|
t.deepEqual(
|
||||||
|
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validateWorkflow() when on.push is not mismatched", (t) => {
|
test("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["main", "feature"] },
|
push: { branches: ["main", "feature"] },
|
||||||
pull_request: { branches: ["main"] },
|
pull_request: { branches: ["main"] },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
t.deepEqual(errors.length, 0);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validateWorkflow() when on.push is mismatched for pull_request", (t) => {
|
test("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["main"] },
|
push: { branches: ["main"] },
|
||||||
pull_request: { branches: ["main", "feature"] },
|
pull_request: { branches: ["main", "feature"] },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
|
t.deepEqual(
|
||||||
|
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validateWorkflow() when on.pull_request for every branch but push specifies branches", (t) => {
|
test("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["main"] },
|
push: 1,
|
||||||
pull_request: null,
|
pull_request: 1,
|
||||||
|
},
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: 1,
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: [1],
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { 1: 1 },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: 1 },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: [1] },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: { steps: 1 } },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: [undefined] },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1 as any), []));
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: {
|
||||||
|
branches: 1,
|
||||||
|
},
|
||||||
|
pull_request: {
|
||||||
|
branches: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||||
|
const errors = actionsutil.getWorkflowErrors(
|
||||||
|
yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: ["main"]
|
||||||
|
pull_request:
|
||||||
|
`)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
||||||
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["feature/*"] },
|
||||||
|
pull_request: { branches: "feature/moose" },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validateWorkflow() when HEAD^2 is checked out", (t) => {
|
test("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["feature/moose"] },
|
||||||
|
pull_request: { branches: "feature/*" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
|
||||||
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: ["push", "pull_request"],
|
on: ["push", "pull_request"],
|
||||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||||
});
|
});
|
||||||
|
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]);
|
t.deepEqual(
|
||||||
|
...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead])
|
||||||
|
);
|
||||||
});
|
});
|
||||||
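The branch tests above all exercise one rule: every branch listed under on.pull_request must be covered by a branch (or pattern) under on.push, otherwise the action reports MismatchedBranches because the base branch may never have been analyzed. A minimal sketch of the exact-match form of that rule; the real check, shown further down in actions-util.ts, also lets a push glob such as feature/* cover a concrete pull_request branch:

// Which pull_request branches are not covered by any push branch?
function uncoveredBranches(push: string[], pullRequest: string[]): string[] {
  return pullRequest.filter((branch) => !push.includes(branch));
}

console.log(uncoveredBranches(["main", "feature"], ["main"])); // [] -> no error
console.log(uncoveredBranches(["main"], ["main", "feature"])); // ["feature"] -> MismatchedBranches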

 test("formatWorkflowErrors() when there is one error", (t) => {
@@ -221,6 +403,12 @@ test("formatWorkflowErrors() when there are multiple errors", (t) => {
   t.true(message.startsWith("2 issues were detected with this workflow:"));
 });

+test("formatWorkflowCause() with no errors", (t) => {
+  const message = actionsutil.formatWorkflowCause([]);
+
+  t.deepEqual(message, undefined);
+});
+
 test("formatWorkflowCause()", (t) => {
   const message = actionsutil.formatWorkflowCause([
     actionsutil.WorkflowErrors.CheckoutWrongHead,
@@ -230,3 +418,204 @@ test("formatWorkflowCause()", (t) => {
   t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
   t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
 });
+
+test("patternIsSuperset()", (t) => {
+  t.false(actionsutil.patternIsSuperset("main-*", "main"));
+  t.true(actionsutil.patternIsSuperset("*", "*"));
+  t.true(actionsutil.patternIsSuperset("*", "main-*"));
+  t.false(actionsutil.patternIsSuperset("main-*", "*"));
+  t.false(actionsutil.patternIsSuperset("main-*", "main"));
+  t.true(actionsutil.patternIsSuperset("main", "main"));
+  t.false(actionsutil.patternIsSuperset("*", "feature/*"));
+  t.true(actionsutil.patternIsSuperset("**", "feature/*"));
+  t.false(actionsutil.patternIsSuperset("feature-*", "**"));
+  t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
+  t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
+  t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
+  t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
+  t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
+  t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
+  t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
+  t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
+  t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
+  t.true(
+    actionsutil.patternIsSuperset(
+      "/robin/*/release/*",
+      "/robin/moose/release/goose"
+    )
+  );
+  t.false(
+    actionsutil.patternIsSuperset(
+      "/robin/moose/release/goose",
+      "/robin/*/release/*"
+    )
+  );
+});
+
+test("getWorkflowErrors() when branches contain dots", (t) => {
+  const errors = actionsutil.getWorkflowErrors(
+    yaml.safeLoad(`
+  on:
+    push:
+      branches: [4.1, master]
+    pull_request:
+      # The branches below must be a subset of the branches above
+      branches: [4.1, master]
+  `)
+  );
+
+  t.deepEqual(...errorCodes(errors, []));
+});
+
+test("getWorkflowErrors() when on.push has a trailing comma", (t) => {
+  const errors = actionsutil.getWorkflowErrors(
+    yaml.safeLoad(`
+  name: "CodeQL"
+  on:
+    push:
+      branches: [master, ]
+    pull_request:
+      # The branches below must be a subset of the branches above
+      branches: [master]
+  `)
+  );
+
+  t.deepEqual(...errorCodes(errors, []));
+});
+
test("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
|
||||||
|
const errors = actionsutil.getWorkflowErrors(
|
||||||
|
yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead])
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test3";
|
||||||
|
|
||||||
|
const errors = actionsutil.getWorkflowErrors(
|
||||||
|
yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on is missing", (t) => {
|
||||||
|
const errors = actionsutil.getWorkflowErrors(
|
||||||
|
yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
`)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() with a different on setup", (t) => {
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors(
|
||||||
|
yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: "workflow_dispatch"
|
||||||
|
`)
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors(
|
||||||
|
yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: [workflow_dispatch]
|
||||||
|
`)
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors(
|
||||||
|
yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
`)
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors(
|
||||||
|
yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
`)
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
actionsutil.getWorkflowErrors(
|
||||||
|
yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: ["push"]
|
||||||
|
`)
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|||||||
@@ -45,6 +45,13 @@ export function getRequiredEnvParam(paramName: string): string {
   return value;
 }

+export function getTemporaryDirectory(): string {
+  const value = process.env["CODEQL_ACTION_TEMP"];
+  return value !== undefined && value !== ""
+    ? value
+    : getRequiredEnvParam("RUNNER_TEMP");
+}
+
 /**
  * Ensures all required environment variables are set in the context of a local run.
  */
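getTemporaryDirectory() above prefers the action-specific CODEQL_ACTION_TEMP variable and only falls back to the runner's RUNNER_TEMP when it is unset or empty. A standalone sketch of the same fallback; the error message is illustrative:

function getTemporaryDirectorySketch(
  env: Record<string, string | undefined>
): string {
  const value = env["CODEQL_ACTION_TEMP"];
  if (value !== undefined && value !== "") {
    return value;
  }
  const runnerTemp = env["RUNNER_TEMP"];
  if (runnerTemp === undefined || runnerTemp === "") {
    throw new Error("RUNNER_TEMP is not set"); // illustrative error text
  }
  return runnerTemp;
}

console.log(getTemporaryDirectorySketch({ RUNNER_TEMP: "/tmp/runner" })); // "/tmp/runner"
console.log(
  getTemporaryDirectorySketch({
    CODEQL_ACTION_TEMP: "/tmp/codeql",
    RUNNER_TEMP: "/tmp/runner",
  })
); // "/tmp/codeql"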
@@ -111,7 +118,7 @@ interface WorkflowJob {
 }

 interface WorkflowTrigger {
-  branches?: string[];
+  branches?: string[] | string;
   paths?: string[];
 }

@@ -134,42 +141,83 @@ function isObject(o: unknown): o is object {
   return o !== null && typeof o === "object";
 }

-enum MissingTriggers {
-  None = 0,
-  Push = 1,
-  PullRequest = 2,
+const GLOB_PATTERN = new RegExp("(\\*\\*?)");
+
+function escapeRegExp(string) {
+  return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
 }

-interface CodedError {
+function patternToRegExp(value) {
+  return new RegExp(
+    `^${value
+      .toString()
+      .split(GLOB_PATTERN)
+      .reduce(function (arr, cur) {
+        if (cur === "**") {
+          arr.push(".*?");
+        } else if (cur === "*") {
+          arr.push("[^/]*?");
+        } else if (cur) {
+          arr.push(escapeRegExp(cur));
+        }
+        return arr;
+      }, [])
+      .join("")}$`
+  );
+}
+
+// this function should return true if patternA is a superset of patternB
+// e.g: * is a superset of main-* but main-* is not a superset of *.
+export function patternIsSuperset(patternA: string, patternB: string): boolean {
+  return patternToRegExp(patternA).test(patternB);
+}
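patternToRegExp() above turns a branch glob into an anchored regular expression: "**" may cross path separators, "*" may not, and everything else is escaped and matched literally; patternIsSuperset(a, b) then tests b against the regex built from a. A condensed standalone sketch of the same translation with a few expected results:

const GLOB = new RegExp("(\\*\\*?)");

function escapeRegExpSketch(s: string): string {
  return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

function patternToRegExpSketch(value: string): RegExp {
  const body = value
    .split(GLOB)
    .map((cur) => {
      if (cur === "**") return ".*?";
      if (cur === "*") return "[^/]*?";
      return cur ? escapeRegExpSketch(cur) : "";
    })
    .join("");
  return new RegExp(`^${body}$`);
}

function patternIsSupersetSketch(patternA: string, patternB: string): boolean {
  return patternToRegExpSketch(patternA).test(patternB);
}

console.log(patternIsSupersetSketch("feature/*", "feature/moose")); // true
console.log(patternIsSupersetSketch("feature/*", "feature/a/b"));   // false, "*" stops at "/"
console.log(patternIsSupersetSketch("**", "releases/v2/hotfix"));   // true, "**" crosses "/"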
+
+function branchesToArray(branches?: string | null | string[]): string[] | "**" {
+  if (typeof branches === "string") {
+    return [branches];
+  }
+  if (Array.isArray(branches)) {
+    if (branches.length === 0) {
+      return "**";
+    }
+    return branches;
+  }
+  return "**";
+}
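branchesToArray() above normalises the branches field of a trigger to either a concrete list or the sentinel "**", which the caller treats as "all branches". Its behaviour, sketched standalone:

function branchesToArraySketch(
  branches?: string | null | string[]
): string[] | "**" {
  if (typeof branches === "string") {
    return [branches];
  }
  if (Array.isArray(branches)) {
    return branches.length === 0 ? "**" : branches;
  }
  return "**";
}

console.log(branchesToArraySketch("main"));          // ["main"]
console.log(branchesToArraySketch(["main", "dev"])); // ["main", "dev"]
console.log(branchesToArraySketch([]));              // "**"
console.log(branchesToArraySketch(undefined));       // "**" (unspecified means every branch)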
+
+export interface CodedError {
   message: string;
   code: string;
 }

-function toCodedErrors(errors: {
-  [key: string]: string;
-}): { [key: string]: CodedError } {
+function toCodedErrors<T>(errors: T): Record<keyof T, CodedError> {
   return Object.entries(errors).reduce((acc, [key, value]) => {
     acc[key] = { message: value, code: key };
     return acc;
-  }, {} as ReturnType<typeof toCodedErrors>);
+  }, {} as Record<keyof T, CodedError>);
 }
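toCodedErrors() above turns a plain map of code -> message into objects that carry both fields, so each key doubles as the machine-readable error code used by the status report. A standalone sketch showing the resulting shape (the error table here is hypothetical):

interface CodedErrorSketch {
  message: string;
  code: string;
}

function toCodedErrorsSketch<T extends Record<string, string>>(
  errors: T
): Record<keyof T, CodedErrorSketch> {
  return Object.entries(errors).reduce((acc, [key, value]) => {
    acc[key as keyof T] = { message: value, code: key };
    return acc;
  }, {} as Record<keyof T, CodedErrorSketch>);
}

const Errors = toCodedErrorsSketch({
  ExampleProblem: "Something about this workflow looks off.", // hypothetical entry
});

console.log(Errors.ExampleProblem);
// -> { message: "Something about this workflow looks off.", code: "ExampleProblem" }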

+// code to send back via status report
+// message to add as a warning annotation to the run
 export const WorkflowErrors = toCodedErrors({
   MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
-  MissingHooks: `Please specify on.push and on.pull_request hooks so that Code Scanning can compare pull requests against the state of the base branch.`,
-  MissingPullRequestHook: `Please specify an on.pull_request hook so that Code Scanning is run against pull requests.`,
   MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
   PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
   PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
   CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
 });

-export function validateWorkflow(doc: Workflow): CodedError[] {
+export function getWorkflowErrors(doc: Workflow): CodedError[] {
   const errors: CodedError[] = [];

-  // .jobs[key].steps[].run
-  for (const job of Object.values(doc?.jobs || {})) {
-    for (const step of job?.steps || []) {
+  const jobName = process.env.GITHUB_JOB;
+
+  if (jobName) {
+    const job = doc?.jobs?.[jobName];
+
+    const steps = job?.steps;
+
+    if (Array.isArray(steps)) {
+      for (const step of steps) {
         // this was advice that we used to give in the README
         // we actually want to run the analysis on the merge commit
         // to produce results that are more inline with expectations
@@ -177,40 +225,37 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
         // and avoid some race conditions
         if (step?.run === "git checkout HEAD^2") {
           errors.push(WorkflowErrors.CheckoutWrongHead);
+          break;
+        }
       }
     }
   }

-  let missing = MissingTriggers.None;
+  let missingPush = false;

   if (doc.on === undefined) {
-    missing = MissingTriggers.Push | MissingTriggers.PullRequest;
+    // this is not a valid config
   } else if (typeof doc.on === "string") {
-    switch (doc.on) {
-      case "push":
-        missing = MissingTriggers.PullRequest;
-        break;
-      case "pull_request":
-        missing = MissingTriggers.Push;
-        break;
-      default:
-        missing = MissingTriggers.Push | MissingTriggers.PullRequest;
-        break;
+    if (doc.on === "pull_request") {
+      missingPush = true;
     }
   } else if (Array.isArray(doc.on)) {
-    if (!doc.on.includes("push")) {
-      missing = missing | MissingTriggers.Push;
-    }
-    if (!doc.on.includes("pull_request")) {
-      missing = missing | MissingTriggers.PullRequest;
+    const hasPush = doc.on.includes("push");
+    const hasPullRequest = doc.on.includes("pull_request");
+    if (hasPullRequest && !hasPush) {
+      missingPush = true;
     }
   } else if (isObject(doc.on)) {
-    if (!Object.prototype.hasOwnProperty.call(doc.on, "pull_request")) {
-      missing = missing | MissingTriggers.PullRequest;
-    }
-    if (!Object.prototype.hasOwnProperty.call(doc.on, "push")) {
-      missing = missing | MissingTriggers.Push;
-    } else {
+    const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
+    const hasPullRequest = Object.prototype.hasOwnProperty.call(
+      doc.on,
+      "pull_request"
+    );
+
+    if (!hasPush && hasPullRequest) {
+      missingPush = true;
+    }
+    if (hasPush && hasPullRequest) {
       const paths = doc.on.push?.paths;
       // if you specify paths or paths-ignore you can end up with commits that have no baseline
       // if they didn't change any files
@@ -224,13 +269,18 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
       }
     }

-    if (doc.on.push) {
-      const push = doc.on.push.branches || [];
-
-      if (doc.on.pull_request) {
-        const pull_request = doc.on.pull_request.branches || [];
+    // if doc.on.pull_request is null that means 'all branches'
+    // if doc.on.pull_request is undefined that means 'off'
+    // we only want to check for mismatched branches if pull_request is on.
+    if (doc.on.pull_request !== undefined) {
+      const push = branchesToArray(doc.on.push?.branches);
+
+      if (push !== "**") {
+        const pull_request = branchesToArray(doc.on.pull_request?.branches);
+
+        if (pull_request !== "**") {
           const difference = pull_request.filter(
-            (value) => !push.includes(value)
+            (value) => !push.some((o) => patternIsSuperset(o, value))
           );
           if (difference.length > 0) {
             // there are branches in pull_request that may not have a baseline
@@ -244,30 +294,40 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
           }
         }
       }
+    }
   }

-  switch (missing) {
-    case MissingTriggers.PullRequest | MissingTriggers.Push:
-      errors.push(WorkflowErrors.MissingHooks);
-      break;
-    case MissingTriggers.PullRequest:
-      errors.push(WorkflowErrors.MissingPullRequestHook);
-      break;
-    case MissingTriggers.Push:
+  if (missingPush) {
     errors.push(WorkflowErrors.MissingPushHook);
-      break;
   }

   return errors;
 }
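getWorkflowErrors() above accepts the three shapes the on key can take in a workflow file (a single event string, an array of event names, or an object keyed by event) and reduces them to one question: is pull_request enabled without push? A condensed standalone sketch of just that decision:

type OnValue = string | string[] | Record<string, unknown> | undefined;

function isPushMissing(on: OnValue): boolean {
  if (on === undefined) {
    return false; // not a valid config; other checks deal with it
  }
  if (typeof on === "string") {
    return on === "pull_request";
  }
  if (Array.isArray(on)) {
    return on.includes("pull_request") && !on.includes("push");
  }
  const hasPush = Object.prototype.hasOwnProperty.call(on, "push");
  const hasPullRequest = Object.prototype.hasOwnProperty.call(on, "pull_request");
  return hasPullRequest && !hasPush;
}

console.log(isPushMissing(["pull_request"]));               // true  -> MissingPushHook
console.log(isPushMissing({ push: {}, pull_request: {} })); // false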

-export async function getWorkflowErrors(): Promise<CodedError[]> {
-  const workflow = await getWorkflow();
-
-  if (workflow === undefined) {
-    return [];
+export async function validateWorkflow(): Promise<undefined | string> {
+  let workflow: Workflow;
+  try {
+    workflow = await getWorkflow();
+  } catch (e) {
+    return `error: getWorkflow() failed: ${e.toString()}`;
+  }
+  let workflowErrors: CodedError[];
+  try {
+    workflowErrors = getWorkflowErrors(workflow);
+  } catch (e) {
+    return `error: getWorkflowErrors() failed: ${e.toString()}`;
   }

-  return validateWorkflow(workflow);
+  if (workflowErrors.length > 0) {
+    let message: string;
+    try {
+      message = formatWorkflowErrors(workflowErrors);
+    } catch (e) {
+      return `error: formatWorkflowErrors() failed: ${e.toString()}`;
+    }
+    core.warning(message);
+  }
+
+  return formatWorkflowCause(workflowErrors);
 }
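The async validateWorkflow() above never throws: each stage is wrapped in try/catch and failures are folded into the returned string, so a broken workflow file produces a warning and a short cause rather than a failed run. A hedged sketch of how a caller might consume the result; the stand-in function body and the payload field name are assumptions, not the action's real wiring:

// Stand-in with the same contract as validateWorkflow() above: resolves to
// undefined when the workflow looks fine, or to a short cause string otherwise.
async function validateWorkflowStub(): Promise<undefined | string> {
  return "CheckoutWrongHead,PathsSpecified"; // placeholder result
}

async function reportWorkflowIssues(): Promise<void> {
  const cause = await validateWorkflowStub();
  if (cause !== undefined) {
    // Field name is illustrative, not the action's real status-report payload.
    console.log(JSON.stringify({ workflow_validation_cause: cause }));
  }
}

void reportWorkflowIssues();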

 export function formatWorkflowErrors(errors: CodedError[]): string {
@@ -285,19 +345,14 @@ export function formatWorkflowCause(errors: CodedError[]): undefined | string {
   return errors.map((e) => e.code).join(",");
 }

-export async function getWorkflow(): Promise<Workflow | undefined> {
+export async function getWorkflow(): Promise<Workflow> {
   const relativePath = await getWorkflowPath();
   const absolutePath = path.join(
     getRequiredEnvParam("GITHUB_WORKSPACE"),
     relativePath
   );

-  try {
   return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
-  } catch (e) {
-    core.warning(`Could not read workflow: ${e.toString()}`);
-    return undefined;
-  }
 }

 /**
@@ -560,11 +615,11 @@ export async function sendStatusReport<S extends StatusReportBase>(
     // this means that this action version is no longer compatible with the API
     // we still want to continue as it is likely the analysis endpoint will work
     if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) {
-      core.warning(
+      core.debug(
         "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action."
       );
     } else {
-      core.warning(
+      core.debug(
         "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action."
       );
     }
@@ -19,6 +19,7 @@ test("emptyPaths", async (t) => {
     tempDir: tmpDir,
     toolCacheDir: tmpDir,
     codeQLCmd: "",
+    gitHubVersion: { type: util.GitHubVariant.DOTCOM } as util.GitHubVersion,
   };
   analysisPaths.includeAndExcludeAnalysisPaths(config);
   t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -38,6 +39,7 @@ test("nonEmptyPaths", async (t) => {
     tempDir: tmpDir,
     toolCacheDir: tmpDir,
     codeQLCmd: "",
+    gitHubVersion: { type: util.GitHubVariant.DOTCOM } as util.GitHubVersion,
   };
   analysisPaths.includeAndExcludeAnalysisPaths(config);
   t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -61,6 +63,7 @@ test("exclude temp dir", async (t) => {
     tempDir,
     toolCacheDir,
     codeQLCmd: "",
+    gitHubVersion: { type: util.GitHubVariant.DOTCOM } as util.GitHubVersion,
   };
   analysisPaths.includeAndExcludeAnalysisPaths(config);
   t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -1,16 +1,23 @@
+import * as fs from "fs";
+import * as path from "path";
+
 import * as core from "@actions/core";

 import * as actionsUtil from "./actions-util";
 import {
-  AnalysisStatusReport,
   runAnalyze,
   CodeQLAnalysisError,
+  QueriesStatusReport,
 } from "./analyze";
-import { getConfig } from "./config-utils";
+import { Config, getConfig } from "./config-utils";
 import { getActionsLogger } from "./logging";
-import { parseRepositoryNwo } from "./repository";
+import * as upload_lib from "./upload-lib";
 import * as util from "./util";

+interface AnalysisStatusReport
+  extends upload_lib.UploadStatusReport,
+    QueriesStatusReport {}
+
 interface FinishStatusReport
   extends actionsUtil.StatusReportBase,
     AnalysisStatusReport {}
@@ -41,6 +48,7 @@ async function sendStatusReport(
 async function run() {
   const startedAt = new Date();
   let stats: AnalysisStatusReport | undefined = undefined;
+  let config: Config | undefined = undefined;
   try {
     actionsUtil.prepareLocalRunEnvironment();
     if (
@@ -55,10 +63,7 @@ async function run() {
       return;
     }
     const logger = getActionsLogger();
-    const config = await getConfig(
-      actionsUtil.getRequiredEnvParam("RUNNER_TEMP"),
-      logger
-    );
+    config = await getConfig(actionsUtil.getTemporaryDirectory(), logger);
     if (config === undefined) {
       throw new Error(
         "Config file could not be found at expected location. Has the 'init' action been called?"
@@ -68,25 +73,28 @@ async function run() {
       auth: actionsUtil.getRequiredInput("token"),
       url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
     };
-    stats = await runAnalyze(
-      parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")),
-      await actionsUtil.getCommitOid(),
-      await actionsUtil.getRef(),
-      await actionsUtil.getAnalysisKey(),
-      actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"),
-      actionsUtil.getWorkflowRunID(),
-      actionsUtil.getRequiredInput("checkout_path"),
-      actionsUtil.getRequiredInput("matrix"),
-      apiDetails,
-      actionsUtil.getRequiredInput("upload") === "true",
-      "actions",
-      actionsUtil.getRequiredInput("output"),
+    const outputDir = actionsUtil.getRequiredInput("output");
+    const queriesStats = await runAnalyze(
+      outputDir,
       util.getMemoryFlag(actionsUtil.getOptionalInput("ram")),
       util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")),
       util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger),
       config,
       logger
     );
+
+    if (actionsUtil.getRequiredInput("upload") === "true") {
+      const uploadStats = await upload_lib.uploadFromActions(
+        outputDir,
+        config.gitHubVersion,
+        apiDetails,
+        logger
+      );
+      stats = { ...queriesStats, ...uploadStats };
+    } else {
+      logger.info("Not uploading results");
+      stats = { ...queriesStats };
+    }
   } catch (error) {
     core.setFailed(error.message);
     console.log(error);
@@ -97,6 +105,35 @@ async function run() {

     await sendStatusReport(startedAt, stats, error);
     return;
+  } finally {
+    if (core.isDebug() && config !== undefined) {
+      core.info("Debug mode is on. Printing CodeQL debug logs...");
+      for (const language of config.languages) {
+        const databaseDirectory = util.getCodeQLDatabasePath(
+          config.tempDir,
+          language
+        );
+        const logsDirectory = path.join(databaseDirectory, "log");
+
+        const walkLogFiles = (dir: string) => {
+          const entries = fs.readdirSync(dir, { withFileTypes: true });
+          for (const entry of entries) {
+            if (entry.isFile()) {
+              core.startGroup(
+                `CodeQL Debug Logs - ${language} - ${entry.name}`
+              );
+              process.stdout.write(
+                fs.readFileSync(path.resolve(dir, entry.name))
+              );
+              core.endGroup();
+            } else if (entry.isDirectory()) {
+              walkLogFiles(path.resolve(dir, entry.name));
+            }
+          }
+        };
+        walkLogFiles(logsDirectory);
+      }
+    }
   }

   await sendStatusReport(startedAt, stats);
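The finally block above prints every CodeQL log file when Actions debug mode is on, using a small recursive walk over each database's log directory. The same traversal, sketched standalone; the directory path is illustrative:

import * as fs from "fs";
import * as path from "path";

// Recursively visit every file under `dir` and hand it to `visit`.
function walkFiles(dir: string, visit: (filePath: string) => void): void {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const entryPath = path.resolve(dir, entry.name);
    if (entry.isFile()) {
      visit(entryPath);
    } else if (entry.isDirectory()) {
      walkFiles(entryPath, visit);
    }
  }
}

// Example: print the size of each log file under a hypothetical log directory.
const logsDirectory = "/tmp/codeql-db/log"; // illustrative path only
if (fs.existsSync(logsDirectory)) {
  walkFiles(logsDirectory, (file) =>
    console.log(`${file}: ${fs.statSync(file).size} bytes`)
  );
}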
@@ -34,6 +34,9 @@ test("status report fields", async (t) => {
     tempDir: tmpDir,
     toolCacheDir: tmpDir,
     codeQLCmd: "",
+    gitHubVersion: {
+      type: util.GitHubVariant.DOTCOM,
+    } as util.GitHubVersion,
   };
   fs.mkdirSync(util.getCodeQLDatabasePath(config.tempDir, language), {
     recursive: true,
@@ -4,14 +4,11 @@ import * as path from "path";
 import * as toolrunner from "@actions/exec/lib/toolrunner";

 import * as analysisPaths from "./analysis-paths";
-import { GitHubApiDetails } from "./api-client";
 import { getCodeQL } from "./codeql";
 import * as configUtils from "./config-utils";
 import { isScannedLanguage, Language } from "./languages";
 import { Logger } from "./logging";
-import { RepositoryNwo } from "./repository";
 import * as sharedEnv from "./shared-environment";
-import * as upload_lib from "./upload-lib";
 import * as util from "./util";

 export class CodeQLAnalysisError extends Error {
@@ -54,10 +51,6 @@ export interface QueriesStatusReport {
   analyze_failure_language?: string;
 }

-export interface AnalysisStatusReport
-  extends upload_lib.UploadStatusReport,
-    QueriesStatusReport {}
-
 async function setupPythonExtractor(logger: Logger) {
   const codeqlPython = process.env["CODEQL_PYTHON"];
   if (codeqlPython === undefined || codeqlPython.length === 0) {
@@ -217,24 +210,13 @@ export async function runQueries(
 }

 export async function runAnalyze(
-  repositoryNwo: RepositoryNwo,
-  commitOid: string,
-  ref: string,
-  analysisKey: string | undefined,
-  analysisName: string | undefined,
-  workflowRunID: number | undefined,
-  checkoutPath: string,
-  environment: string | undefined,
-  apiDetails: GitHubApiDetails,
-  doUpload: boolean,
-  mode: util.Mode,
   outputDir: string,
   memoryFlag: string,
   addSnippetsFlag: string,
   threadsFlag: string,
   config: configUtils.Config,
   logger: Logger
-): Promise<AnalysisStatusReport> {
+): Promise<QueriesStatusReport> {
   // Delete the tracer config env var to avoid tracing ourselves
   delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];

@@ -253,25 +235,5 @@ export async function runAnalyze(
     logger
   );

-  if (!doUpload) {
-    logger.info("Not uploading results");
   return { ...queriesStats };
-  }
-
-  const uploadStats = await upload_lib.upload(
-    outputDir,
-    repositoryNwo,
-    commitOid,
-    ref,
-    analysisKey,
-    analysisName,
-    workflowRunID,
-    checkoutPath,
-    environment,
-    apiDetails,
-    mode,
-    logger
-  );
-
-  return { ...queriesStats, ...uploadStats };
 }
@@ -1,19 +0,0 @@
-import test from "ava";
-
-import { apiVersionInRange, DisallowedAPIVersionReason } from "./api-client";
-
-test("allowed API versions", async (t) => {
-  t.is(apiVersionInRange("1.33.0", "1.33", "2.0"), undefined);
-  t.is(apiVersionInRange("1.33.1", "1.33", "2.0"), undefined);
-  t.is(apiVersionInRange("1.34.0", "1.33", "2.0"), undefined);
-  t.is(apiVersionInRange("2.0.0", "1.33", "2.0"), undefined);
-  t.is(apiVersionInRange("2.0.1", "1.33", "2.0"), undefined);
-  t.is(
-    apiVersionInRange("1.32.0", "1.33", "2.0"),
-    DisallowedAPIVersionReason.ACTION_TOO_NEW
-  );
-  t.is(
-    apiVersionInRange("2.1.0", "1.33", "2.0"),
-    DisallowedAPIVersionReason.ACTION_TOO_OLD
-  );
-});
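The deleted test covered apiVersionInRange, which this diff also removes from api-client.ts further down: a GitHub Enterprise version below the minimum means the Action is too new for the server, one above the maximum means it is too old. A sketch of that check mirroring the removed implementation, using the semver package it relied on:

import * as semver from "semver";

enum DisallowedAPIVersionReasonSketch {
  ACTION_TOO_OLD,
  ACTION_TOO_NEW,
}

function apiVersionInRangeSketch(
  version: string,
  minimumVersion: string,
  maximumVersion: string
): DisallowedAPIVersionReasonSketch | undefined {
  if (!semver.satisfies(version, `>=${minimumVersion}`)) {
    return DisallowedAPIVersionReasonSketch.ACTION_TOO_NEW;
  }
  if (!semver.satisfies(version, `<=${maximumVersion}`)) {
    return DisallowedAPIVersionReasonSketch.ACTION_TOO_OLD;
  }
  return undefined;
}

console.log(apiVersionInRangeSketch("1.34.0", "1.33", "2.0")); // undefined (in range)
console.log(apiVersionInRangeSketch("1.32.0", "1.33", "2.0")); // ACTION_TOO_NEW
console.log(apiVersionInRangeSketch("2.1.0", "1.33", "2.0"));  // ACTION_TOO_OLD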
@@ -1,87 +1,37 @@
 import * as path from "path";

-import { exportVariable } from "@actions/core";
 import * as githubUtils from "@actions/github/lib/utils";
-import * as retry from "@octokit/plugin-retry";
-import { OctokitResponse } from "@octokit/types";
 import consoleLogLevel from "console-log-level";
-import * as semver from "semver";

 import { getRequiredEnvParam, getRequiredInput } from "./actions-util";
-import * as apiCompatibility from "./api-compatibility.json";
-import { Logger, getActionsLogger } from "./logging";
-import { isLocalRun, Mode } from "./util";
+import { isLocalRun } from "./util";

 export enum DisallowedAPIVersionReason {
   ACTION_TOO_OLD,
   ACTION_TOO_NEW,
 }

+export type GitHubApiCombinedDetails = GitHubApiDetails &
+  GitHubApiExternalRepoDetails;
+
 export interface GitHubApiDetails {
   auth: string;
   url: string;
 }

-const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
-const CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR =
-  "CODEQL_ACTION_WARNED_ABOUT_VERSION";
-let hasBeenWarnedAboutVersion = false;
+export interface GitHubApiExternalRepoDetails {
+  externalRepoAuth?: string;
+  url: string;
+}

 export const getApiClient = function (
   apiDetails: GitHubApiDetails,
-  mode: Mode,
-  logger: Logger,
-  allowLocalRun = false,
-  possibleFailureExpected = false
+  allowLocalRun = false
 ) {
   if (isLocalRun() && !allowLocalRun) {
     throw new Error("Invalid API call in local run");
   }
-  const customOctokit = githubUtils.GitHub.plugin(retry.retry, (octokit, _) => {
-    octokit.hook.after("request", (response: OctokitResponse<any>, __) => {
-      if (response.status < 400 && !possibleFailureExpected) {
-        if (hasBeenWarnedAboutVersion) {
-          return;
-        }
-      }
-      if (
-        response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined ||
-        process.env[CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR] === undefined
-      ) {
-        return;
-      }
-      const installedVersion = response.headers[
-        GITHUB_ENTERPRISE_VERSION_HEADER
-      ] as string;
-      const disallowedAPIVersionReason = apiVersionInRange(
-        installedVersion,
-        apiCompatibility.minimumVersion,
-        apiCompatibility.maximumVersion
-      );
-
-      const toolName = mode === "actions" ? "Action" : "Runner";
-
-      if (
-        disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_OLD
-      ) {
-        logger.warning(
-          `The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${installedVersion}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`
-        );
-      }
-      if (
-        disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_NEW
-      ) {
-        logger.warning(
-          `GitHub Enterprise ${installedVersion} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`
-        );
-      }
-      hasBeenWarnedAboutVersion = true;
-      if (mode === "actions") {
-        exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true);
-      }
-    });
-  });
-  return new customOctokit(
+  return new githubUtils.GitHub(
     githubUtils.getOctokitOptions(apiDetails.auth, {
       baseUrl: getApiUrl(apiDetails.url),
       userAgent: "CodeQL Action",
@@ -93,7 +43,7 @@ export const getApiClient = function (
 function getApiUrl(githubUrl: string): string {
   const url = new URL(githubUrl);

-  // If we detect this is trying to be to github.com
+  // If we detect this is trying to connect to github.com
   // then return with a fixed canonical URL.
   if (url.hostname === "github.com" || url.hostname === "api.github.com") {
     return "https://api.github.com";
@@ -113,19 +63,5 @@ export function getActionsApiClient(allowLocalRun = false) {
     url: getRequiredEnvParam("GITHUB_SERVER_URL"),
   };

-  return getApiClient(apiDetails, "actions", getActionsLogger(), allowLocalRun);
-}
-
-export function apiVersionInRange(
-  version: string,
-  minimumVersion: string,
-  maximumVersion: string
-): DisallowedAPIVersionReason | undefined {
-  if (!semver.satisfies(version, `>=${minimumVersion}`)) {
-    return DisallowedAPIVersionReason.ACTION_TOO_NEW;
-  }
-  if (!semver.satisfies(version, `<=${maximumVersion}`)) {
-    return DisallowedAPIVersionReason.ACTION_TOO_OLD;
-  }
-  return undefined;
+  return getApiClient(apiDetails, allowLocalRun);
 }
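getApiUrl() above canonicalises the configured GitHub URL before the Octokit client is constructed: anything pointing at github.com is rewritten to the public REST endpoint. A sketch of that mapping; the /api/v3 suffix for other hosts is an assumption here, since that branch of the function falls outside this hunk:

import * as path from "path";

function getApiUrlSketch(githubUrl: string): string {
  const url = new URL(githubUrl);
  if (url.hostname === "github.com" || url.hostname === "api.github.com") {
    return "https://api.github.com";
  }
  // Assumed GHES layout: REST API served under /api/v3.
  url.pathname = path.posix.join(url.pathname, "api", "v3");
  return url.toString();
}

console.log(getApiUrlSketch("https://github.com"));      // https://api.github.com
console.log(getApiUrlSketch("https://ghe.example.com")); // https://ghe.example.com/api/v3 (assumed)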
Some files were not shown because too many files have changed in this diff.