Mirror of https://github.com/github/codeql-action.git (synced 2025-12-13 02:59:59 +08:00)
Compare commits — 193 commits between branches `hackathon-…` and `update-v1-…` (branch names truncated in the source view).
(Commit list: 193 entries, running from c87ee1c65a to 7ae9b0db35. Only the SHA1 column survived extraction; author, date, and commit-message fields are not recoverable from this view.)
(First changed file — its name was not preserved in the extracted view.)

@@ -44,7 +44,6 @@
     "@typescript-eslint/no-unsafe-call": "off",
     "@typescript-eslint/no-unsafe-member-access": "off",
     "@typescript-eslint/no-unsafe-return": "off",
-    "@typescript-eslint/no-unused-vars": "off",
     "@typescript-eslint/no-var-requires": "off",
     "@typescript-eslint/prefer-regexp-exec": "off",
     "@typescript-eslint/require-await": "off",
.github/update-release-branch.py (vendored) — 14 changed lines

(The extracted side-by-side view shows identical before/after text for most of the modified lines, so the hunks below reproduce their surviving lines as context; the only textually recoverable change is the `...` → `..` range fix in get_commit_difference.)

@@ -35,7 +35,7 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
   commits_without_pull_requests = []
   for commit in all_commits:
     pr = get_pr_for_commit(repo, commit)

     if pr is None:
       commits_without_pull_requests.append(commit)
     elif not any(p for p in pull_requests if p.number == pr.number):

@@ -47,7 +47,7 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
   # Sort PRs and commits by age
   pull_requests = sorted(pull_requests, key=lambda pr: pr.number)
   commits_without_pull_requests = sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)

   # Start constructing the body text
   body = 'Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH

@@ -62,7 +62,7 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
     body += '\n- #' + str(pr.number)
     body += ' - ' + pr.title
     body += ' (@' + merger + ')'

   # List all commits not part of a PR
   if len(commits_without_pull_requests) > 0:
     body += '\n\nContains the following commits not from a pull request:'

@@ -86,7 +86,7 @@ def get_conductor(repo, pull_requests, other_commits):
   # If there are any PRs then use whoever merged the last one
   if len(pull_requests) > 0:
     return get_merger_of_pr(repo, pull_requests[-1])

   # Otherwise take the author of the latest commit
   return other_commits[-1].author.login

@@ -95,7 +95,7 @@ def get_conductor(repo, pull_requests, other_commits):
 # This will not include any commits that exist on the release branch
 # that aren't on main.
 def get_commit_difference(repo):
-  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MAIN_BRANCH).strip().split('\n')
+  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '..' + MAIN_BRANCH).strip().split('\n')

   # Convert to full-fledged commit objects
   commits = [repo.get_commit(c) for c in commits]
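The substance of this fix is git's range notation: `A..B` lists commits reachable from B but not from A, while `A...B` lists the symmetric difference (commits reachable from either side but not both), which would also pull in release-branch-only commits — exactly what the comment above says should be excluded. A minimal sketch, with placeholder branch names rather than values taken from the script:

```bash
#!/bin/bash
# Hypothetical branch names for illustration; the script builds the range from
# ORIGIN, LATEST_RELEASE_BRANCH and MAIN_BRANCH.
git log --pretty=format:%H origin/release-branch..main    # two dots: commits on main that are not on release-branch
git log --pretty=format:%H origin/release-branch...main   # three dots: commits on either branch but not on both
```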
@@ -119,7 +119,7 @@ def get_truncated_commit_message(commit):
 # Returns the PR object, or None if no PR could be found.
 def get_pr_for_commit(repo, commit):
   prs = commit.get_pulls()

   if prs.totalCount > 0:
     # In the case that there are multiple PRs, return the earliest one
     prs = list(prs)

@@ -165,7 +165,7 @@ def main():
   if branch_exists_on_remote(new_branch_name):
     print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
     return

   # Create the new branch and push it to the remote
   print('Creating branch ' + new_branch_name)
   run_git('checkout', '-b', new_branch_name, MAIN_BRANCH)
.github/workflows/codeql.yml (vendored) — 47 changed lines

@@ -7,10 +7,56 @@ on:
     branches: [main, v1]

 jobs:
+  # Identify the CodeQL tool versions to use in the analysis job.
+  check-codeql-versions:
+    runs-on: ubuntu-latest
+    outputs:
+      versions: ${{ steps.compare.outputs.versions }}
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Init with default CodeQL bundle from the VM image
+      id: init-default
+      uses: ./init
+      with:
+        languages: javascript
+    - name: Remove empty database
+      # allows us to run init a second time
+      run: |
+        rm -rf "$RUNNER_TEMP/codeql_databases"
+    - name: Init with latest CodeQL bundle
+      id: init-latest
+      uses: ./init
+      with:
+        tools: latest
+        languages: javascript
+    - name: Compare default and latest CodeQL bundle versions
+      id: compare
+      env:
+        CODEQL_DEFAULT: ${{ steps.init-default.outputs.codeql-path }}
+        CODEQL_LATEST: ${{ steps.init-latest.outputs.codeql-path }}
+      run: |
+        CODEQL_VERSION_DEFAULT="$("$CODEQL_DEFAULT" version --format terse)"
+        CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)"
+        echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT"
+        echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST"
+        if [[ "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
+          # Just use `tools: null` to avoid duplication in the analysis job.
+          VERSIONS_JSON='[null]'
+        else
+          # Use both `tools: null` and `tools: latest` in the analysis job.
+          VERSIONS_JSON='[null, "latest"]'
+        fi
+        # Output a JSON-encoded list with the distinct versions to test against.
+        echo "Suggested matrix config for analysis job: $VERSIONS_JSON"
+        echo "::set-output name=versions::${VERSIONS_JSON}"
+
   build:
+    needs: [check-codeql-versions]
     strategy:
       matrix:
         os: [ubuntu-latest,windows-latest,macos-latest]
+        tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
     runs-on: ${{ matrix.os }}

     steps:

@@ -20,6 +66,7 @@ jobs:
       with:
         languages: javascript
         config-file: ./.github/codeql/codeql-config.yml
+        tools: ${{ matrix.tools }}
     # confirm steps.init.outputs.codeql-path points to the codeql binary
     - name: Print CodeQL Version
       run: ${{steps.init.outputs.codeql-path}} version --format=json
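The point of the new check-codeql-versions job is to keep the build matrix small: `tools: latest` only becomes a matrix entry when the latest CodeQL bundle actually differs from the one preinstalled on the runner image. The hand-off between the two jobs uses a workflow command printed to stdout; a compressed sketch of that mechanism (the value shown is one the step can emit):

```bash
#!/bin/bash
# The compare step prints a workflow command; the Actions runner turns it into
# a step output, which the job then re-exports as outputs.versions.
echo "::set-output name=versions::[null, \"latest\"]"
# The build job reads it back as
#   tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
# giving one matrix entry per element: `tools: null` (default bundle) and `tools: latest`.
```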
.github/workflows/integration-testing.yml (vendored) — 507 deletions

@@ -1,507 +0,0 @@ — the "Integration Testing" workflow (triggered on push to main and v1 and on pull_request) is deleted in its entirety; its jobs move into .github/workflows/pr-checks.yml (next file). The deleted jobs were:

- multi-language-repo_test-autodetect-languages: moves the checkout aside ("Move codeql-action": mkdir ../action; mv * .github ../action/; mv ../action/tests/multi-language-repo/{*,.github} .), runs ./../action/init with no explicit languages, builds via ./build.sh, runs ./../action/analyze, then checks that exactly six databases (cpp, csharp, go, java, javascript, python) exist under "$RUNNER_TEMP/codeql_databases".
- multi-language-repo_test-custom-queries-and-remote-config: matrix over ubuntu/windows/macos-latest × tools [~, latest]; init with languages cpp,csharp,java,javascript,python and the remote config file github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}.
- multi-language-repo_test-go-custom-queries: matrix over the three OSes (Go installed via actions/setup-go@v2 on macOS only); init with languages: go and config-file ./.github/codeql/custom-queries.yml.
- go-custom-tracing: same OS matrix, CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on", manual `go build main.go`.
- go-custom-tracing-autobuild: ubuntu-latest only (custom tracing with a manual build already covers all OSes), using ./../action/autobuild.
- multi-language-repo_rubocop: Ruby 2.6 plus code-scanning-rubocop 0.3.0; runs rubocop with the CodeScanning::SarifFormatter and uploads rubocop.sarif via ./../action/upload-sarif, treating only rubocop exit status 2 as a failure.
- test-proxy: runs in an ubuntu:18.04 container (--dns 127.0.0.1) behind a datadog/squid proxy service, with https_proxy: http://squid-proxy:3128.
- runner-analyze-javascript-{ubuntu,windows,macos} and runner-analyze-csharp-{ubuntu,windows,macos}: build the runner (npm install, npm run build-runner) and drive codeql-runner-linux / codeql-runner-win.exe / codeql-runner-macos init and analyze directly; the JavaScript Linux and macOS variants pass --config-file ./.github/codeql/codeql-config.yml, and the C# variants source ./codeql-runner/codeql-env.sh (piped through Invoke-Expression on Windows) before running `dotnet build`.
- runner-analyze-csharp-autobuild-{ubuntu,windows,macos}: as above, but building via the runner's autobuild command.
- runner-upload-sarif: uploads src/testdata/empty-sarif.sarif with the runner, deliberately without TEST_MODE so that real API compatibility is exercised, and only when the pull request head and base live in the same repository.

Every analyze/upload step above except runner-upload-sarif runs with TEST_MODE: true.
.github/workflows/pr-checks.yml (vendored) — 640 changed lines

@@ -1,5 +1,8 @@
 name: "PR checks"

+env:
+  GO111MODULE: auto
+
 on:
   push:
     branches: [main, v1]

@@ -20,25 +23,7 @@ jobs:
     steps:
     - uses: actions/checkout@v2
     - name: Check generated JavaScript
-      run: |
-        # Sanity check that repo is clean to start with
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then this workflow needs attention...
-          >&2 echo "Failed: Repo should be clean before testing!"
-          exit 1
-        fi
-        # Wipe the lib directory incase there are extra unnecessary files in there
-        rm -rf lib
-        # Generate the JavaScript files
-        npm run-script build
-        # Check that repo is still clean
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then the PR needs attention
-          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
-          git status
-          exit 1
-        fi
-        echo "Success: JavaScript files are up to date"
+      run: .github/workflows/script/check-js.sh

   check-node-modules:
     runs-on: ubuntu-latest

@@ -46,27 +31,10 @@ jobs:
     steps:
     - uses: actions/checkout@v2
     - name: Check node modules up to date
-      run: |
-        # Sanity check that repo is clean to start with
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then this workflow needs attention...
-          >&2 echo "Failed: Repo should be clean before testing!"
-          exit 1
-        fi
-        # Reinstall modules and then clean to remove absolute paths
-        # Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
-        npm ci
-        npm run removeNPMAbsolutePaths
-        # Check that repo is still clean
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then the PR needs attention
-          >&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
-          git status
-          exit 1
-        fi
-        echo "Success: node_modules are up to date"
+      run: .github/workflows/script/check-node-modules.sh

   npm-test:
+    needs: [check-js, check-node-modules]
     strategy:
       matrix:
         os: [ubuntu-latest,macos-latest]
@@ -76,3 +44,599 @@
     - uses: actions/checkout@v2
     - name: npm run-script test
       run: npm run-script test

The 596 added lines that follow append the former integration-testing jobs to this workflow: multi-language-repo_test-autodetect-languages, check-codeql-versions, multi-language-repo_test-custom-queries-and-remote-config, multi-language-repo_test-go-custom-queries, go-custom-tracing, go-custom-tracing-autobuild, multi-language-repo_rubocop, test-proxy, runner-analyze-javascript-{ubuntu,windows,macos}, runner-analyze-csharp-{ubuntu,windows,macos}, runner-analyze-csharp-autobuild-{ubuntu,windows,macos}, and runner-upload-sarif. They match the job definitions summarized for integration-testing.yml above, with these differences:

- Every appended job declares `needs: [check-js, check-node-modules]`; the jobs that exercise a CodeQL bundle version additionally depend on check-codeql-versions and take the bundle from a matrix dimension `tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}`, passed to init as `tools: ${{ matrix.tools }}` (previously only the custom-queries job had a hard-coded `tools: [~, latest]` matrix).
- A check-codeql-versions job is added, mirroring the one added to codeql.yml above except that it runs against the moved-aside action (./../action/init) and its messages refer to the integration tests rather than the analysis job.
- The "Move codeql-action" step gains an extra command, `mv ../action/.github/workflows .github`, which puts the action's workflow files back into the test checkout.
- In the runner-analyze-csharp-{ubuntu,macos} jobs the manual build becomes `. ./codeql-runner/codeql-env.sh` followed by `$CODEQL_RUNNER dotnet build`, and on Windows `cat ./codeql-runner/codeql-env.sh | Invoke-Expression` followed by `& $Env:CODEQL_RUNNER dotnet build` (previously a bare `dotnet build`).
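Nearly every integration job above starts from the same "Move codeql-action" recipe before invoking `./../action/init`. Restating it with comments (the commands are the workflow's own; the comments are an interpretation):

```bash
#!/bin/bash
# The checkout initially contains the codeql-action sources themselves.
mkdir ../action
# Move the entire action, including its .github directory, out of the workspace...
mv * .github ../action/
# ...and turn the workspace into the multi-language test repository instead.
mv ../action/tests/multi-language-repo/{*,.github} .
# pr-checks variant only: put the action's workflow files back into the test checkout.
mv ../action/.github/workflows .github
```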
.github/workflows/script/check-js.sh (vendored, new executable file) — 21 additions

@@ -0,0 +1,21 @@
+#!/bin/bash
+set -eu
+
+# Sanity check that repo is clean to start with
+if [ ! -z "$(git status --porcelain)" ]; then
+    # If we get a fail here then this workflow needs attention...
+    >&2 echo "Failed: Repo should be clean before testing!"
+    exit 1
+fi
+# Wipe the lib directory incase there are extra unnecessary files in there
+rm -rf lib
+# Generate the JavaScript files
+npm run-script build
+# Check that repo is still clean
+if [ ! -z "$(git status --porcelain)" ]; then
+    # If we get a fail here then the PR needs attention
+    >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
+    git status
+    exit 1
+fi
+echo "Success: JavaScript files are up to date"
21
.github/workflows/script/check-node-modules.sh
vendored
Executable file
21
.github/workflows/script/check-node-modules.sh
vendored
Executable file
@@ -0,0 +1,21 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
# Sanity check that repo is clean to start with
|
||||||
|
if [ ! -z "$(git status --porcelain)" ]; then
|
||||||
|
# If we get a fail here then this workflow needs attention...
|
||||||
|
>&2 echo "Failed: Repo should be clean before testing!"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
# Reinstall modules and then clean to remove absolute paths
|
||||||
|
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
|
||||||
|
npm ci
|
||||||
|
npm run removeNPMAbsolutePaths
|
||||||
|
# Check that repo is still clean
|
||||||
|
if [ ! -z "$(git status --porcelain)" ]; then
|
||||||
|
# If we get a fail here then the PR needs attention
|
||||||
|
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
|
||||||
|
git status
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "Success: node_modules are up to date"
|
||||||
1
.github/workflows/update-release-branch.yml
vendored
1
.github/workflows/update-release-branch.yml
vendored
@@ -12,6 +12,7 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
update:
|
update:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
if: ${{ github.repository == 'github/codeql-action' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
with:
|
with:
|
||||||
|
|||||||
11
README.md
11
README.md
@@ -96,7 +96,16 @@ Use the `config-file` parameter of the `init` action to enable the configuration
|
|||||||
config-file: ./.github/codeql/codeql-config.yml
|
config-file: ./.github/codeql/codeql-config.yml
|
||||||
```
|
```
|
||||||
|
|
||||||
The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
|
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- uses: github/codeql-action/init@v1
|
||||||
|
with:
|
||||||
|
config-file: owner/repo/codeql-config.yml@branch
|
||||||
|
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
|
||||||
|
```
|
||||||
|
|
||||||
|
For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
|
||||||
|
|
||||||
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
|
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
|
||||||
|
|
||||||
|
|||||||
@@ -19,6 +19,9 @@ inputs:
|
|||||||
queries:
|
queries:
|
||||||
description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
|
description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
|
||||||
required: false
|
required: false
|
||||||
|
external-repository-token:
|
||||||
|
description: A token for fetching external config files and queries if they reside in a private repository.
|
||||||
|
required: false
|
||||||
setup-python-dependencies:
|
setup-python-dependencies:
|
||||||
description: Try to auto-install your python dependencies
|
description: Try to auto-install your python dependencies
|
||||||
required: true
|
required: true
|
||||||
|
|||||||
215
lib/actions-util.js
generated
215
lib/actions-util.js
generated
@@ -51,6 +51,13 @@ function getRequiredEnvParam(paramName) {
|
|||||||
return value;
|
return value;
|
||||||
}
|
}
|
||||||
exports.getRequiredEnvParam = getRequiredEnvParam;
|
exports.getRequiredEnvParam = getRequiredEnvParam;
|
||||||
|
function getTemporaryDirectory() {
|
||||||
|
const value = process.env["CODEQL_ACTION_TEMP"];
|
||||||
|
return value !== undefined && value !== ""
|
||||||
|
? value
|
||||||
|
: getRequiredEnvParam("RUNNER_TEMP");
|
||||||
|
}
|
||||||
|
exports.getTemporaryDirectory = getTemporaryDirectory;
|
||||||
/**
|
/**
|
||||||
* Ensures all required environment variables are set in the context of a local run.
|
* Ensures all required environment variables are set in the context of a local run.
|
||||||
*/
|
*/
|
||||||
@@ -70,7 +77,7 @@ exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
|
|||||||
/**
|
/**
|
||||||
* Gets the SHA of the commit that is currently checked out.
|
* Gets the SHA of the commit that is currently checked out.
|
||||||
*/
|
*/
|
||||||
exports.getCommitOid = async function () {
|
exports.getCommitOid = async function (ref = "HEAD") {
|
||||||
// Try to use git to get the current commit SHA. If that fails then
|
// Try to use git to get the current commit SHA. If that fails then
|
||||||
// log but otherwise silently fall back to using the SHA from the environment.
|
// log but otherwise silently fall back to using the SHA from the environment.
|
||||||
// The only time these two values will differ is during analysis of a PR when
|
// The only time these two values will differ is during analysis of a PR when
|
||||||
@@ -80,7 +87,7 @@ exports.getCommitOid = async function () {
|
|||||||
// reported on the merge commit.
|
// reported on the merge commit.
|
||||||
try {
|
try {
|
||||||
let commitOid = "";
|
let commitOid = "";
|
||||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["rev-parse", "HEAD"], {
|
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["rev-parse", ref], {
|
||||||
silent: true,
|
silent: true,
|
||||||
listeners: {
|
listeners: {
|
||||||
stdout: (data) => {
|
stdout: (data) => {
|
||||||
@@ -107,6 +114,7 @@ function escapeRegExp(string) {
|
|||||||
}
|
}
|
||||||
function patternToRegExp(value) {
|
function patternToRegExp(value) {
|
||||||
return new RegExp(`^${value
|
return new RegExp(`^${value
|
||||||
|
.toString()
|
||||||
.split(GLOB_PATTERN)
|
.split(GLOB_PATTERN)
|
||||||
.reduce(function (arr, cur) {
|
.reduce(function (arr, cur) {
|
||||||
if (cur === "**") {
|
if (cur === "**") {
|
||||||
@@ -140,35 +148,30 @@ function branchesToArray(branches) {
|
|||||||
}
|
}
|
||||||
return "**";
|
return "**";
|
||||||
}
|
}
|
||||||
var MissingTriggers;
|
|
||||||
(function (MissingTriggers) {
|
|
||||||
MissingTriggers[MissingTriggers["None"] = 0] = "None";
|
|
||||||
MissingTriggers[MissingTriggers["Push"] = 1] = "Push";
|
|
||||||
MissingTriggers[MissingTriggers["PullRequest"] = 2] = "PullRequest";
|
|
||||||
})(MissingTriggers || (MissingTriggers = {}));
|
|
||||||
function toCodedErrors(errors) {
|
function toCodedErrors(errors) {
|
||||||
return Object.entries(errors).reduce((acc, [key, value]) => {
|
return Object.entries(errors).reduce((acc, [key, value]) => {
|
||||||
acc[key] = { message: value, code: key };
|
acc[key] = { message: value, code: key };
|
||||||
return acc;
|
return acc;
|
||||||
}, {});
|
}, {});
|
||||||
}
|
}
|
||||||
|
// code to send back via status report
|
||||||
|
// message to add as a warning annotation to the run
|
||||||
exports.WorkflowErrors = toCodedErrors({
|
exports.WorkflowErrors = toCodedErrors({
|
||||||
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||||
MissingHooks: `Please specify on.push and on.pull_request hooks so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
MissingPullRequestHook: `Please specify an on.pull_request hook so that Code Scanning is explicitly run against pull requests. This will be required to see results on pull requests from January 31 2021.`,
|
|
||||||
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||||
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||||
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
||||||
LintFailed: `Unable to lint workflow for CodeQL.`,
|
|
||||||
});
|
});
|
||||||
function validateWorkflow(doc) {
|
function getWorkflowErrors(doc) {
|
||||||
var _a, _b, _c, _d, _e, _f, _g, _h;
|
var _a, _b, _c, _d, _e, _f, _g, _h;
|
||||||
const errors = [];
|
const errors = [];
|
||||||
// .jobs[key].steps[].run
|
const jobName = process.env.GITHUB_JOB;
|
||||||
for (const job of Object.values(((_a = doc) === null || _a === void 0 ? void 0 : _a.jobs) || {})) {
|
if (jobName) {
|
||||||
if (Array.isArray((_b = job) === null || _b === void 0 ? void 0 : _b.steps)) {
|
const job = (_b = (_a = doc) === null || _a === void 0 ? void 0 : _a.jobs) === null || _b === void 0 ? void 0 : _b[jobName];
|
||||||
for (const step of (_c = job) === null || _c === void 0 ? void 0 : _c.steps) {
|
const steps = (_c = job) === null || _c === void 0 ? void 0 : _c.steps;
|
||||||
|
if (Array.isArray(steps)) {
|
||||||
|
for (const step of steps) {
|
||||||
// this was advice that we used to give in the README
|
// this was advice that we used to give in the README
|
||||||
// we actually want to run the analysis on the merge commit
|
// we actually want to run the analysis on the merge commit
|
||||||
// to produce results that are more inline with expectations
|
// to produce results that are more inline with expectations
|
||||||
@@ -176,43 +179,34 @@ function validateWorkflow(doc) {
|
|||||||
// and avoid some race conditions
|
// and avoid some race conditions
|
||||||
if (((_d = step) === null || _d === void 0 ? void 0 : _d.run) === "git checkout HEAD^2") {
|
if (((_d = step) === null || _d === void 0 ? void 0 : _d.run) === "git checkout HEAD^2") {
|
||||||
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let missing = MissingTriggers.None;
|
let missingPush = false;
|
||||||
if (doc.on === undefined) {
|
if (doc.on === undefined) {
|
||||||
missing = MissingTriggers.Push | MissingTriggers.PullRequest;
|
// this is not a valid config
|
||||||
}
|
}
|
||||||
else if (typeof doc.on === "string") {
|
else if (typeof doc.on === "string") {
|
||||||
switch (doc.on) {
|
if (doc.on === "pull_request") {
|
||||||
case "push":
|
missingPush = true;
|
||||||
missing = MissingTriggers.PullRequest;
|
|
||||||
break;
|
|
||||||
case "pull_request":
|
|
||||||
missing = MissingTriggers.Push;
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
missing = MissingTriggers.Push | MissingTriggers.PullRequest;
|
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else if (Array.isArray(doc.on)) {
|
else if (Array.isArray(doc.on)) {
|
||||||
if (!doc.on.includes("push")) {
|
const hasPush = doc.on.includes("push");
|
||||||
missing = missing | MissingTriggers.Push;
|
const hasPullRequest = doc.on.includes("pull_request");
|
||||||
}
|
if (hasPullRequest && !hasPush) {
|
||||||
if (!doc.on.includes("pull_request")) {
|
missingPush = true;
|
||||||
missing = missing | MissingTriggers.PullRequest;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else if (isObject(doc.on)) {
|
else if (isObject(doc.on)) {
|
||||||
if (!Object.prototype.hasOwnProperty.call(doc.on, "pull_request")) {
|
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
||||||
missing = missing | MissingTriggers.PullRequest;
|
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
|
||||||
|
if (!hasPush && hasPullRequest) {
|
||||||
|
missingPush = true;
|
||||||
}
|
}
|
||||||
if (!Object.prototype.hasOwnProperty.call(doc.on, "push")) {
|
if (hasPush && hasPullRequest) {
|
||||||
missing = missing | MissingTriggers.Push;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
const paths = (_e = doc.on.push) === null || _e === void 0 ? void 0 : _e.paths;
|
const paths = (_e = doc.on.push) === null || _e === void 0 ? void 0 : _e.paths;
|
||||||
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
||||||
// if they didn't change any files
|
// if they didn't change any files
|
||||||
@@ -225,56 +219,63 @@ function validateWorkflow(doc) {
|
|||||||
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
const push = branchesToArray((_g = doc.on.push) === null || _g === void 0 ? void 0 : _g.branches);
|
// if doc.on.pull_request is null that means 'all branches'
|
||||||
if (push !== "**") {
|
// if doc.on.pull_request is undefined that means 'off'
|
||||||
const pull_request = branchesToArray((_h = doc.on.pull_request) === null || _h === void 0 ? void 0 : _h.branches);
|
// we only want to check for mismatched branches if pull_request is on.
|
||||||
if (pull_request !== "**") {
|
if (doc.on.pull_request !== undefined) {
|
||||||
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
const push = branchesToArray((_g = doc.on.push) === null || _g === void 0 ? void 0 : _g.branches);
|
||||||
if (difference.length > 0) {
|
if (push !== "**") {
|
||||||
// there are branches in pull_request that may not have a baseline
|
const pull_request = branchesToArray((_h = doc.on.pull_request) === null || _h === void 0 ? void 0 : _h.branches);
|
||||||
// because we are not building them on push
|
if (pull_request !== "**") {
|
||||||
|
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
||||||
|
if (difference.length > 0) {
|
||||||
|
// there are branches in pull_request that may not have a baseline
|
||||||
|
// because we are not building them on push
|
||||||
|
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (push.length > 0) {
|
||||||
|
// push is set up to run on a subset of branches
|
||||||
|
// and you could open a PR against a branch with no baseline
|
||||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else if (push.length > 0) {
|
|
||||||
// push is set up to run on a subset of branches
|
|
||||||
// and you could open a PR against a branch with no baseline
|
|
||||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
if (missingPush) {
|
||||||
// on is not a known type
|
errors.push(exports.WorkflowErrors.MissingPushHook);
|
||||||
// this workflow is likely malformed
|
|
||||||
missing = MissingTriggers.Push | MissingTriggers.PullRequest;
|
|
||||||
}
|
|
||||||
switch (missing) {
|
|
||||||
case MissingTriggers.PullRequest | MissingTriggers.Push:
|
|
||||||
errors.push(exports.WorkflowErrors.MissingHooks);
|
|
||||||
break;
|
|
||||||
case MissingTriggers.PullRequest:
|
|
||||||
errors.push(exports.WorkflowErrors.MissingPullRequestHook);
|
|
||||||
break;
|
|
||||||
case MissingTriggers.Push:
|
|
||||||
errors.push(exports.WorkflowErrors.MissingPushHook);
|
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
return errors;
|
return errors;
|
||||||
}
|
}
|
||||||
exports.validateWorkflow = validateWorkflow;
|
exports.getWorkflowErrors = getWorkflowErrors;
|
||||||
async function getWorkflowErrors() {
|
async function validateWorkflow() {
|
||||||
|
let workflow;
|
||||||
try {
|
try {
|
||||||
const workflow = await getWorkflow();
|
workflow = await getWorkflow();
|
||||||
if (workflow === undefined) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
return validateWorkflow(workflow);
|
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
return [exports.WorkflowErrors.LintFailed];
|
return `error: getWorkflow() failed: ${e.toString()}`;
|
||||||
}
|
}
|
||||||
|
let workflowErrors;
|
||||||
|
try {
|
||||||
|
workflowErrors = getWorkflowErrors(workflow);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return `error: getWorkflowErrors() failed: ${e.toString()}`;
|
||||||
|
}
|
||||||
|
if (workflowErrors.length > 0) {
|
||||||
|
let message;
|
||||||
|
try {
|
||||||
|
message = formatWorkflowErrors(workflowErrors);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return `error: formatWorkflowErrors() failed: ${e.toString()}`;
|
||||||
|
}
|
||||||
|
core.warning(message);
|
||||||
|
}
|
||||||
|
return formatWorkflowCause(workflowErrors);
|
||||||
}
|
}
|
||||||
exports.getWorkflowErrors = getWorkflowErrors;
|
exports.validateWorkflow = validateWorkflow;
|
||||||
function formatWorkflowErrors(errors) {
|
function formatWorkflowErrors(errors) {
|
||||||
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
||||||
const errorsList = errors.map((e) => e.message).join(" ");
|
const errorsList = errors.map((e) => e.message).join(" ");
|
||||||
@@ -291,13 +292,7 @@ exports.formatWorkflowCause = formatWorkflowCause;
|
|||||||
async function getWorkflow() {
|
async function getWorkflow() {
|
||||||
const relativePath = await getWorkflowPath();
|
const relativePath = await getWorkflowPath();
|
||||||
const absolutePath = path.join(getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
|
const absolutePath = path.join(getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
|
||||||
try {
|
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
|
||||||
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
core.warning(`Could not read workflow: ${e.toString()}`);
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
exports.getWorkflow = getWorkflow;
|
exports.getWorkflow = getWorkflow;
|
||||||
/**
|
/**
|
||||||
@@ -333,7 +328,7 @@ function getWorkflowRunID() {
|
|||||||
}
|
}
|
||||||
exports.getWorkflowRunID = getWorkflowRunID;
|
exports.getWorkflowRunID = getWorkflowRunID;
|
||||||
/**
|
/**
|
||||||
* Get the analysis key paramter for the current job.
|
* Get the analysis key parameter for the current job.
|
||||||
*
|
*
|
||||||
* This will combine the workflow path and current job name.
|
* This will combine the workflow path and current job name.
|
||||||
* Computing this the first time requires making requests to
|
* Computing this the first time requires making requests to
|
||||||
@@ -359,15 +354,28 @@ async function getRef() {
|
|||||||
// Will be in the form "refs/heads/master" on a push event
|
// Will be in the form "refs/heads/master" on a push event
|
||||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||||
const ref = getRequiredEnvParam("GITHUB_REF");
|
const ref = getRequiredEnvParam("GITHUB_REF");
|
||||||
|
const sha = getRequiredEnvParam("GITHUB_SHA");
|
||||||
// For pull request refs we want to detect whether the workflow
|
// For pull request refs we want to detect whether the workflow
|
||||||
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
||||||
// than the 'merge' ref. If so, we want to convert the ref that
|
// than the 'merge' ref. If so, we want to convert the ref that
|
||||||
// we report back.
|
// we report back.
|
||||||
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
|
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
|
||||||
const checkoutSha = await exports.getCommitOid();
|
if (!pull_ref_regex.test(ref)) {
|
||||||
if (pull_ref_regex.test(ref) &&
|
return ref;
|
||||||
checkoutSha !== getRequiredEnvParam("GITHUB_SHA")) {
|
}
|
||||||
return ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
const head = await exports.getCommitOid("HEAD");
|
||||||
|
// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
|
||||||
|
// in actions/checkout@v1 this may not be true as it checks out the repository
|
||||||
|
// using GITHUB_REF. There is a subtle race condition where
|
||||||
|
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
||||||
|
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
|
||||||
|
const hasChangedRef = sha !== head &&
|
||||||
|
(await exports.getCommitOid(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
|
||||||
|
head;
|
||||||
|
if (hasChangedRef) {
|
||||||
|
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
||||||
|
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
|
||||||
|
return newRef;
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
return ref;
|
return ref;
|
||||||
@@ -439,6 +447,10 @@ function isHTTPError(arg) {
|
|||||||
var _a;
|
var _a;
|
||||||
return ((_a = arg) === null || _a === void 0 ? void 0 : _a.status) !== undefined && Number.isInteger(arg.status);
|
return ((_a = arg) === null || _a === void 0 ? void 0 : _a.status) !== undefined && Number.isInteger(arg.status);
|
||||||
}
|
}
|
||||||
|
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
|
||||||
|
const GENERIC_404_MSG = "Not authorized to used the CodeQL code scanning feature on this repo.";
|
||||||
|
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
|
||||||
|
const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
|
||||||
/**
|
/**
|
||||||
* Send a status report to the code_scanning/analysis/status endpoint.
|
* Send a status report to the code_scanning/analysis/status endpoint.
|
||||||
*
|
*
|
||||||
@@ -467,34 +479,51 @@ async function sendStatusReport(statusReport) {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
|
console.log(e);
|
||||||
if (isHTTPError(e)) {
|
if (isHTTPError(e)) {
|
||||||
switch (e.status) {
|
switch (e.status) {
|
||||||
case 403:
|
case 403:
|
||||||
core.setFailed("The repo on which this action is running is not opted-in to CodeQL code scanning.");
|
if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
|
||||||
|
core.setFailed('Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
|
||||||
|
"Uploading Code Scanning results requires write access. " +
|
||||||
|
'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
|
||||||
|
"See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events.");
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
core.setFailed(e.message || GENERIC_403_MSG);
|
||||||
|
}
|
||||||
return false;
|
return false;
|
||||||
case 404:
|
case 404:
|
||||||
core.setFailed("Not authorized to used the CodeQL code scanning feature on this repo.");
|
core.setFailed(GENERIC_404_MSG);
|
||||||
return false;
|
return false;
|
||||||
case 422:
|
case 422:
|
||||||
// schema incompatibility when reporting status
|
// schema incompatibility when reporting status
|
||||||
// this means that this action version is no longer compatible with the API
|
// this means that this action version is no longer compatible with the API
|
||||||
// we still want to continue as it is likely the analysis endpoint will work
|
// we still want to continue as it is likely the analysis endpoint will work
|
||||||
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
|
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
|
||||||
core.debug("CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.");
|
core.debug(INCOMPATIBLE_MSG);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
core.debug("CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.");
|
core.debug(OUT_OF_DATE_MSG);
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// something else has gone wrong and the request/response will be logged by octokit
|
// something else has gone wrong and the request/response will be logged by octokit
|
||||||
// it's possible this is a transient error and we should continue scanning
|
// it's possible this is a transient error and we should continue scanning
|
||||||
core.error("An unexpected error occured when sending code scanning status report.");
|
core.error("An unexpected error occurred when sending code scanning status report.");
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
exports.sendStatusReport = sendStatusReport;
|
exports.sendStatusReport = sendStatusReport;
|
||||||
|
// Was the workflow run triggered by a `push` event, for example as opposed to a `pull_request` event.
|
||||||
|
function workflowIsTriggeredByPushEvent() {
|
||||||
|
return process.env["GITHUB_EVENT_NAME"] === "push";
|
||||||
|
}
|
||||||
|
// Is dependabot the actor that triggered the current workflow run.
|
||||||
|
function isDependabotActor() {
|
||||||
|
return process.env["GITHUB_ACTOR"] === "dependabot[bot]";
|
||||||
|
}
|
||||||
// Is the current action executing a local copy (i.e. we're running a workflow on the codeql-action repo itself)
|
// Is the current action executing a local copy (i.e. we're running a workflow on the codeql-action repo itself)
|
||||||
// as opposed to running a remote action (i.e. when another repo references us)
|
// as opposed to running a remote action (i.e. when another repo references us)
|
||||||
function isRunningLocalAction() {
|
function isRunningLocalAction() {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
310
lib/actions-util.test.js
generated
310
lib/actions-util.test.js
generated
@@ -11,9 +11,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
|
const yaml = __importStar(require("js-yaml"));
|
||||||
const sinon_1 = __importDefault(require("sinon"));
|
const sinon_1 = __importDefault(require("sinon"));
|
||||||
const actionsutil = __importStar(require("./actions-util"));
|
const actionsutil = __importStar(require("./actions-util"));
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
|
function errorCodes(actual, expected) {
|
||||||
|
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||||
|
}
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
testing_utils_1.setupTests(ava_1.default);
|
||||||
ava_1.default("getRef() throws on the empty string", async (t) => {
|
ava_1.default("getRef() throws on the empty string", async (t) => {
|
||||||
process.env["GITHUB_REF"] = "";
|
process.env["GITHUB_REF"] = "";
|
||||||
@@ -24,16 +28,33 @@ ava_1.default("getRef() returns merge PR ref if GITHUB_SHA still checked out", a
|
|||||||
const currentSha = "a".repeat(40);
|
const currentSha = "a".repeat(40);
|
||||||
process.env["GITHUB_REF"] = expectedRef;
|
process.env["GITHUB_REF"] = expectedRef;
|
||||||
process.env["GITHUB_SHA"] = currentSha;
|
process.env["GITHUB_SHA"] = currentSha;
|
||||||
sinon_1.default.stub(actionsutil, "getCommitOid").resolves(currentSha);
|
const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
|
||||||
|
callback.withArgs("HEAD").resolves(currentSha);
|
||||||
const actualRef = await actionsutil.getRef();
|
const actualRef = await actionsutil.getRef();
|
||||||
t.deepEqual(actualRef, expectedRef);
|
t.deepEqual(actualRef, expectedRef);
|
||||||
|
callback.restore();
|
||||||
});
|
});
|
||||||
ava_1.default("getRef() returns head PR ref if GITHUB_SHA not currently checked out", async (t) => {
|
ava_1.default("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
||||||
|
const expectedRef = "refs/pull/1/merge";
|
||||||
|
process.env["GITHUB_REF"] = expectedRef;
|
||||||
|
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||||
|
const sha = "a".repeat(40);
|
||||||
|
const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
|
||||||
|
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
||||||
|
callback.withArgs("HEAD").resolves(sha);
|
||||||
|
const actualRef = await actionsutil.getRef();
|
||||||
|
t.deepEqual(actualRef, expectedRef);
|
||||||
|
callback.restore();
|
||||||
|
});
|
||||||
|
ava_1.default("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
||||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||||
sinon_1.default.stub(actionsutil, "getCommitOid").resolves("b".repeat(40));
|
const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
|
||||||
|
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
|
||||||
|
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||||
const actualRef = await actionsutil.getRef();
|
const actualRef = await actionsutil.getRef();
|
||||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||||
|
callback.restore();
|
||||||
});
|
});
|
||||||
ava_1.default("getAnalysisKey() when a local run", async (t) => {
|
ava_1.default("getAnalysisKey() when a local run", async (t) => {
|
||||||
process.env.CODEQL_LOCAL_RUN = "true";
|
process.env.CODEQL_LOCAL_RUN = "true";
|
||||||
@@ -68,141 +89,136 @@ ava_1.default("prepareEnvironment() when a local run", (t) => {
|
|||||||
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
|
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
|
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on is missing", (t) => {
|
ava_1.default("getWorkflowErrors() when on is empty", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({});
|
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.push is missing", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({ on: {} });
|
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
||||||
console.log(errors);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
|
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.push is an array missing pull_request", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({ on: ["push"] });
|
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPullRequestHook]);
|
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.push is an array missing push", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push is valid", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({ on: ["pull_request"] });
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPushHook]);
|
|
||||||
});
|
|
||||||
ava_1.default("validateWorkflow() when on.push is valid", (t) => {
|
|
||||||
const errors = actionsutil.validateWorkflow({
|
|
||||||
on: ["push", "pull_request"],
|
on: ["push", "pull_request"],
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, []);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.push is a valid superset", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: ["push", "pull_request", "schedule"],
|
on: ["push", "pull_request", "schedule"],
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, []);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.push should not have a path", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["main"], paths: ["test/*"] },
|
push: { branches: ["main"], paths: ["test/*"] },
|
||||||
pull_request: { branches: ["main"] },
|
pull_request: { branches: ["main"] },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.PathsSpecified]);
|
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.push is a correct object", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push is a correct object", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, []);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.pull_requests is a string", (t) => {
|
ava_1.default("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
|
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.pull_requests is a string and correct", (t) => {
|
ava_1.default("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, []);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.push is correct with empty objects", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
on: { push: undefined, pull_request: undefined },
|
on:
|
||||||
});
|
push:
|
||||||
t.deepEqual(errors, []);
|
pull_request:
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.push is mismatched", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["main"] },
|
push: { branches: ["main"] },
|
||||||
pull_request: { branches: ["feature"] },
|
pull_request: { branches: ["feature"] },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
|
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.push is not mismatched", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["main", "feature"] },
|
push: { branches: ["main", "feature"] },
|
||||||
pull_request: { branches: ["main"] },
|
pull_request: { branches: ["main"] },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, []);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.push is mismatched for pull_request", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["main"] },
|
push: { branches: ["main"] },
|
||||||
pull_request: { branches: ["main", "feature"] },
|
pull_request: { branches: ["main", "feature"] },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
|
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() for a range of malformed workflows", (t) => {
|
ava_1.default("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: 1,
|
push: 1,
|
||||||
pull_request: 1,
|
pull_request: 1,
|
||||||
},
|
},
|
||||||
}), []);
|
}), []));
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
on: 1,
|
on: 1,
|
||||||
}), [actionsutil.WorkflowErrors.MissingHooks]);
|
}), []));
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
on: 1,
|
on: 1,
|
||||||
jobs: 1,
|
jobs: 1,
|
||||||
}), [actionsutil.WorkflowErrors.MissingHooks]);
|
}), []));
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
on: 1,
|
on: 1,
|
||||||
jobs: [1],
|
jobs: [1],
|
||||||
}), [actionsutil.WorkflowErrors.MissingHooks]);
|
}), []));
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
on: 1,
|
on: 1,
|
||||||
jobs: { 1: 1 },
|
jobs: { 1: 1 },
|
||||||
}), [actionsutil.WorkflowErrors.MissingHooks]);
|
}), []));
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
on: 1,
|
on: 1,
|
||||||
jobs: { test: 1 },
|
jobs: { test: 1 },
|
||||||
}), [actionsutil.WorkflowErrors.MissingHooks]);
|
}), []));
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
on: 1,
|
on: 1,
|
||||||
jobs: { test: [1] },
|
jobs: { test: [1] },
|
||||||
}), [actionsutil.WorkflowErrors.MissingHooks]);
|
}), []));
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
on: 1,
|
on: 1,
|
||||||
jobs: { test: { steps: 1 } },
|
jobs: { test: { steps: 1 } },
|
||||||
}), [actionsutil.WorkflowErrors.MissingHooks]);
|
}), []));
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
on: 1,
|
on: 1,
|
||||||
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
||||||
}), [actionsutil.WorkflowErrors.MissingHooks]);
|
}), []));
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
on: 1,
|
on: 1,
|
||||||
jobs: { test: [undefined] },
|
jobs: { test: [undefined] },
|
||||||
}), [actionsutil.WorkflowErrors.MissingHooks]);
|
}), []));
|
||||||
t.deepEqual(actionsutil.validateWorkflow(1), [
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
|
||||||
actionsutil.WorkflowErrors.MissingHooks,
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||||
]);
|
|
||||||
t.deepEqual(actionsutil.validateWorkflow({
|
|
||||||
on: {
|
on: {
|
||||||
push: {
|
push: {
|
||||||
branches: 1,
|
branches: 1,
|
||||||
@@ -211,41 +227,43 @@ ava_1.default("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||||||
branches: 1,
|
branches: 1,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}), []);
|
}), []));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.pull_request for every branch but push specifies branches", (t) => {
|
ava_1.default("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
on: {
|
name: "CodeQL"
|
||||||
push: { branches: ["main"] },
|
on:
|
||||||
pull_request: null,
|
push:
|
||||||
},
|
branches: ["main"]
|
||||||
});
|
pull_request:
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.pull_request for wildcard branches", (t) => {
|
ava_1.default("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["feature/*"] },
|
push: { branches: ["feature/*"] },
|
||||||
pull_request: { branches: "feature/moose" },
|
pull_request: { branches: "feature/moose" },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, []);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when on.pull_request for mismatched wildcard branches", (t) => {
|
ava_1.default("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["feature/moose"] },
|
push: { branches: ["feature/moose"] },
|
||||||
pull_request: { branches: "feature/*" },
|
pull_request: { branches: "feature/*" },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
|
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||||
});
|
});
|
||||||
ava_1.default("validateWorkflow() when HEAD^2 is checked out", (t) => {
|
ava_1.default("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
process.env.GITHUB_JOB = "test";
|
||||||
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: ["push", "pull_request"],
|
on: ["push", "pull_request"],
|
||||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||||
});
|
});
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]);
|
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
||||||
});
|
});
|
||||||
ava_1.default("formatWorkflowErrors() when there is one error", (t) => {
|
ava_1.default("formatWorkflowErrors() when there is one error", (t) => {
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
const message = actionsutil.formatWorkflowErrors([
|
||||||
@@ -260,6 +278,10 @@ ava_1.default("formatWorkflowErrors() when there are multiple errors", (t) => {
|
|||||||
]);
|
]);
|
||||||
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
||||||
});
|
});
|
||||||
|
ava_1.default("formatWorkflowCause() with no errors", (t) => {
|
||||||
|
const message = actionsutil.formatWorkflowCause([]);
|
||||||
|
t.deepEqual(message, undefined);
|
||||||
|
});
|
||||||
ava_1.default("formatWorkflowCause()", (t) => {
|
ava_1.default("formatWorkflowCause()", (t) => {
|
||||||
const message = actionsutil.formatWorkflowCause([
|
const message = actionsutil.formatWorkflowCause([
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
||||||
@@ -290,4 +312,108 @@ ava_1.default("patternIsSuperset()", (t) => {
|
|||||||
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
|
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
|
||||||
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
|
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
|
||||||
});
|
});
|
||||||
|
ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
|
||||||
|
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [4.1, master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [4.1, master]
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
ava_1.default("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||||
|
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master, ]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
ava_1.default("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
||||||
|
});
|
||||||
|
ava_1.default("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test3";
|
||||||
|
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
ava_1.default("getWorkflowErrors() when on is missing", (t) => {
|
||||||
|
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
ava_1.default("getWorkflowErrors() with a different on setup", (t) => {
|
||||||
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: "workflow_dispatch"
|
||||||
|
`)), []));
|
||||||
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: [workflow_dispatch]
|
||||||
|
`)), []));
|
||||||
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
`)), []));
|
||||||
|
});
|
||||||
|
ava_1.default("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||||
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
`)), []));
|
||||||
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: ["push"]
|
||||||
|
`)), []));
|
||||||
|
});
|
||||||
//# sourceMappingURL=actions-util.test.js.map
|
//# sourceMappingURL=actions-util.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
2
lib/analysis-paths.js
generated
2
lib/analysis-paths.js
generated
@@ -28,7 +28,7 @@ function printPathFiltersWarning(config, logger) {
|
|||||||
// If any other languages are detected/configured then show a warning.
|
// If any other languages are detected/configured then show a warning.
|
||||||
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
|
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
|
||||||
!config.languages.every(isInterpretedLanguage)) {
|
!config.languages.every(isInterpretedLanguage)) {
|
||||||
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
|
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
exports.printPathFiltersWarning = printPathFiltersWarning;
|
exports.printPathFiltersWarning = printPathFiltersWarning;
|
||||||
|
|||||||
6
lib/analysis-paths.test.js
generated
6
lib/analysis-paths.test.js
generated
@@ -27,7 +27,7 @@ ava_1.default("emptyPaths", async (t) => {
|
|||||||
tempDir: tmpDir,
|
tempDir: tmpDir,
|
||||||
toolCacheDir: tmpDir,
|
toolCacheDir: tmpDir,
|
||||||
codeQLCmd: "",
|
codeQLCmd: "",
|
||||||
gitHubVersion: { type: "dotcom" },
|
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
||||||
};
|
};
|
||||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||||
@@ -46,7 +46,7 @@ ava_1.default("nonEmptyPaths", async (t) => {
|
|||||||
tempDir: tmpDir,
|
tempDir: tmpDir,
|
||||||
toolCacheDir: tmpDir,
|
toolCacheDir: tmpDir,
|
||||||
codeQLCmd: "",
|
codeQLCmd: "",
|
||||||
gitHubVersion: { type: "dotcom" },
|
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
||||||
};
|
};
|
||||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||||
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
|
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
|
||||||
@@ -66,7 +66,7 @@ ava_1.default("exclude temp dir", async (t) => {
|
|||||||
tempDir,
|
tempDir,
|
||||||
toolCacheDir,
|
toolCacheDir,
|
||||||
codeQLCmd: "",
|
codeQLCmd: "",
|
||||||
gitHubVersion: { type: "dotcom" },
|
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
||||||
};
|
};
|
||||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAwB;SACxD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAwB;SACxD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAwB;SACxD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
41  lib/analyze-action.js  generated
@@ -7,12 +7,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
 const core = __importStar(require("@actions/core"));
 const actionsUtil = __importStar(require("./actions-util"));
 const analyze_1 = require("./analyze");
 const config_utils_1 = require("./config-utils");
 const logging_1 = require("./logging");
-const repository_1 = require("./repository");
+const upload_lib = __importStar(require("./upload-lib"));
 const util = __importStar(require("./util"));
 async function sendStatusReport(startedAt, stats, error) {
 var _a, _b, _c;
@@ -29,13 +31,14 @@ async function sendStatusReport(startedAt, stats, error) {
 async function run() {
 const startedAt = new Date();
 let stats = undefined;
+let config = undefined;
 try {
 actionsUtil.prepareLocalRunEnvironment();
 if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
 return;
 }
 const logger = logging_1.getActionsLogger();
-const config = await config_utils_1.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
+config = await config_utils_1.getConfig(actionsUtil.getTemporaryDirectory(), logger);
 if (config === undefined) {
 throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
 }
@@ -43,7 +46,16 @@ async function run() {
 auth: actionsUtil.getRequiredInput("token"),
 url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
 };
-stats = await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), apiDetails, actionsUtil.getRequiredInput("upload") === "true", "actions", actionsUtil.getRequiredInput("output"), util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
+const outputDir = actionsUtil.getRequiredInput("output");
+const queriesStats = await analyze_1.runAnalyze(outputDir, util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
+if (actionsUtil.getRequiredInput("upload") === "true") {
+const uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
+stats = { ...queriesStats, ...uploadStats };
+}
+else {
+logger.info("Not uploading results");
+stats = { ...queriesStats };
+}
 }
 catch (error) {
 core.setFailed(error.message);
@@ -54,6 +66,29 @@ async function run() {
 await sendStatusReport(startedAt, stats, error);
 return;
 }
+finally {
+if (core.isDebug() && config !== undefined) {
+core.info("Debug mode is on. Printing CodeQL debug logs...");
+for (const language of config.languages) {
+const databaseDirectory = util.getCodeQLDatabasePath(config.tempDir, language);
+const logsDirectory = path.join(databaseDirectory, "log");
+const walkLogFiles = (dir) => {
+const entries = fs.readdirSync(dir, { withFileTypes: true });
+for (const entry of entries) {
+if (entry.isFile()) {
+core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
+process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
+core.endGroup();
+}
+else if (entry.isDirectory()) {
+walkLogFiles(path.resolve(dir, entry.name));
+}
+}
+};
+walkLogFiles(logsDirectory);
+}
+}
+}
 await sendStatusReport(startedAt, stats);
 }
 async function runWrapper() {
(regenerated source map; old mapping followed by new mapping:)
@@ -1 +1 @@
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAImB;AACnB,iDAA2C;AAC3C,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAM/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,MAAM,GAAG,MAAM,wBAAS,CAC5B,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QACF,KAAK,GAAG,MAAM,oBAAU,CACtB,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,MAAM,WAAW,CAAC,YAAY,EAAE,EAChC,MAAM,WAAW,CAAC,MAAM,EAAE,EAC1B,MAAM,WAAW,CAAC,cAAc,EAAE,EAClC,WAAW,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAClD,WAAW,CAAC,gBAAgB,EAAE,EAC9B,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,UAAU,EACV,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EACjD,SAAS,EACT,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACpE,MAAM,EACN,MAAM,CACP,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;SAC1C;QAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAImB;AACnB,iDAAmD;AACnD,uCAA6C;AAC7C,yDAA2C;AAC3C,6CAA+B;AAU/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI,MAAM,GAAuB,SAAS,CAAC;IAC3C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,GAAG,MAAM,wBAAS,CAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;QACtE,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,YAAY,GAAG,MAAM,oBAAU,CACnC,SAAS,EACT,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACpE,MAAM,EACN,MAAM,CACP,CAAC;QAEF,IAAI,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EAAE;YACrD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,SAAS,EACT,MAAM,CAAC,aAAa,EACpB,UAAU,EACV,MAAM,CACP,CAAC;YACF,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;SAC7C;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;YACrC,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,CAAC;SAC7B;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;SAC1C;QAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;YAAS;QACR,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,MAAM,KAAK,SAAS,EAAE;YAC1C,IAAI,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;YAC7D,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,MAAM,iBAAiB,GAAG,IAAI,CAAC,qBAAqB,CAClD,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;gBACF,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;gBAE1D,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE;oBACnC,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;oBAC7D,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;wBAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;4BAClB,IAAI,CAAC,UAAU,CACb,uBAAuB,QAAQ,MAAM,KAAK,CAAC,IAAI,EAAE,CAClD,CAAC;4BACF,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAC/C,CAAC;4BACF,IAAI,CAAC,QAAQ,EAAE,CAAC;yBACjB;6BAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;4BAC9B,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;yBAC7C;qBACF;gBACH,CAAC,CAAC;gBACF,YAAY,CAAC,aAAa,CAAC,CAAC;aAC7B;SACF;KACF;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
10  lib/analyze.js  generated
@@ -14,7 +14,6 @@ const analysisPaths = __importStar(require("./analysis-paths"));
 const codeql_1 = require("./codeql");
 const languages_1 = require("./languages");
 const sharedEnv = __importStar(require("./shared-environment"));
-const upload_lib = __importStar(require("./upload-lib"));
 const util = __importStar(require("./util"));
 class CodeQLAnalysisError extends Error {
 constructor(queriesStatusReport, message) {
@@ -117,7 +116,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
 return statusReport;
 }
 exports.runQueries = runQueries;
-async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, apiDetails, doUpload, mode, outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
+async function runAnalyze(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
 // Delete the tracer config env var to avoid tracing ourselves
 delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
 fs.mkdirSync(outputDir, { recursive: true });
@@ -125,12 +124,7 @@ async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisNa
 await finalizeDatabaseCreation(config, threadsFlag, logger);
 logger.info("Analyzing database");
 const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
-if (!doUpload) {
-logger.info("Not uploading results");
-return { ...queriesStats };
-}
-const uploadStats = await upload_lib.upload(outputDir, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, config.gitHubVersion, apiDetails, mode, logger);
-return { ...queriesStats, ...uploadStats };
+return { ...queriesStats };
 }
 exports.runAnalyze = runAnalyze;
 //# sourceMappingURL=analyze.js.map
(regenerated source map; old mapping followed by new mapping:)
@@ -1 +1 @@
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAE3D,gEAAkD;AAElD,qCAAqC;AAErC,2CAA0D;AAG1D,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAE/B,MAAa,mBAAoB,SAAQ,KAAK;IAG5C,YAAY,mBAAwC,EAAE,OAAe;QACnE,KAAK,CAAC,OAAO,CAAC,CAAC;QAEf,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC;QAClC,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;CACF;AATD,kDASC;AAmCD,KAAK,UAAU,oBAAoB,CAAC,MAAc;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAClD,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3D,+FAA+F;QAC/F,OAAO;KACR;IAED,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,OAAO,GAAG;QACd,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC;IAEF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ;QACE,IAAI;QACJ,8EAA8E;KAC/E,EACD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,kCAAkC,MAAM,EAAE,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAE/C,MAAM,GAAG,EAAE,CAAC;IACZ,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ,CAAC,IAAI,EAAE,wCAAwC,CAAC,EAChD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,qCAAqC,MAAM,EAAE,CAAC,CAAC;IAC3D,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,GAAG,MAAM,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;YAE5C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,MAAM,EAAE;gBAChC,MAAM,oBAAoB,CAAC,MAAM,CAAC,CAAC;aACpC;YAED,MAAM,MAAM,CAAC,sBAAsB,CACjC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,CACT,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,WAAmB,EACnB,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAC3B,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,WAAW,CACZ,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AACpD,KAAK,UAAU,UAAU,CAC9B,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,YAAY,GAAwB,EAAE,CAAC;IAE7C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,QAAQ,EAAE,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QACzC,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;YAC/D,MAAM,IAAI,KAAK,CACb,qBAAqB,QAAQ,gDAAgD,CAC9E,CAAC;SACH;QAED,IAAI;YACF,KAAK,MAAM,IAAI,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,EAAE;gBACxC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBAEvC,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAC7C,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;oBACF,uEAAuE;oBACvE,2EAA2E;oBAC3E,MAAM,cAAc,GAAG,GAAG,YAAY,YAAY,IAAI,MAAM,CAAC;oBAC7D,MAAM,kBAAkB,GAAG,OAAO,CAAC,IAAI,CAAC;yBACrC,GAAG,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC;yBACnC,IAAI,CAAC,IAAI,CAAC,CAAC;oBACd,EAAE,CAAC,aAAa,CAAC,cAAc,EAAE,kBAAkB,CAAC,CAAC;oBACrD,MAAM,CAAC,KAAK,CACV,wBAAwB,QAAQ,QAAQ,kBAAkB,EAAE,CAC7D,CAAC;oBAEF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,QAAQ,IAAI,IAAI,QAAQ,CAAC,CAAC;oBAEtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;oBAC3C,MAAM,MAAM,CAAC,eAAe,CAC1B,YAAY,EACZ,SAAS,EACT,cAAc,EACd,UAAU,EACV,eAAe,EACf,WAAW,CACZ,CAAC;oBAEF,MAAM,CAAC,KAAK,CACV,8BAA8B,QAAQ,gBAAgB,SAAS,GAAG,CACnE,CAAC;oBACF,MAAM,CAAC,QA
AQ,EAAE,CAAC;oBAElB,yBAAyB;oBACzB,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBACrC,YAAY,CAAC,WAAW,IAAI,YAAY,QAAQ,cAAc,CAAC;wBAC7D,OAAO,GAAG,SAAS,CAAC;iBACvB;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACf,YAAY,CAAC,wBAAwB,GAAG,QAAQ,CAAC;YACjD,MAAM,IAAI,mBAAmB,CAC3B,YAAY,EACZ,8BAA8B,QAAQ,KAAK,CAAC,EAAE,CAC/C,CAAC;SACH;KACF;IAED,OAAO,YAAY,CAAC;AACtB,CAAC;AA1ED,gCA0EC;AAEM,KAAK,UAAU,UAAU,CAC9B,aAA4B,EAC5B,SAAiB,EACjB,GAAW,EACX,WAA+B,EAC/B,YAAgC,EAChC,aAAiC,EACjC,YAAoB,EACpB,WAA+B,EAC/B,UAA4B,EAC5B,QAAiB,EACjB,IAAe,EACf,SAAiB,EACjB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;IAE5D,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CACnC,SAAS,EACT,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,MAAM,CACP,CAAC;IAEF,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACrC,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;KAC5B;IAED,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,SAAS,EACT,aAAa,EACb,SAAS,EACT,GAAG,EACH,WAAW,EACX,YAAY,EACZ,aAAa,EACb,YAAY,EACZ,WAAW,EACX,MAAM,CAAC,aAAa,EACpB,UAAU,EACV,IAAI,EACJ,MAAM,CACP,CAAC;IAEF,OAAO,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;AAC7C,CAAC;AA3DD,gCA2DC"}
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAE3D,gEAAkD;AAClD,qCAAqC;AAErC,2CAA0D;AAE1D,gEAAkD;AAClD,6CAA+B;AAE/B,MAAa,mBAAoB,SAAQ,KAAK;IAG5C,YAAY,mBAAwC,EAAE,OAAe;QACnE,KAAK,CAAC,OAAO,CAAC,CAAC;QAEf,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC;QAClC,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;CACF;AATD,kDASC;AA+BD,KAAK,UAAU,oBAAoB,CAAC,MAAc;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAClD,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3D,+FAA+F;QAC/F,OAAO;KACR;IAED,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,OAAO,GAAG;QACd,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC;IAEF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ;QACE,IAAI;QACJ,8EAA8E;KAC/E,EACD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,kCAAkC,MAAM,EAAE,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAE/C,MAAM,GAAG,EAAE,CAAC;IACZ,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ,CAAC,IAAI,EAAE,wCAAwC,CAAC,EAChD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,qCAAqC,MAAM,EAAE,CAAC,CAAC;IAC3D,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,GAAG,MAAM,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;YAE5C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,MAAM,EAAE;gBAChC,MAAM,oBAAoB,CAAC,MAAM,CAAC,CAAC;aACpC;YAED,MAAM,MAAM,CAAC,sBAAsB,CACjC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,CACT,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,WAAmB,EACnB,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAC3B,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,WAAW,CACZ,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AACpD,KAAK,UAAU,UAAU,CAC9B,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,YAAY,GAAwB,EAAE,CAAC;IAE7C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,QAAQ,EAAE,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QACzC,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;YAC/D,MAAM,IAAI,KAAK,CACb,qBAAqB,QAAQ,gDAAgD,CAC9E,CAAC;SACH;QAED,IAAI;YACF,KAAK,MAAM,IAAI,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,EAAE;gBACxC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBAEvC,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAC7C,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;oBACF,uEAAuE;oBACvE,2EAA2E;oBAC3E,MAAM,cAAc,GAAG,GAAG,YAAY,YAAY,IAAI,MAAM,CAAC;oBAC7D,MAAM,kBAAkB,GAAG,OAAO,CAAC,IAAI,CAAC;yBACrC,GAAG,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC;yBACnC,IAAI,CAAC,IAAI,CAAC,CAAC;oBACd,EAAE,CAAC,aAAa,CAAC,cAAc,EAAE,kBAAkB,CAAC,CAAC;oBACrD,MAAM,CAAC,KAAK,CACV,wBAAwB,QAAQ,QAAQ,kBAAkB,EAAE,CAC7D,CAAC;oBAEF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,QAAQ,IAAI,IAAI,QAAQ,CAAC,CAAC;oBAEtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;oBAC3C,MAAM,MAAM,CAAC,eAAe,CAC1B,YAAY,EACZ,SAAS,EACT,cAAc,EACd,UAAU,EACV,eAAe,EACf,WAAW,CACZ,CAAC;oBAEF,MAAM,CAAC,KAAK,CACV,8BAA8B,QAAQ,gBAAgB,SAAS,GAAG,CACnE,CAAC;oBACF,MAAM,CAAC,QAAQ,EAAE,CAAC;
oBAElB,yBAAyB;oBACzB,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBACrC,YAAY,CAAC,WAAW,IAAI,YAAY,QAAQ,cAAc,CAAC;wBAC7D,OAAO,GAAG,SAAS,CAAC;iBACvB;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACf,YAAY,CAAC,wBAAwB,GAAG,QAAQ,CAAC;YACjD,MAAM,IAAI,mBAAmB,CAC3B,YAAY,EACZ,8BAA8B,QAAQ,KAAK,CAAC,EAAE,CAC/C,CAAC;SACH;KACF;IAED,OAAO,YAAY,CAAC;AACtB,CAAC;AA1ED,gCA0EC;AAEM,KAAK,UAAU,UAAU,CAC9B,SAAiB,EACjB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;IAE5D,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CACnC,SAAS,EACT,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,MAAM,CACP,CAAC;IAEF,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;AAC7B,CAAC;AA3BD,gCA2BC"}
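With upload handling removed from analyze.js, runAnalyze is reduced to the six parameters shown above and returns only the queries status report; callers merge in upload statistics themselves, as analyze-action.js now does. A hedged sketch of such a caller (signatures taken from the compiled output; the wrapper name analyzeAndMaybeUpload is hypothetical and type annotations are omitted):

import * as actionsUtil from "./actions-util";
import { runAnalyze } from "./analyze";
import * as upload_lib from "./upload-lib";
import * as util from "./util";

// Sketch only: config, apiDetails and logger are assumed to be prepared as in analyze-action.js above.
async function analyzeAndMaybeUpload(config, apiDetails, logger) {
  const outputDir = actionsUtil.getRequiredInput("output");
  const queriesStats = await runAnalyze(
    outputDir,
    util.getMemoryFlag(actionsUtil.getOptionalInput("ram")),
    util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")),
    util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger),
    config,
    logger
  );
  if (actionsUtil.getRequiredInput("upload") !== "true") {
    logger.info("Not uploading results");
    return { ...queriesStats };
  }
  const uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
  return { ...queriesStats, ...uploadStats };
}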
4  lib/analyze.test.js  generated
@@ -39,7 +39,9 @@ ava_1.default("status report fields", async (t) => {
 tempDir: tmpDir,
 toolCacheDir: tmpDir,
 codeQLCmd: "",
-gitHubVersion: { type: "dotcom" },
+gitHubVersion: {
+type: util.GitHubVariant.DOTCOM,
+},
 };
 fs.mkdirSync(util.getCodeQLDatabasePath(config.tempDir, language), {
 recursive: true,
(regenerated source map; old mapping followed by new mapping:)
@@ -1 +1 @@
{"version":3,"file":"analyze.test.js","sourceRoot":"","sources":["../src/analyze.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AAEzB,8CAAuB;AAEvB,uCAAuC;AACvC,qCAAqC;AAErC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,yEAAyE;AACzE,yCAAyC;AACzC,aAAI,CAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,kBAAS,CAAC;YACR,eAAe,EAAE,KAAK,IAAI,EAAE,CAAC,SAAS;SACvC,CAAC,CAAC;QAEH,MAAM,UAAU,GAAG,EAAE,CAAC;QACtB,MAAM,eAAe,GAAG,EAAE,CAAC;QAC3B,MAAM,WAAW,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAQ,CAAC,EAAE;YAC9C,MAAM,MAAM,GAAW;gBACrB,SAAS,EAAE,CAAC,QAAQ,CAAC;gBACrB,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,EAAE;gBACf,KAAK,EAAE,EAAE;gBACT,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,MAAM;gBACf,YAAY,EAAE,MAAM;gBACpB,SAAS,EAAE,EAAE;gBACb,aAAa,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAwB;aACxD,CAAC;YACF,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE;gBACjE,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,CAAC,QAAQ,CAAC;gBACnB,MAAM,EAAE,EAAE;aACX,CAAC;YACF,MAAM,mBAAmB,GAAG,MAAM,oBAAU,CAC1C,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACxD,CAAC,CAAC,IAAI,CACJ,2BAA2B,QAAQ,cAAc,IAAI,mBAAmB,CACzE,CAAC;YAEF,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,EAAE;gBACX,MAAM,EAAE,CAAC,QAAQ,CAAC;aACnB,CAAC;YACF,MAAM,kBAAkB,GAAG,MAAM,oBAAU,CACzC,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACvD,CAAC,CAAC,IAAI,CACJ,0BAA0B,QAAQ,cAAc,IAAI,kBAAkB,CACvE,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze.test.js","sourceRoot":"","sources":["../src/analyze.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AAEzB,8CAAuB;AAEvB,uCAAuC;AACvC,qCAAqC;AAErC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,yEAAyE;AACzE,yCAAyC;AACzC,aAAI,CAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,kBAAS,CAAC;YACR,eAAe,EAAE,KAAK,IAAI,EAAE,CAAC,SAAS;SACvC,CAAC,CAAC;QAEH,MAAM,UAAU,GAAG,EAAE,CAAC;QACtB,MAAM,eAAe,GAAG,EAAE,CAAC;QAC3B,MAAM,WAAW,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAQ,CAAC,EAAE;YAC9C,MAAM,MAAM,GAAW;gBACrB,SAAS,EAAE,CAAC,QAAQ,CAAC;gBACrB,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,EAAE;gBACf,KAAK,EAAE,EAAE;gBACT,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,MAAM;gBACf,YAAY,EAAE,MAAM;gBACpB,SAAS,EAAE,EAAE;gBACb,aAAa,EAAE;oBACb,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;iBACV;aACxB,CAAC;YACF,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE;gBACjE,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,CAAC,QAAQ,CAAC;gBACnB,MAAM,EAAE,EAAE;aACX,CAAC;YACF,MAAM,mBAAmB,GAAG,MAAM,oBAAU,CAC1C,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACxD,CAAC,CAAC,IAAI,CACJ,2BAA2B,QAAQ,cAAc,IAAI,mBAAmB,CACzE,CAAC;YAEF,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,EAAE;gBACX,MAAM,EAAE,CAAC,QAAQ,CAAC;aACnB,CAAC;YACF,MAAM,kBAAkB,GAAG,MAAM,oBAAU,CACzC,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACvD,CAAC,CAAC,IAAI,CACJ,0BAA0B,QAAQ,cAAc,IAAI,kBAAkB,CACvE,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
7  lib/api-client.js  generated
@@ -20,11 +20,12 @@ var DisallowedAPIVersionReason;
 DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
 DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
 })(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
-exports.getApiClient = function (apiDetails, allowLocalRun = false) {
+exports.getApiClient = function (apiDetails, { allowLocalRun = false, allowExternal = false } = {}) {
 if (util_1.isLocalRun() && !allowLocalRun) {
 throw new Error("Invalid API call in local run");
 }
-return new githubUtils.GitHub(githubUtils.getOctokitOptions(apiDetails.auth, {
+const auth = (allowExternal && apiDetails.externalRepoAuth) || apiDetails.auth;
+return new githubUtils.GitHub(githubUtils.getOctokitOptions(auth, {
 baseUrl: getApiUrl(apiDetails.url),
 userAgent: "CodeQL Action",
 log: console_log_level_1.default({ level: "debug" }),
@@ -49,7 +50,7 @@ function getActionsApiClient(allowLocalRun = false) {
 auth: actions_util_1.getRequiredInput("token"),
 url: actions_util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
 };
-return exports.getApiClient(apiDetails, allowLocalRun);
+return exports.getApiClient(apiDetails, { allowLocalRun });
 }
 exports.getActionsApiClient = getActionsApiClient;
 //# sourceMappingURL=api-client.js.map
(regenerated source map; old mapping followed by new mapping:)
@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,0EAAgD;AAEhD,iDAAuE;AACvE,iCAAoC;AAEpC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAOY,QAAA,YAAY,GAAG,UAC1B,UAA4B,EAC5B,aAAa,GAAG,KAAK;IAErB,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,WAAW,CAAC,MAAM,CAC3B,WAAW,CAAC,iBAAiB,CAAC,UAAU,CAAC,IAAI,EAAE;QAC7C,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,+BAAgB,CAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,kCAAmB,CAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,oBAAY,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;AACjD,CAAC;AAPD,kDAOC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,0EAAgD;AAEhD,iDAAuE;AACvE,iCAAoC;AAEpC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeY,QAAA,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAErD,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IAED,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,OAAO,IAAI,WAAW,CAAC,MAAM,CAC3B,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,+BAAgB,CAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,kCAAmB,CAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,oBAAY,CAAC,UAAU,EAAE,EAAE,aAAa,EAAE,CAAC,CAAC;AACrD,CAAC;AAPD,kDAOC"}
72  lib/api-client.test.js  generated  Normal file
@@ -0,0 +1,72 @@
+"use strict";
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+result["default"] = mod;
+return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const githubUtils = __importStar(require("@actions/github/lib/utils"));
+const ava_1 = __importDefault(require("ava"));
+const sinon_1 = __importDefault(require("sinon"));
+const api_client_1 = require("./api-client");
+const testing_utils_1 = require("./testing-utils");
+testing_utils_1.setupTests(ava_1.default);
+let githubStub;
+ava_1.default.beforeEach(() => {
+githubStub = sinon_1.default.stub(githubUtils, "GitHub");
+});
+ava_1.default("Get the client API", async (t) => {
+doTest(t, {
+auth: "xyz",
+externalRepoAuth: "abc",
+url: "http://hucairz",
+}, undefined, {
+auth: "token xyz",
+baseUrl: "http://hucairz/api/v3",
+userAgent: "CodeQL Action",
+});
+});
+ava_1.default("Get the client API external", async (t) => {
+doTest(t, {
+auth: "xyz",
+externalRepoAuth: "abc",
+url: "http://hucairz",
+}, { allowExternal: true }, {
+auth: "token abc",
+baseUrl: "http://hucairz/api/v3",
+userAgent: "CodeQL Action",
+});
+});
+ava_1.default("Get the client API external not present", async (t) => {
+doTest(t, {
+auth: "xyz",
+url: "http://hucairz",
+}, { allowExternal: true }, {
+auth: "token xyz",
+baseUrl: "http://hucairz/api/v3",
+userAgent: "CodeQL Action",
+});
+});
+ava_1.default("Get the client API with github url", async (t) => {
+doTest(t, {
+auth: "xyz",
+url: "https://github.com/some/invalid/url",
+}, undefined, {
+auth: "token xyz",
+baseUrl: "https://api.github.com",
+userAgent: "CodeQL Action",
+});
+});
+function doTest(t, clientArgs, clientOptions, expected) {
+api_client_1.getApiClient(clientArgs, clientOptions);
+const firstCallArgs = githubStub.args[0];
+// log is a function, so we don't need to test for equality of it
+delete firstCallArgs[0].log;
+t.deepEqual(firstCallArgs, [expected]);
+}
+//# sourceMappingURL=api-client.test.js.map
1  lib/api-client.test.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,kDAA0B;AAE1B,6CAA4C;AAC5C,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,eAAK,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;AACjD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,yBAAY,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"}
(a separate one-line file; its header is not shown in the capture above:)
@@ -1 +1 @@
-{ "maximumVersion": "3.0", "minimumVersion": "2.22" }
+{ "maximumVersion": "3.1", "minimumVersion": "2.22" }
2  lib/autobuild-action.js  generated
@@ -34,7 +34,7 @@ async function run() {
 if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("autobuild", "starting", startedAt)))) {
 return;
 }
-const config = await config_utils.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
+const config = await config_utils.getConfig(actionsUtil.getTemporaryDirectory(), logger);
 if (config === undefined) {
 throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
 }
(regenerated source map; old mapping followed by new mapping:)
@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAS7C,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,WAAW,EACX,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAS7C,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,WAAW,EACX,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
55  lib/codeql.js  generated
@@ -6,6 +6,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
 result["default"] = mod;
 return result;
 };
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
@@ -14,6 +17,8 @@ const globalutil = __importStar(require("util"));
 const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
 const http = __importStar(require("@actions/http-client"));
 const toolcache = __importStar(require("@actions/tool-cache"));
+const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
+const query_string_1 = __importDefault(require("query-string"));
 const semver = __importStar(require("semver"));
 const uuid_1 = require("uuid");
 const actions_util_1 = require("./actions-util");
@@ -49,6 +54,11 @@ function getCodeQLActionRepository(mode, logger) {
 if (mode !== "actions") {
 return CODEQL_DEFAULT_ACTION_REPOSITORY;
 }
+else {
+return getActionsCodeQLActionRepository(logger);
+}
+}
+function getActionsCodeQLActionRepository(logger) {
 if (process.env["GITHUB_ACTION_REPOSITORY"] !== undefined) {
 return process.env["GITHUB_ACTION_REPOSITORY"];
 }
@@ -64,7 +74,7 @@ function getCodeQLActionRepository(mode, logger) {
 const relativeScriptPathParts = actions_util_1.getRelativeScriptPath().split(path.sep);
 return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
 }
-async function getCodeQLBundleDownloadURL(apiDetails, mode, logger) {
+async function getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger) {
 const codeQLActionRepository = getCodeQLActionRepository(mode, logger);
 const potentialDownloadSources = [
 // This GitHub instance, and this Action.
@@ -76,8 +86,34 @@ async function getCodeQLBundleDownloadURL(apiDetails, mode, logger) {
 ];
 // We now filter out any duplicates.
 // Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
-const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
+const uniqueDownloadSources = potentialDownloadSources.filter((source, index, self) => {
+return !self.slice(0, index).some((other) => fast_deep_equal_1.default(source, other));
+});
 const codeQLBundleName = getCodeQLBundleName();
+if (variant === util.GitHubVariant.GHAE) {
+try {
+const release = await api
+.getApiClient(apiDetails)
+.request("GET /enterprise/code-scanning/codeql-bundle/find/{tag}", {
+tag: CODEQL_BUNDLE_VERSION,
+});
+const assetID = release.data.assets[codeQLBundleName];
+if (assetID !== undefined) {
+const download = await api
+.getApiClient(apiDetails)
+.request("GET /enterprise/code-scanning/codeql-bundle/download/{asset_id}", { asset_id: assetID });
+const downloadURL = download.data.url;
+logger.info(`Found CodeQL bundle at GitHub AE endpoint with URL ${downloadURL}.`);
+return downloadURL;
+}
+else {
+logger.info(`Attempted to fetch bundle from GitHub AE endpoint but the bundle ${codeQLBundleName} was not found in the assets ${JSON.stringify(release.data.assets)}.`);
+}
+}
+catch (e) {
+logger.info(`Attempted to fetch bundle from GitHub AE endpoint but got error ${e}.`);
+}
+}
 for (const downloadSource of uniqueDownloadSources) {
 const [apiURL, repository] = downloadSource;
 // If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
@@ -120,12 +156,7 @@ async function toolcacheDownloadTool(url, headers, tempDir, logger) {
 await pipeline(response.message, fs.createWriteStream(dest));
 return dest;
 }
-async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logger) {
-// Setting these two env vars makes the toolcache code safe to use outside,
-// of actions but this is obviously not a great thing we're doing and it would
-// be better to write our own implementation to use outside of actions.
-process.env["RUNNER_TEMP"] = tempDir;
-process.env["RUNNER_TOOL_CACHE"] = toolsDir;
+async function setupCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger) {
 try {
 // We use the special value of 'latest' to prioritize the version in the
 // defaults over any pinned cached version.
@@ -155,13 +186,17 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logge
 }
 else {
 if (!codeqlURL) {
-codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, mode, logger);
+codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger);
 }
+const parsedCodeQLURL = new URL(codeqlURL);
+const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
 const headers = { accept: "application/octet-stream" };
 // We only want to provide an authorization header if we are downloading
 // from the same GitHub instance the Action is running on.
 // This avoids leaking Enterprise tokens to dotcom.
-if (codeqlURL.startsWith(`${apiDetails.url}/`)) {
+// We also don't want to send an authorization header if there's already a token provided in the URL.
+if (codeqlURL.startsWith(`${apiDetails.url}/`) &&
+parsedQueryString["token"] === undefined) {
 logger.debug("Downloading CodeQL bundle with token.");
 headers.authorization = `token ${apiDetails.auth}`;
 }
File diff suppressed because one or more lines are too long
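setupCodeQL loses the separate toolsDir argument and instead receives the mode and a GitHubVariant; when the variant is GHAE, getCodeQLBundleDownloadURL first queries the enterprise code-scanning endpoints for the bundle before trying the usual download sources. A hedged TypeScript sketch of the new call shape, mirroring the updated tests below (argument order from the compiled output; apiDetails and tmpDir are assumed, and top-level await assumes an ES module context):

import * as codeql from "./codeql";
import { getRunnerLogger } from "./logging";
import * as util from "./util";

await codeql.setupCodeQL(
  undefined,                 // codeqlURL: undefined lets the action resolve the bundle itself
  apiDetails,                // { auth, url }
  tmpDir,                    // tempDir; the separate toolsDir argument is gone
  "runner",                  // mode
  util.GitHubVariant.DOTCOM, // variant; GHAE enables the enterprise bundle lookup
  getRunnerLogger(true)
);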
57  lib/codeql.test.js  generated
@@ -24,15 +24,20 @@ const sampleApiDetails = {
 auth: "token",
 url: "https://github.com",
 };
+const sampleGHAEApiDetails = {
+auth: "token",
+url: "https://example.githubenterprise.com",
+};
 ava_1.default("download codeql bundle cache", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
+util.setupActionsVars(tmpDir, tmpDir);
 const versions = ["20200601", "20200610"];
 for (let i = 0; i < versions.length; i++) {
 const version = versions[i];
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
 }
 const cachedVersions = toolcache.findAllVersions("CodeQL");
@@ -41,36 +46,39 @@ ava_1.default("download codeql bundle cache", async (t) => {
 });
 ava_1.default("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
+util.setupActionsVars(tmpDir, tmpDir);
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
 });
 });
 ava_1.default("don't download codeql bundle cache with pinned different version cached", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
+util.setupActionsVars(tmpDir, tmpDir);
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
 t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
-await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
t.is(cachedVersions.length, 1);
|
t.is(cachedVersions.length, 1);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("download codeql bundle cache with different version cached (not pinned)", async (t) => {
|
ava_1.default("download codeql bundle cache with different version cached (not pinned)", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
|
util.setupActionsVars(tmpDir, tmpDir);
|
||||||
nock_1.default("https://example.com")
|
nock_1.default("https://example.com")
|
||||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||||
const platform = process.platform === "win32"
|
const platform = process.platform === "win32"
|
||||||
? "win64"
|
? "win64"
|
||||||
@@ -80,17 +88,18 @@ ava_1.default("download codeql bundle cache with different version cached (not p
|
|||||||
nock_1.default("https://github.com")
|
nock_1.default("https://github.com")
|
||||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
t.is(cachedVersions.length, 2);
|
t.is(cachedVersions.length, 2);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default('download codeql bundle cache with pinned different version cached if "latests" tools specified', async (t) => {
|
ava_1.default('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
|
util.setupActionsVars(tmpDir, tmpDir);
|
||||||
nock_1.default("https://example.com")
|
nock_1.default("https://example.com")
|
||||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||||
const platform = process.platform === "win32"
|
const platform = process.platform === "win32"
|
||||||
? "win64"
|
? "win64"
|
||||||
@@ -100,11 +109,39 @@ ava_1.default('download codeql bundle cache with pinned different version cached
|
|||||||
nock_1.default("https://github.com")
|
nock_1.default("https://github.com")
|
||||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||||
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
t.is(cachedVersions.length, 2);
|
t.is(cachedVersions.length, 2);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
ava_1.default("download codeql bundle from github ae endpoint", async (t) => {
|
||||||
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
|
util.setupActionsVars(tmpDir, tmpDir);
|
||||||
|
const bundleAssetID = 10;
|
||||||
|
const platform = process.platform === "win32"
|
||||||
|
? "win64"
|
||||||
|
: process.platform === "linux"
|
||||||
|
? "linux64"
|
||||||
|
: "osx64";
|
||||||
|
const codeQLBundleName = `codeql-bundle-${platform}.tar.gz`;
|
||||||
|
nock_1.default("https://example.githubenterprise.com")
|
||||||
|
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/find/${defaults.bundleVersion}`)
|
||||||
|
.reply(200, {
|
||||||
|
assets: { [codeQLBundleName]: bundleAssetID },
|
||||||
|
});
|
||||||
|
nock_1.default("https://example.githubenterprise.com")
|
||||||
|
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/download/${bundleAssetID}`)
|
||||||
|
.reply(200, {
|
||||||
|
url: `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`,
|
||||||
|
});
|
||||||
|
nock_1.default("https://example.githubenterprise.com")
|
||||||
|
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
||||||
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||||
|
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, "runner", util.GitHubVariant.GHAE, logging_1.getRunnerLogger(true));
|
||||||
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
|
t.is(cachedVersions.length, 1);
|
||||||
|
});
|
||||||
|
});
|
||||||
ava_1.default("parse codeql bundle url version", (t) => {
|
ava_1.default("parse codeql bundle url version", (t) => {
|
||||||
t.deepEqual(codeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
|
t.deepEqual(codeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
|
||||||
});
|
});
|
||||||
|
|||||||
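The new GHAE test above mocks a three-step flow: ask the instance's code-scanning API which asset id corresponds to the default bundle version, exchange that id for a download URL, and finally fetch the tarball from that URL. A rough sketch of the first two steps under those assumptions; the endpoint paths come from the mocked requests, while `fetchJSON` and the error-free control flow are placeholders for whatever `setupCodeQL` really does.

    // Rough sketch of the GHAE bundle lookup exercised by the test above (illustrative only).
    async function findGHAEBundleDownloadURL(apiDetails, bundleVersion, bundleName) {
        const base = `${apiDetails.url}/api/v3/enterprise/code-scanning/codeql-bundle`;
        const headers = { authorization: `token ${apiDetails.auth}` };
        // 1. Look up the asset id for this bundle version,
        //    e.g. { assets: { "codeql-bundle-linux64.tar.gz": 10 } }.
        const found = await fetchJSON(`${base}/find/${bundleVersion}`, headers);
        const assetID = found.assets[bundleName];
        // 2. Exchange the asset id for the actual download URL of the tarball.
        const download = await fetchJSON(`${base}/download/${assetID}`, headers);
        return download.url;
    }

    // Placeholder HTTP helper (relies on the global fetch available in Node 18+).
    async function fetchJSON(url, headers) {
        const response = await fetch(url, { headers });
        return response.json();
    }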
File diff suppressed because one or more lines are too long

24 lib/config-utils.js generated
@@ -128,7 +128,7 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath,
 /**
  * Retrieve the set of queries at the referenced remote repo and add them to resultMap.
  */
-async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile) {
+async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile) {
     let tok = queryUses.split("@");
     if (tok.length !== 2) {
         throw new Error(getQueryUsesInvalid(configFile, queryUses));
@@ -147,7 +147,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
     }
     const nwo = `${tok[0]}/${tok[1]}`;
     // Checkout the external repository
-    const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, githubUrl, tempDir, logger);
+    const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, apiDetails, tempDir, logger);
     const queryPath = tok.length > 2
         ? path.join(checkoutPath, tok.slice(2).join("/"))
         : checkoutPath;
@@ -161,7 +161,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
  * local paths starting with './', or references to remote repos, or
  * a finite set of hardcoded terms for builtin suites.
  */
-async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, githubUrl, logger, configFile) {
+async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, apiDetails, logger, configFile) {
     queryUses = queryUses.trim();
     if (queryUses === "") {
         throw new Error(getQueryUsesInvalid(configFile));
@@ -177,7 +177,7 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
         return;
     }
     // Otherwise, must be a reference to another repo
-    await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile);
+    await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
 }
 // Regex validating stars in paths or paths-ignore entries.
 // The intention is to only allow ** to appear when immediately
@@ -304,7 +304,7 @@ exports.getUnknownLanguagesError = getUnknownLanguagesError;
 async function getLanguagesInRepo(repository, apiDetails, logger) {
     logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
     const response = await api
-        .getApiClient(apiDetails, true)
+        .getApiClient(apiDetails, { allowLocalRun: true })
         .repos.listLanguages({
         owner: repository.owner,
         repo: repository.repo,
@@ -367,12 +367,12 @@ async function getLanguages(languagesInput, repository, apiDetails, logger) {
     }
     return parsedLanguages;
 }
-async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, githubUrl, logger) {
+async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, apiDetails, logger) {
     queriesInput = queriesInput.trim();
     // "+" means "don't override config file" - see shouldAddConfigFileQueries
     queriesInput = queriesInput.replace(/^\+/, "");
     for (const query of queriesInput.split(",")) {
-        await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, githubUrl, logger);
+        await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, apiDetails, logger);
     }
 }
 // Returns true if either no queries were provided in the workflow.
@@ -393,7 +393,7 @@ async function getDefaultConfig(languagesInput, queriesInput, repository, tempDi
     const queries = {};
     await addDefaultQueries(codeQL, languages, queries);
     if (queriesInput) {
-        await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails.url, logger);
+        await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
     }
     return {
         languages,
@@ -450,7 +450,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
     // unless they're prefixed with "+", in which case they supplement those
     // in the config file.
     if (queriesInput) {
-        await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails.url, logger);
+        await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
     }
     if (shouldAddConfigFileQueries(queriesInput) &&
         QUERIES_PROPERTY in parsedYAML) {
@@ -462,7 +462,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
                 typeof query[QUERIES_USES_PROPERTY] !== "string") {
                 throw new Error(getQueryUsesInvalid(configFile));
             }
-            await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, apiDetails.url, logger, configFile);
+            await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, apiDetails, logger, configFile);
         }
     }
     if (PATHS_IGNORE_PROPERTY in parsedYAML) {
@@ -556,7 +556,9 @@ async function getRemoteConfig(configFile, apiDetails) {
     if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
         throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
     }
-    const response = await api.getApiClient(apiDetails, true).repos.getContent({
+    const response = await api
+        .getApiClient(apiDetails, { allowLocalRun: true, allowExternal: true })
+        .repos.getContent({
         owner: pieces.groups.owner,
         repo: pieces.groups.repo,
         path: pieces.groups.path,
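The config-utils hunks above make two related changes: the bare `githubUrl` string that used to be threaded through `parseQueryUses`, `addRemoteQueries`, and `addQueriesFromWorkflow` is replaced by the full `apiDetails` object, and `getApiClient` now takes a named-options object instead of a positional boolean. A small sketch of that calling convention; the option names are the ones visible in the diff, but the body below is a guess at the semantics rather than the action's real client factory.

    // Illustrative sketch of the options-object convention; not the action's implementation.
    function getApiClient(apiDetails, { allowLocalRun = false, allowExternal = false } = {}) {
        // Assumption: allowExternal means "prefer the external-repository token when present",
        // which is how getRemoteConfig could read a config file from another repository.
        const auth = allowExternal && apiDetails.externalRepoAuth !== undefined
            ? apiDetails.externalRepoAuth
            : apiDetails.auth;
        return { auth, baseUrl: apiDetails.url, allowLocalRun };
    }

    // Old call:  getApiClient(apiDetails, true)
    // New call:  getApiClient(apiDetails, { allowLocalRun: true, allowExternal: true })
    const client = getApiClient({ auth: "primary-token", externalRepoAuth: "external-token", url: "https://github.example.com" }, { allowLocalRun: true, allowExternal: true });
    console.log(client.auth); // "external-token"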
File diff suppressed because one or more lines are too long

5 lib/config-utils.test.js generated
@@ -25,9 +25,10 @@ const util = __importStar(require("./util"));
 testing_utils_1.setupTests(ava_1.default);
 const sampleApiDetails = {
     auth: "token",
+    externalRepoAuth: "token",
     url: "https://github.example.com",
 };
-const gitHubVersion = { type: "dotcom" };
+const gitHubVersion = { type: util.GitHubVariant.DOTCOM };
 // Returns the filepath of the newly-created file
 function createConfigFile(inputFileContents, tmpDir) {
     const configFilePath = path.join(tmpDir, "input");
@@ -427,7 +428,7 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
     // This function just needs to be type-correct; it doesn't need to do anything,
     // since we're deliberately passing in invalid data
     const codeQL = codeql_1.setCodeQL({
-        async resolveQueries(_queries, _extraSearchPath) {
+        async resolveQueries() {
             return {
                 byLanguage: {
                     javascript: {},
File diff suppressed because one or more lines are too long

@@ -1,3 +1,3 @@
 {
-    "bundleVersion": "codeql-bundle-20201127"
+    "bundleVersion": "codeql-bundle-20210326"
 }
19 lib/external-queries.js generated
@@ -14,7 +14,7 @@ const safeWhich = __importStar(require("@chrisgavin/safe-which"));
 /**
  * Check out repository at the given ref, and return the directory of the checkout.
  */
-async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, logger) {
+async function checkoutExternalRepository(repository, ref, apiDetails, tempDir, logger) {
     logger.info(`Checking out ${repository}`);
     const checkoutLocation = path.join(tempDir, repository, ref);
     if (!checkoutLocation.startsWith(tempDir)) {
@@ -22,10 +22,10 @@ async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, l
         throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
     }
     if (!fs.existsSync(checkoutLocation)) {
-        const repoURL = `${githubUrl}/${repository}`;
+        const repoCloneURL = buildCheckoutURL(repository, apiDetails);
         await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
             "clone",
-            repoURL,
+            repoCloneURL,
             checkoutLocation,
         ]).exec();
         await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
@@ -38,4 +38,17 @@ async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, l
     return checkoutLocation;
 }
 exports.checkoutExternalRepository = checkoutExternalRepository;
+function buildCheckoutURL(repository, apiDetails) {
+    const repoCloneURL = new URL(apiDetails.url);
+    if (apiDetails.externalRepoAuth !== undefined) {
+        repoCloneURL.username = "x-access-token";
+        repoCloneURL.password = apiDetails.externalRepoAuth;
+    }
+    if (!repoCloneURL.pathname.endsWith("/")) {
+        repoCloneURL.pathname += "/";
+    }
+    repoCloneURL.pathname += `${repository}`;
+    return repoCloneURL.toString();
+}
+exports.buildCheckoutURL = buildCheckoutURL;
 //# sourceMappingURL=external-queries.js.map
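Because `buildCheckoutURL` is exported above, its behaviour is easy to exercise on its own: without `externalRepoAuth` the clone URL is simply `<instance>/<owner>/<repo>`, and with a token the credentials are embedded git-style as `x-access-token:<token>@host`. A short usage sketch; the require path assumes the compiled `lib/` output and the token value is a placeholder.

    // Usage sketch for the helper added above (path and token are placeholders).
    const externalQueries = require("./lib/external-queries");

    externalQueries.buildCheckoutURL("octo-org/custom-queries", {
        url: "https://github.example.com",
        externalRepoAuth: undefined,
    });
    // => "https://github.example.com/octo-org/custom-queries"

    externalQueries.buildCheckoutURL("octo-org/custom-queries", {
        url: "https://github.example.com",
        externalRepoAuth: "ghs_placeholder",
    });
    // => "https://x-access-token:ghs_placeholder@github.example.com/octo-org/custom-queries"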
File diff suppressed because one or more lines are too long
22 lib/external-queries.test.js generated
@@ -81,17 +81,35 @@ ava_1.default("checkoutExternalQueries", async (t) => {
         const commit2Sha = await runGit(["rev-parse", "HEAD"]);
         // Checkout the first commit, which should contain 'a' and 'b'
         t.false(fs.existsSync(path.join(tmpDir, repoName)));
-        await externalQueries.checkoutExternalRepository(repoName, commit1Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
+        await externalQueries.checkoutExternalRepository(repoName, commit1Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
         t.true(fs.existsSync(path.join(tmpDir, repoName)));
         t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
         t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
         t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
         // Checkout the second commit as well, which should only contain 'a'
         t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
-        await externalQueries.checkoutExternalRepository(repoName, commit2Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
+        await externalQueries.checkoutExternalRepository(repoName, commit2Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
         t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
         t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
         t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
     });
 });
+ava_1.default("buildCheckoutURL", (t) => {
+    t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
+        url: "https://github.com",
+        externalRepoAuth: undefined,
+    }), "https://github.com/foo/bar");
+    t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
+        url: "https://github.example.com/",
+        externalRepoAuth: undefined,
+    }), "https://github.example.com/foo/bar");
+    t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
+        url: "https://github.com",
+        externalRepoAuth: "abc",
+    }), "https://x-access-token:abc@github.com/foo/bar");
+    t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
+        url: "https://github.example.com/",
+        externalRepoAuth: "abc",
+    }), "https://x-access-token:abc@github.example.com/foo/bar");
+});
 //# sourceMappingURL=external-queries.test.js.map
File diff suppressed because one or more lines are too long
20 lib/init-action.js generated
@@ -56,29 +56,21 @@ async function run() {
     let toolsVersion;
     const apiDetails = {
         auth: actionsUtil.getRequiredInput("token"),
+        externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
         url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
     };
     const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
-    if (gitHubVersion !== undefined) {
-        util_1.checkGitHubVersionInRange(gitHubVersion, "actions", logger);
-    }
+    util_1.checkGitHubVersionInRange(gitHubVersion, "actions", logger);
     try {
         actionsUtil.prepareLocalRunEnvironment();
-        const workflowErrors = await actionsUtil.getWorkflowErrors();
-        // we do not want to worry users if linting is failing
-        // but we do want to send a status report containing this error code
-        // below
-        const userWorkflowErrors = workflowErrors.filter((o) => o.code !== "LintFailed");
-        if (userWorkflowErrors.length > 0) {
-            core.warning(actionsUtil.formatWorkflowErrors(userWorkflowErrors));
-        }
-        if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, actionsUtil.formatWorkflowCause(workflowErrors))))) {
+        const workflowErrors = await actionsUtil.validateWorkflow();
+        if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, workflowErrors)))) {
             return;
         }
-        const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", logger);
+        const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getTemporaryDirectory(), "actions", gitHubVersion.type, logger);
         codeql = initCodeQLResult.codeql;
         toolsVersion = initCodeQLResult.toolsVersion;
-        config = await init_1.initConfig(actionsUtil.getOptionalInput("languages"), actionsUtil.getOptionalInput("queries"), actionsUtil.getOptionalInput("config-file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, actionsUtil.getRequiredEnvParam("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
+        config = await init_1.initConfig(actionsUtil.getOptionalInput("languages"), actionsUtil.getOptionalInput("queries"), actionsUtil.getOptionalInput("config-file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), actionsUtil.getTemporaryDirectory(), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, actionsUtil.getRequiredEnvParam("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
         if (config.languages.includes(languages_1.Language.python) &&
             actionsUtil.getRequiredInput("setup-python-dependencies") === "true") {
             try {
File diff suppressed because one or more lines are too long

6 lib/init.js generated
@@ -16,9 +16,9 @@ const codeql_1 = require("./codeql");
 const configUtils = __importStar(require("./config-utils"));
 const tracer_config_1 = require("./tracer-config");
 const util = __importStar(require("./util"));
-async function initCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logger) {
+async function initCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger) {
     logger.startGroup("Setup CodeQL tools");
-    const { codeql, toolsVersion } = await codeql_1.setupCodeQL(codeqlURL, apiDetails, tempDir, toolsDir, mode, logger);
+    const { codeql, toolsVersion } = await codeql_1.setupCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger);
     await codeql.printVersion();
     logger.endGroup();
     return { codeql, toolsVersion };
@@ -129,7 +129,7 @@ exports.injectWindowsTracer = injectWindowsTracer;
 async function installPythonDeps(codeql, logger) {
     logger.startGroup("Setup Python dependencies");
     const scriptsFolder = path.resolve(__dirname, "../python-setup");
-    // Setup tools on the Github hosted runners
+    // Setup tools on the GitHub hosted runners
     if (process.env["ImageOS"] !== undefined) {
         try {
             if (process.platform === "win32") {
File diff suppressed because one or more lines are too long
51 lib/runner.js generated
@@ -82,7 +82,8 @@ program
     .description("Initializes CodeQL")
     .requiredOption("--repository <repository>", "Repository name. (Required)")
     .requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
-    .requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
+    .option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
+    .option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
     .option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.")
     .option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.")
     .option("--config-file <file>", "Path to config file.")
@@ -91,32 +92,32 @@ program
     .option("--tools-dir <dir>", "Directory to use for CodeQL tools and other files to store between runs. Default is a subdirectory of the home directory.")
     .option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
     .option("--debug", "Print more verbose output", false)
-    // This prevents a message like: error: unknown option '--trace-process-level'
-    // Remove this if commander.js starts supporting hidden options.
-    .allowUnknownOption()
+    .option("--trace-process-name <string>", "(Advanced, windows-only) Inject a windows tracer of this process into a process with the given process name.")
+    .option("--trace-process-level <number>", "(Advanced, windows-only) Inject a windows tracer of this process into a parent process <number> levels up.")
     .action(async (cmd) => {
     const logger = logging_1.getRunnerLogger(cmd.debug);
     try {
         const tempDir = getTempDir(cmd.tempDir);
         const toolsDir = getToolsDir(cmd.toolsDir);
+        util_1.setupActionsVars(tempDir, toolsDir);
         // Wipe the temp dir
         logger.info(`Cleaning temp directory ${tempDir}`);
         fs.rmdirSync(tempDir, { recursive: true });
         fs.mkdirSync(tempDir, { recursive: true });
+        const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
        const apiDetails = {
-            auth: cmd.githubAuth,
-            url: util_1.parseGithubUrl(cmd.githubUrl),
+            auth,
+            externalRepoAuth: auth,
+            url: util_1.parseGitHubUrl(cmd.githubUrl),
         };
         const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
-        if (gitHubVersion !== undefined) {
-            util_1.checkGitHubVersionInRange(gitHubVersion, "runner", logger);
-        }
+        util_1.checkGitHubVersionInRange(gitHubVersion, "runner", logger);
         let codeql;
         if (cmd.codeqlPath !== undefined) {
             codeql = codeql_1.getCodeQL(cmd.codeqlPath);
         }
         else {
-            codeql = (await init_1.initCodeQL(undefined, apiDetails, tempDir, toolsDir, "runner", logger)).codeql;
+            codeql = (await init_1.initCodeQL(undefined, apiDetails, tempDir, "runner", gitHubVersion.type, logger)).codeql;
         }
         const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
         const tracerConfig = await init_1.runInit(codeql, config);
@@ -178,6 +179,7 @@ program
             throw new Error("Config file could not be found at expected location. " +
                 "Was the 'init' command run with the same '--temp-dir' argument as this command.");
         }
+        util_1.setupActionsVars(config.tempDir, config.toolCacheDir);
         importTracerEnvironment(config);
         let language = undefined;
         if (cmd.language !== undefined) {
@@ -207,7 +209,8 @@ program
     .requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
     .requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
     .requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
-    .requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
+    .option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
+    .option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
     .option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
     .option("--no-upload", "Do not upload results after analysis.")
     .option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
@@ -220,18 +223,24 @@ program
     .action(async (cmd) => {
     const logger = logging_1.getRunnerLogger(cmd.debug);
     try {
-        const tempDir = getTempDir(cmd.tempDir);
-        const outputDir = cmd.outputDir || path.join(tempDir, "codeql-sarif");
         const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
         if (config === undefined) {
             throw new Error("Config file could not be found at expected location. " +
|
throw new Error("Config file could not be found at expected location. " +
|
||||||
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
|
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
|
||||||
}
|
}
|
||||||
|
util_1.setupActionsVars(config.tempDir, config.toolCacheDir);
|
||||||
|
const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
|
||||||
const apiDetails = {
|
const apiDetails = {
|
||||||
auth: cmd.githubAuth,
|
auth,
|
||||||
url: util_1.parseGithubUrl(cmd.githubUrl),
|
url: util_1.parseGitHubUrl(cmd.githubUrl),
|
||||||
};
|
};
|
||||||
await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, apiDetails, cmd.upload, "runner", outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
|
const outputDir = cmd.outputDir || path.join(config.tempDir, "codeql-sarif");
|
||||||
|
await analyze_1.runAnalyze(outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
|
||||||
|
if (!cmd.upload) {
|
||||||
|
logger.info("Not uploading results");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
await upload_lib.uploadFromRunner(outputDir, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.checkoutPath || process.cwd(), config.gitHubVersion, apiDetails, logger);
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
logger.error("Analyze failed");
|
logger.error("Analyze failed");
|
||||||
@@ -247,18 +256,20 @@ program
|
|||||||
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
|
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
|
||||||
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
|
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
|
||||||
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
|
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
|
||||||
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
|
.option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
|
||||||
|
.option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
|
||||||
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
|
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
|
||||||
.option("--debug", "Print more verbose output", false)
|
.option("--debug", "Print more verbose output", false)
|
||||||
.action(async (cmd) => {
|
.action(async (cmd) => {
|
||||||
const logger = logging_1.getRunnerLogger(cmd.debug);
|
const logger = logging_1.getRunnerLogger(cmd.debug);
|
||||||
|
const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
|
||||||
const apiDetails = {
|
const apiDetails = {
|
||||||
auth: cmd.githubAuth,
|
auth,
|
||||||
url: util_1.parseGithubUrl(cmd.githubUrl),
|
url: util_1.parseGitHubUrl(cmd.githubUrl),
|
||||||
};
|
};
|
||||||
try {
|
try {
|
||||||
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
|
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
|
||||||
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, gitHubVersion, apiDetails, "runner", logger);
|
await upload_lib.uploadFromRunner(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
logger.error("Upload failed");
|
logger.error("Upload failed");
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
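For illustration, a minimal sketch of how a caller could drive the new token handling shown above: instead of passing --github-auth on the command line, the token is piped to --github-auth-stdin. The runner binary name, repository, commit and ref values are invented for the sketch, and --sarif-file is inferred from the cmd.sarifFile property rather than taken from the visible hunks.

// invoke-runner-sketch.js: illustrative only; see the caveats above.
const { spawn } = require("child_process");

function runUpload(token) {
  const child = spawn("./codeql-runner-linux", [
    "upload",
    "--sarif-file", "codeql-results.sarif",
    "--repository", "octo-org/octo-repo",
    "--commit", "0123456789abcdef0123456789abcdef01234567",
    "--ref", "refs/heads/main",
    "--github-url", "https://github.com",
    "--github-auth-stdin",
  ], { stdio: ["pipe", "inherit", "inherit"] });
  // The token goes to the child's stdin rather than onto the command line,
  // so it does not show up in process listings or shell history.
  child.stdin.end(token);
  return new Promise((resolve) => child.on("close", resolve));
}

// Example: runUpload(process.env.GITHUB_TOKEN);

Keeping the token off the command line is the motivation for deprecating --github-auth in the hunks above.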
lib/tracer-config.js (generated): 6 changed lines
@@ -141,6 +141,12 @@ async function getCombinedTracerConfig(config, codeql) {
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
+// On macos it's necessary to prefix the build command with the runner executable
+// in order to trace when System Integrity Protection is enabled.
+// The executable also exists and works for other platforms so we output this env
+// var with a path to the runner regardless so it's always available.
+const runnerExeName = process.platform === "win32" ? "runner.exe" : "runner";
+mainTracerConfig.env["CODEQL_RUNNER"] = path.join(mainTracerConfig.env["CODEQL_DIST"], "tools", mainTracerConfig.env["CODEQL_PLATFORM"], runnerExeName);
return mainTracerConfig;
}
exports.getCombinedTracerConfig = getCombinedTracerConfig;
File diff suppressed because one or more lines are too long
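A rough sketch of how a build step might consume the CODEQL_RUNNER variable exported by the hunk above. Wrapping the build command this way is an assumption made for the example; only the variable itself comes from the diff.

// traced-build-sketch.js: hypothetical consumer of CODEQL_RUNNER.
const { spawnSync } = require("child_process");

function runTracedBuild(buildCommand, buildArgs, env = process.env) {
  // On macOS, System Integrity Protection prevents the tracer from being
  // injected into protected binaries, so the build is launched through the
  // runner executable when one is available.
  if (process.platform === "darwin" && env.CODEQL_RUNNER) {
    return spawnSync(env.CODEQL_RUNNER, [buildCommand, ...buildArgs], { env, stdio: "inherit" });
  }
  return spawnSync(buildCommand, buildArgs, { env, stdio: "inherit" });
}

// Example: runTracedBuild("make", ["-j2"]);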
lib/tracer-config.test.js (generated): 23 changed lines
@@ -29,7 +29,7 @@ function getTestConfig(tmpDir) {
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
-gitHubVersion: { type: "dotcom" },
+gitHubVersion: { type: util.GitHubVariant.DOTCOM },
};
}
// A very minimal setup
@@ -238,6 +238,7 @@ ava_1.default("getCombinedTracerConfig - return undefined when no languages are
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: "abc",
+CODEQL_DIST: "/",
foo: "bar",
};
},
@@ -250,17 +251,28 @@ ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n2\nabc\ndef");
+const bundlePath = path.join(tmpDir, "bundle");
+const codeqlPlatform = process.platform === "win32"
+? "win64"
+: process.platform === "darwin"
+? "osx64"
+: "linux64";
const codeQL = codeql_1.setCodeQL({
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: spec,
+CODEQL_DIST: bundlePath,
+CODEQL_PLATFORM: codeqlPlatform,
foo: "bar",
};
},
});
const result = await tracer_config_1.getCombinedTracerConfig(config, codeQL);
+t.notDeepEqual(result, undefined);
const expectedEnv = {
foo: "bar",
+CODEQL_DIST: bundlePath,
+CODEQL_PLATFORM: codeqlPlatform,
ODASA_TRACER_CONFIGURATION: result.spec,
};
if (process.platform === "darwin") {
@@ -269,6 +281,15 @@ ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
else if (process.platform !== "win32") {
expectedEnv["LD_PRELOAD"] = path.join(path.dirname(codeQL.getPath()), "tools", "linux64", "${LIB}trace.so");
}
+if (process.platform === "win32") {
+expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/win64/runner.exe");
+}
+else if (process.platform === "darwin") {
+expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/osx64/runner");
+}
+else {
+expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/linux64/runner");
+}
t.deepEqual(result, {
spec: path.join(tmpDir, "compound-spec"),
env: expectedEnv,
File diff suppressed because one or more lines are too long
lib/upload-lib.js (generated): 68 changed lines
@@ -17,8 +17,10 @@ const core = __importStar(require("@actions/core"));
const file_url_1 = __importDefault(require("file-url"));
const jsonschema = __importStar(require("jsonschema"));
const semver = __importStar(require("semver"));
+const actionsUtil = __importStar(require("./actions-util"));
const api = __importStar(require("./api-client"));
const fingerprints = __importStar(require("./fingerprints"));
+const repository_1 = require("./repository");
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
// Takes a list of paths to sarif files and combines them together,
@@ -63,36 +65,72 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, mode, logger) {
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
}
+// Recursively walks a directory and returns all SARIF files it finds.
+// Does not follow symlinks.
+function findSarifFilesInDir(sarifPath) {
+const sarifFiles = [];
+const walkSarifFiles = (dir) => {
+const entries = fs.readdirSync(dir, { withFileTypes: true });
+for (const entry of entries) {
+if (entry.isFile() && entry.name.endsWith(".sarif")) {
+sarifFiles.push(path.resolve(dir, entry.name));
+}
+else if (entry.isDirectory()) {
+walkSarifFiles(path.resolve(dir, entry.name));
+}
+}
+};
+walkSarifFiles(sarifPath);
+return sarifFiles;
+}
+exports.findSarifFilesInDir = findSarifFilesInDir;
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
-async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, gitHubVersion, apiDetails, mode, logger) {
+async function uploadFromActions(sarifPath, gitHubVersion, apiDetails, logger) {
-const sarifFiles = [];
+return await uploadFiles(getSarifFilePaths(sarifPath), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, "actions", logger);
+}
+exports.uploadFromActions = uploadFromActions;
+// Uploads a single sarif file or a directory of sarif files
+// depending on what the path happens to refer to.
+// Returns true iff the upload occurred and succeeded
+async function uploadFromRunner(sarifPath, repositoryNwo, commitOid, ref, checkoutPath, gitHubVersion, apiDetails, logger) {
+return await uploadFiles(getSarifFilePaths(sarifPath), repositoryNwo, commitOid, ref, undefined, undefined, undefined, checkoutPath, undefined, gitHubVersion, apiDetails, "runner", logger);
+}
+exports.uploadFromRunner = uploadFromRunner;
+function getSarifFilePaths(sarifPath) {
if (!fs.existsSync(sarifPath)) {
throw new Error(`Path does not exist: ${sarifPath}`);
}
+let sarifFiles;
if (fs.lstatSync(sarifPath).isDirectory()) {
-const paths = fs
+sarifFiles = findSarifFilesInDir(sarifPath);
-.readdirSync(sarifPath)
-.filter((f) => f.endsWith(".sarif"))
-.map((f) => path.resolve(sarifPath, f));
-for (const filepath of paths) {
-sarifFiles.push(filepath);
-}
if (sarifFiles.length === 0) {
throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
}
}
else {
-sarifFiles.push(sarifPath);
+sarifFiles = [sarifPath];
}
-return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, gitHubVersion, apiDetails, mode, logger);
+return sarifFiles;
}
-exports.upload = upload;
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) {
let numResults = 0;
-for (const run of JSON.parse(sarif).runs) {
+let parsedSarif;
+try {
+parsedSarif = JSON.parse(sarif);
+}
+catch (e) {
+throw new Error(`Invalid SARIF. JSON syntax error: ${e.message}`);
+}
+if (!Array.isArray(parsedSarif.runs)) {
+throw new Error("Invalid SARIF. Missing 'runs' array.");
+}
+for (const run of parsedSarif.runs) {
+if (!Array.isArray(run.results)) {
+throw new Error("Invalid SARIF. Missing 'results' array in run.");
+}
numResults += run.results.length;
}
return numResults;
@@ -137,12 +175,12 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
base_sha: undefined,
};
// This behaviour can be made the default when support for GHES 3.0 is discontinued.
-if (gitHubVersion.type === "dotcom" ||
+if (gitHubVersion.type !== util.GitHubVariant.GHES ||
semver.satisfies(gitHubVersion.version, `>=3.1`)) {
if (process.env.GITHUB_EVENT_NAME === "pull_request" &&
process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
-payloadObj.base_ref = `refs/heads/$githubEvent.pull_request.base.ref`;
+payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
}
}
File diff suppressed because one or more lines are too long
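A hypothetical driver for the new uploadFromRunner entry point added above. The argument order follows the function signature in the hunk; every concrete value (repository, SHA, ref, paths, token source) is invented for the sketch, and GitHubVariant.DOTCOM is written as its numeric value 0 from the enum introduced in lib/util.js.

// upload-driver-sketch.js: illustrative only.
const uploadLib = require("./upload-lib");
const { parseRepositoryNwo } = require("./repository");
const { getRunnerLogger } = require("./logging");

async function main() {
  const logger = getRunnerLogger(true);
  await uploadLib.uploadFromRunner(
    "/tmp/codeql-sarif",                        // file or directory of *.sarif files
    parseRepositoryNwo("octo-org/octo-repo"),   // repository in owner/name form
    "0123456789abcdef0123456789abcdef01234567", // commit SHA that was analyzed
    "refs/heads/main",                          // ref that was analyzed
    process.cwd(),                              // checkout path
    { type: 0 },                                // gitHubVersion: GitHubVariant.DOTCOM
    { auth: process.env.GITHUB_TOKEN, url: "https://github.com" }, // apiDetails
    logger
  );
}

main().catch((e) => {
  console.error(e);
  process.exit(1);
});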
lib/upload-lib.test.js (generated): 46 changed lines
@@ -1,7 +1,4 @@
"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-return (mod && mod.__esModule) ? mod : { "default": mod };
-};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -9,11 +6,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
Object.defineProperty(exports, "__esModule", { value: true });
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
+const util_1 = require("./util");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("validateSarifFileSchema - valid", (t) => {
const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
@@ -25,12 +28,12 @@ ava_1.default("validateSarifFileSchema - invalid", (t) => {
});
ava_1.default("validate correct payload used per version", async (t) => {
const newVersions = [
-{ type: "dotcom" },
+{ type: util_1.GitHubVariant.DOTCOM },
-{ type: "ghes", version: "3.1.0" },
+{ type: util_1.GitHubVariant.GHES, version: "3.1.0" },
];
const oldVersions = [
-{ type: "ghes", version: "2.22.1" },
+{ type: util_1.GitHubVariant.GHES, version: "2.22.1" },
-{ type: "ghes", version: "3.0.0" },
+{ type: util_1.GitHubVariant.GHES, version: "3.0.0" },
];
const allVersions = newVersions.concat(oldVersions);
process.env["GITHUB_EVENT_NAME"] = "push";
@@ -44,8 +47,8 @@ ava_1.default("validate correct payload used per version", async (t) => {
process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`;
for (const version of newVersions) {
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions");
-t.truthy(payload.base_ref);
+t.deepEqual(payload.base_ref, "refs/heads/master");
-t.truthy(payload.base_sha);
+t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
}
for (const version of oldVersions) {
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions");
@@ -54,4 +57,29 @@ ava_1.default("validate correct payload used per version", async (t) => {
t.falsy(payload.base_sha);
}
});
+ava_1.default("finding SARIF files", async (t) => {
+await util_1.withTmpDir(async (tmpDir) => {
+// include a couple of sarif files
+fs.writeFileSync(path.join(tmpDir, "a.sarif"), "");
+fs.writeFileSync(path.join(tmpDir, "b.sarif"), "");
+// other random files shouldn't be returned
+fs.writeFileSync(path.join(tmpDir, "c.foo"), "");
+// we should recursively look in subdirectories
+fs.mkdirSync(path.join(tmpDir, "dir1"));
+fs.writeFileSync(path.join(tmpDir, "dir1", "d.sarif"), "");
+fs.mkdirSync(path.join(tmpDir, "dir1", "dir2"));
+fs.writeFileSync(path.join(tmpDir, "dir1", "dir2", "e.sarif"), "");
+// we should ignore symlinks
+fs.mkdirSync(path.join(tmpDir, "dir3"));
+fs.symlinkSync(tmpDir, path.join(tmpDir, "dir3", "symlink1"), "dir");
+fs.symlinkSync(path.join(tmpDir, "a.sarif"), path.join(tmpDir, "dir3", "symlink2.sarif"), "file");
+const sarifFiles = uploadLib.findSarifFilesInDir(tmpDir);
+t.deepEqual(sarifFiles, [
+path.join(tmpDir, "a.sarif"),
+path.join(tmpDir, "b.sarif"),
+path.join(tmpDir, "dir1", "d.sarif"),
+path.join(tmpDir, "dir1", "dir2", "e.sarif"),
+]);
+});
+});
//# sourceMappingURL=upload-lib.test.js.map
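The expectations above (base_ref of refs/heads/master and a fixed base_sha) exercise the template-literal fix in lib/upload-lib.js. A minimal restatement follows; the event object is a trimmed-down assumption of what src/testdata/pull_request.json contains, with ref and sha copied from the test.

// base-ref-sketch.js: illustrative only.
const githubEvent = {
  pull_request: {
    base: {
      ref: "master",
      sha: "f95f852bd8fca8fcc58a9a2d6c842781e32a215e",
    },
  },
};

const payloadObj = {};
// The pre-fix code interpolated nothing because the ${...} braces were missing,
// so base_ref ended up as the literal text "refs/heads/$githubEvent.pull_request.base.ref".
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;

console.log(payloadObj.base_ref); // "refs/heads/master"
console.log(payloadObj.base_sha); // "f95f852bd8fca8fcc58a9a2d6c842781e32a215e"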
File diff suppressed because one or more lines are too long
lib/upload-sarif-action.js (generated): 3 changed lines
@@ -10,7 +10,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const logging_1 = require("./logging");
-const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
async function sendSuccessStatusReport(startedAt, uploadStats) {
@@ -32,7 +31,7 @@ async function run() {
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
-const uploadStats = await upload_lib.upload(actionsUtil.getRequiredInput("sarif_file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, "actions", logging_1.getActionsLogger());
+const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, logging_1.getActionsLogger());
await sendSuccessStatusReport(startedAt, uploadStats);
}
catch (error) {
File diff suppressed because one or more lines are too long
lib/util.js (generated): 112 changed lines
@@ -72,9 +72,21 @@ async function withTmpDir(body) {
return result;
}
exports.withTmpDir = withTmpDir;
+/**
+* Gets an OS-specific amount of memory (in MB) to reserve for OS processes
+* when the user doesn't explicitly specify a memory setting.
+* This is a heuristic to avoid OOM errors (exit code 137 / SIGKILL)
+* from committing too much of the available memory to CodeQL.
+* @returns number
+*/
+function getSystemReservedMemoryMegaBytes() {
+// Windows needs more memory for OS processes.
+return 1024 * (process.platform === "win32" ? 1.5 : 1);
+}
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
-* specified, the total available memory will be used minus 256 MB.
+* specified, the total available memory will be used minus a threshold
+* reserved for the OS.
*
* @returns string
*/
@@ -89,8 +101,8 @@ function getMemoryFlag(userInput) {
else {
const totalMemoryBytes = os.totalmem();
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
-const systemReservedMemoryMegaBytes = 256;
+const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes();
-memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
+memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
}
return `--ram=${Math.floor(memoryToUseMegaBytes)}`;
}
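A standalone restatement of the reserved-memory heuristic introduced above, kept deliberately small; the exported getMemoryFlag in lib/util.js additionally handles invalid user input, which is omitted here. On a 16 GB Windows machine the default works out to --ram=14848 (16384 minus 1536), and to --ram=15360 on Linux or macOS.

// ram-flag-sketch.js: simplified restatement for illustration.
const os = require("os");

function getSystemReservedMemoryMegaBytes() {
  // Windows reserves 1.5 GB for OS processes, other platforms 1 GB.
  return 1024 * (process.platform === "win32" ? 1.5 : 1);
}

function getMemoryFlag(userInput) {
  if (userInput) {
    return `--ram=${Number(userInput)}`;
  }
  const totalMemoryMegaBytes = os.totalmem() / (1024 * 1024);
  return `--ram=${Math.floor(totalMemoryMegaBytes - getSystemReservedMemoryMegaBytes())}`;
}

console.log(getMemoryFlag(undefined)); // e.g. "--ram=14848" on a 16 GB Windows machine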
@@ -159,7 +171,7 @@ exports.getCodeQLDatabasePath = getCodeQLDatabasePath;
* Parses user input of a github.com or GHES URL to a canonical form.
* Removes any API prefix or suffix if one is present.
*/
-function parseGithubUrl(inputUrl) {
+function parseGitHubUrl(inputUrl) {
const originalUrl = inputUrl;
if (inputUrl.indexOf("://") === -1) {
inputUrl = `https://${inputUrl}`;
@@ -193,14 +205,20 @@ function parseGithubUrl(inputUrl) {
}
return url.toString();
}
-exports.parseGithubUrl = parseGithubUrl;
+exports.parseGitHubUrl = parseGitHubUrl;
const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
const CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR = "CODEQL_ACTION_WARNED_ABOUT_VERSION";
let hasBeenWarnedAboutVersion = false;
+var GitHubVariant;
+(function (GitHubVariant) {
+GitHubVariant[GitHubVariant["DOTCOM"] = 0] = "DOTCOM";
+GitHubVariant[GitHubVariant["GHES"] = 1] = "GHES";
+GitHubVariant[GitHubVariant["GHAE"] = 2] = "GHAE";
+})(GitHubVariant = exports.GitHubVariant || (exports.GitHubVariant = {}));
async function getGitHubVersion(apiDetails) {
// We can avoid making an API request in the standard dotcom case
-if (parseGithubUrl(apiDetails.url) === exports.GITHUB_DOTCOM_URL) {
+if (parseGitHubUrl(apiDetails.url) === exports.GITHUB_DOTCOM_URL) {
-return { type: "dotcom" };
+return { type: GitHubVariant.DOTCOM };
}
// Doesn't strictly have to be the meta endpoint as we're only
// using the response headers which are available on every request.
@@ -209,23 +227,26 @@ async function getGitHubVersion(apiDetails) {
// This happens on dotcom, although we expect to have already returned in that
// case. This can also serve as a fallback in cases we haven't foreseen.
if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined) {
-return { type: "dotcom" };
+return { type: GitHubVariant.DOTCOM };
+}
+if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "GitHub AE") {
+return { type: GitHubVariant.GHAE };
}
const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER];
-return { type: "ghes", version };
+return { type: GitHubVariant.GHES, version };
}
exports.getGitHubVersion = getGitHubVersion;
function checkGitHubVersionInRange(version, mode, logger) {
-if (hasBeenWarnedAboutVersion || version.type !== "ghes") {
+if (hasBeenWarnedAboutVersion || version.type !== GitHubVariant.GHES) {
return;
}
const disallowedAPIVersionReason = apiVersionInRange(version.version, apiCompatibility.minimumVersion, apiCompatibility.maximumVersion);
const toolName = mode === "actions" ? "Action" : "Runner";
if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_OLD) {
-logger.warning(`The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${version}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`);
+logger.warning(`The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${version.version}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`);
}
if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_NEW) {
-logger.warning(`GitHub Enterprise ${version} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`);
+logger.warning(`GitHub Enterprise ${version.version} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`);
}
hasBeenWarnedAboutVersion = true;
if (mode === "actions") {
@@ -248,4 +269,71 @@ function apiVersionInRange(version, minimumVersion, maximumVersion) {
return undefined;
}
exports.apiVersionInRange = apiVersionInRange;
+/**
+* Retrieves the github auth token for use with the runner. There are
+* three possible locations for the token:
+*
+* 1. from the cli (considered insecure)
+* 2. from stdin
+* 3. from the GITHUB_TOKEN environment variable
+*
+* If both 1 & 2 are specified, then an error is thrown.
+* If 1 & 3 or 2 & 3 are specified, then the environment variable is ignored.
+*
+* @param githubAuth a github app token or PAT
+* @param fromStdIn read the github app token or PAT from stdin up to, but excluding the first whitespace
+* @param readable the readable stream to use for getting the token (defaults to stdin)
+*
+* @return a promise resolving to the auth token.
+*/
+async function getGitHubAuth(logger, githubAuth, fromStdIn, readable = process.stdin) {
+if (githubAuth && fromStdIn) {
+throw new Error("Cannot specify both `--github-auth` and `--github-auth-stdin`. Please use `--github-auth-stdin`, which is more secure.");
+}
+if (githubAuth) {
+logger.warning("Using `--github-auth` via the CLI is insecure. Use `--github-auth-stdin` instead.");
+return githubAuth;
+}
+if (fromStdIn) {
+return new Promise((resolve, reject) => {
+let token = "";
+readable.on("data", (data) => {
+token += data.toString("utf8");
+});
+readable.on("end", () => {
+token = token.split(/\s+/)[0].trim();
+if (token) {
+resolve(token);
+}
+else {
+reject(new Error("Standard input is empty"));
+}
+});
+readable.on("error", (err) => {
+reject(err);
+});
+});
+}
+if (process.env.GITHUB_TOKEN) {
+return process.env.GITHUB_TOKEN;
+}
+throw new Error("No GitHub authentication token was specified. Please provide a token via the GITHUB_TOKEN environment variable, or by adding the `--github-auth-stdin` flag and passing the token via standard input.");
+}
+exports.getGitHubAuth = getGitHubAuth;
+// Sets environment variables that make using some libraries designed for
+// use only on actions safe to use outside of actions.
+//
+// Obviously this is not a tremendously great thing we're doing and it
+// would be better to write our own implementation of libraries to use
+// outside of actions. For now this works well enough.
+//
+// Currently this list of libraries that is deemed to now be safe includes:
+// - @actions/tool-cache
+//
+// Also see "queries/unguarded-action-lib.ql".
+function setupActionsVars(tempDir, toolsDir) {
+process.env["RUNNER_TEMP"] = tempDir;
+process.env["RUNNER_TOOL_CACHE"] = toolsDir;
+}
+exports.setupActionsVars = setupActionsVars;
//# sourceMappingURL=util.js.map
File diff suppressed because one or more lines are too long
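A small sketch of the stdin path through the new getGitHubAuth helper, mirroring the stream handling added above and the Readable.from technique used in lib/util.test.js. The require paths assume the compiled lib/ layout shown in this diff, and the token value is a placeholder.

// auth-from-stream-sketch.js: illustrative only.
const stream = require("stream");
const util = require("./util");
const { getRunnerLogger } = require("./logging");

async function main() {
  const logger = getRunnerLogger(true);
  // Simulates --github-auth-stdin: only the text up to the first whitespace is kept.
  const fakeStdin = stream.Readable.from(["ghp_exampletoken\ntrailing noise"]);
  const token = await util.getGitHubAuth(logger, undefined, true, fakeStdin);
  console.log(token); // "ghp_exampletoken"
}

main().catch(console.error);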
lib/util.test.js (generated): 82 changed lines
@@ -12,6 +12,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const os = __importStar(require("os"));
|
const os = __importStar(require("os"));
|
||||||
|
const stream = __importStar(require("stream"));
|
||||||
const github = __importStar(require("@actions/github"));
|
const github = __importStar(require("@actions/github"));
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const sinon_1 = __importDefault(require("sinon"));
|
const sinon_1 = __importDefault(require("sinon"));
|
||||||
@@ -27,9 +28,10 @@ ava_1.default("getToolNames", (t) => {
|
|||||||
});
|
});
|
||||||
ava_1.default("getMemoryFlag() should return the correct --ram flag", (t) => {
|
ava_1.default("getMemoryFlag() should return the correct --ram flag", (t) => {
|
||||||
const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
|
const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
|
||||||
|
const expectedThreshold = process.platform === "win32" ? 1536 : 1024;
|
||||||
const tests = [
|
const tests = [
|
||||||
[undefined, `--ram=${totalMem - 256}`],
|
[undefined, `--ram=${totalMem - expectedThreshold}`],
|
||||||
["", `--ram=${totalMem - 256}`],
|
["", `--ram=${totalMem - expectedThreshold}`],
|
||||||
["512", "--ram=512"],
|
["512", "--ram=512"],
|
||||||
];
|
];
|
||||||
for (const [input, expectedFlag] of tests) {
|
for (const [input, expectedFlag] of tests) {
|
||||||
@@ -100,27 +102,27 @@ ava_1.default("getExtraOptionsEnvParam() fails on invalid JSON", (t) => {
|
|||||||
t.throws(util.getExtraOptionsEnvParam);
|
t.throws(util.getExtraOptionsEnvParam);
|
||||||
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
|
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
|
||||||
});
|
});
|
||||||
ava_1.default("parseGithubUrl", (t) => {
|
ava_1.default("parseGitHubUrl", (t) => {
|
||||||
t.deepEqual(util.parseGithubUrl("github.com"), "https://github.com");
|
t.deepEqual(util.parseGitHubUrl("github.com"), "https://github.com");
|
||||||
t.deepEqual(util.parseGithubUrl("https://github.com"), "https://github.com");
|
t.deepEqual(util.parseGitHubUrl("https://github.com"), "https://github.com");
|
||||||
t.deepEqual(util.parseGithubUrl("https://api.github.com"), "https://github.com");
|
t.deepEqual(util.parseGitHubUrl("https://api.github.com"), "https://github.com");
|
||||||
t.deepEqual(util.parseGithubUrl("https://github.com/foo/bar"), "https://github.com");
|
t.deepEqual(util.parseGitHubUrl("https://github.com/foo/bar"), "https://github.com");
|
||||||
t.deepEqual(util.parseGithubUrl("github.example.com"), "https://github.example.com/");
|
t.deepEqual(util.parseGitHubUrl("github.example.com"), "https://github.example.com/");
|
||||||
t.deepEqual(util.parseGithubUrl("https://github.example.com"), "https://github.example.com/");
|
t.deepEqual(util.parseGitHubUrl("https://github.example.com"), "https://github.example.com/");
|
||||||
t.deepEqual(util.parseGithubUrl("https://api.github.example.com"), "https://github.example.com/");
|
t.deepEqual(util.parseGitHubUrl("https://api.github.example.com"), "https://github.example.com/");
|
||||||
t.deepEqual(util.parseGithubUrl("https://github.example.com/api/v3"), "https://github.example.com/");
|
t.deepEqual(util.parseGitHubUrl("https://github.example.com/api/v3"), "https://github.example.com/");
|
||||||
-    t.deepEqual(util.parseGithubUrl("https://github.example.com:1234"), "https://github.example.com:1234/");
+    t.deepEqual(util.parseGitHubUrl("https://github.example.com:1234"), "https://github.example.com:1234/");
-    t.deepEqual(util.parseGithubUrl("https://api.github.example.com:1234"), "https://github.example.com:1234/");
+    t.deepEqual(util.parseGitHubUrl("https://api.github.example.com:1234"), "https://github.example.com:1234/");
-    t.deepEqual(util.parseGithubUrl("https://github.example.com:1234/api/v3"), "https://github.example.com:1234/");
+    t.deepEqual(util.parseGitHubUrl("https://github.example.com:1234/api/v3"), "https://github.example.com:1234/");
-    t.deepEqual(util.parseGithubUrl("https://github.example.com/base/path"), "https://github.example.com/base/path/");
+    t.deepEqual(util.parseGitHubUrl("https://github.example.com/base/path"), "https://github.example.com/base/path/");
-    t.deepEqual(util.parseGithubUrl("https://github.example.com/base/path/api/v3"), "https://github.example.com/base/path/");
+    t.deepEqual(util.parseGitHubUrl("https://github.example.com/base/path/api/v3"), "https://github.example.com/base/path/");
-    t.throws(() => util.parseGithubUrl(""), {
+    t.throws(() => util.parseGitHubUrl(""), {
         message: '"" is not a valid URL',
     });
-    t.throws(() => util.parseGithubUrl("ssh://github.com"), {
+    t.throws(() => util.parseGitHubUrl("ssh://github.com"), {
         message: '"ssh://github.com" is not a http or https URL',
     });
-    t.throws(() => util.parseGithubUrl("http:///::::433"), {
+    t.throws(() => util.parseGitHubUrl("http:///::::433"), {
         message: '"http:///::::433" is not a valid URL',
     });
 });
@@ -152,18 +154,54 @@ ava_1.default("getGitHubVersion", async (t) => {
         auth: "",
         url: "https://github.com",
     });
-    t.deepEqual("dotcom", v.type);
+    t.deepEqual(util.GitHubVariant.DOTCOM, v.type);
     mockGetMetaVersionHeader("2.0");
     const v2 = await util.getGitHubVersion({
         auth: "",
         url: "https://ghe.example.com",
     });
-    t.deepEqual({ type: "ghes", version: "2.0" }, v2);
+    t.deepEqual({ type: util.GitHubVariant.GHES, version: "2.0" }, v2);
+    mockGetMetaVersionHeader("GitHub AE");
+    const ghae = await util.getGitHubVersion({
+        auth: "",
+        url: "https://example.githubenterprise.com",
+    });
+    t.deepEqual({ type: util.GitHubVariant.GHAE }, ghae);
     mockGetMetaVersionHeader(undefined);
     const v3 = await util.getGitHubVersion({
         auth: "",
         url: "https://ghe.example.com",
     });
-    t.deepEqual({ type: "dotcom" }, v3);
+    t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
 });
+ava_1.default("getGitHubAuth", async (t) => {
+    const msgs = [];
+    const mockLogger = {
+        warning: (msg) => msgs.push(msg),
+    };
+    // eslint-disable-next-line @typescript-eslint/no-floating-promises
+    t.throwsAsync(async () => util.getGitHubAuth(mockLogger, "abc", true));
+    process.env.GITHUB_TOKEN = "123";
+    t.is("123", await util.getGitHubAuth(mockLogger, undefined, undefined));
+    t.is(msgs.length, 0);
+    t.is("abc", await util.getGitHubAuth(mockLogger, "abc", undefined));
+    t.is(msgs.length, 1); // warning expected
+    msgs.length = 0;
+    await mockStdInForAuth(t, mockLogger, "def", "def");
+    await mockStdInForAuth(t, mockLogger, "def", "", "def");
+    await mockStdInForAuth(t, mockLogger, "def", "def\n some extra garbage", "ghi");
+    await mockStdInForAuth(t, mockLogger, "defghi", "def", "ghi\n123");
+    await mockStdInForAuthExpectError(t, mockLogger, "");
+    await mockStdInForAuthExpectError(t, mockLogger, "", " ", "abc");
+    await mockStdInForAuthExpectError(t, mockLogger, " def\n some extra garbage", "ghi");
+    t.is(msgs.length, 0);
+});
+async function mockStdInForAuth(t, mockLogger, expected, ...text) {
+    const stdin = stream.Readable.from(text);
+    t.is(expected, await util.getGitHubAuth(mockLogger, undefined, true, stdin));
+}
+async function mockStdInForAuthExpectError(t, mockLogger, ...text) {
+    const stdin = stream.Readable.from(text);
+    await t.throwsAsync(async () => util.getGitHubAuth(mockLogger, undefined, true, stdin));
+}
 //# sourceMappingURL=util.test.js.map
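The new `getGitHubAuth` tests feed the token through `stream.Readable.from(...)`, so the code under test has to cope with multi-chunk input, trailing text after the first whitespace, and empty or whitespace-led streams. Below is a minimal sketch, not the action's own implementation (`readTokenFromStream` is a hypothetical helper), of stdin handling that is consistent with those cases: characters are accumulated until the first whitespace, and a stream that ends or hits whitespace before yielding any token characters is an error.

```js
const stream = require("stream");

// Hypothetical sketch, consistent with the mockStdInForAuth cases above:
// read token characters until the first whitespace; an empty token is an error.
async function readTokenFromStream(stdin) {
    let token = "";
    for await (const chunk of stdin) {
        for (const c of chunk.toString()) {
            if (/\s/.test(c)) {
                if (token.length > 0) {
                    return token; // ignore anything after the first whitespace
                }
                throw new Error("Standard input did not start with a token");
            }
            token += c;
        }
    }
    if (token.length === 0) {
        throw new Error("Standard input did not contain a token");
    }
    return token;
}

// readTokenFromStream(stream.Readable.from(["def\n some extra garbage", "ghi"]))
//     .then(console.log); //=> "def"
```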
File diff suppressed because one or more lines are too long

5555  node_modules/.package-lock.json  (generated, vendored, normal file)
File diff suppressed because it is too large
94  node_modules/decode-uri-component/index.js  (generated, vendored, new file)
New vendored file: the full decode-uri-component implementation, a tolerant replacement for decodeURIComponent that decodes `+` to a space, maps broken byte-order-mark sequences to the Unicode replacement character, and decodes as much of a malformed input as possible instead of throwing.
21  node_modules/decode-uri-component/license  (generated, vendored, new file)
New vendored file: the standard MIT license text, Copyright (c) Sam Verschueren.
37  node_modules/decode-uri-component/package.json  (generated, vendored, new file)
New vendored file: package metadata for decode-uri-component 0.2.0 (MIT, repository SamVerschueren/decode-uri-component, engines node >=0.10).
70  node_modules/decode-uri-component/readme.md  (generated, vendored, new file)
New vendored file: the decode-uri-component readme, covering why the module exists (`+`-to-space handling, BOM replacement, tolerance of invalid input), installation, usage examples and the API.
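As a quick orientation for the new dependency, the examples below show decode-uri-component's documented behaviour (taken from the readme summarised above; this is not code from this repository):

```js
const decodeUriComponent = require("decode-uri-component");

console.log(decodeUriComponent("st%C3%A5le"));   //=> 'ståle'
console.log(decodeUriComponent("%st%C3%A5le%")); //=> '%ståle%' (invalid sequences are kept)
console.log(decodeUriComponent("a+b"));          //=> 'a b'     (plus decodes to a space)
console.log(decodeUriComponent("%C2"));          //=> '\uFFFD'  (a lone lead byte becomes the replacement character)
```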
17  node_modules/filter-obj/index.js  (generated, vendored, new file)
New vendored file: the filter-obj implementation, which copies an object's entries into a new object keeping only those accepted by a predicate function or named in an array of keys.
21  node_modules/filter-obj/license  (generated, vendored, new file)
New vendored file: the standard MIT license text, Copyright (c) Sindre Sorhus.
37  node_modules/filter-obj/package.json  (generated, vendored, new file)
New vendored file: package metadata for filter-obj 1.1.0 (MIT, repository sindresorhus/filter-obj, engines node >=0.10.0).
41  node_modules/filter-obj/readme.md  (generated, vendored, new file)
New vendored file: the filter-obj readme, with installation and usage examples for the predicate and key-array forms.
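A short usage sketch of filter-obj, matching the implementation summarised above (not code from this repository):

```js
const filterObj = require("filter-obj");

const input = { foo: true, bar: false };

// Predicate form: keep entries whose value is strictly true.
console.log(filterObj(input, (key, value) => value === true)); //=> { foo: true }

// Array form: keep only the listed keys, whatever their values.
console.log(filterObj(input, ["bar"])); //=> { bar: false }
```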
86  node_modules/ini/ini.js  (generated, vendored)
Vendored dependency updated: ini's encoder/decoder is restyled to brace-less single-statement if/else bodies, trailing commas are added to option objects, and decode() now skips `__proto__` section and key names (parsing them into a throwaway object instead of the output) so that untrusted INI input can no longer pollute Object.prototype.
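The `__proto__` guards called out above close a prototype-pollution hole: when attacker-controlled INI text names a section or key `__proto__`, an unguarded parser ends up writing through `obj["__proto__"]` onto `Object.prototype`. A minimal sketch of the pattern the patch follows (hypothetical helper, not ini's own code):

```js
// Hypothetical helper illustrating the guard: refuse "__proto__" as a section
// or key name, otherwise parsing untrusted input could add properties to
// every plain object via Object.prototype.
function assignParsedKey(out, section, key, value) {
    if (section === "__proto__" || key === "__proto__") {
        return out; // silently drop the assignment, as patched ini does
    }
    const target = section ? (out[section] = out[section] || {}) : out;
    target[key] = value;
    return out;
}

// Without the guard, input such as "[__proto__]\npolluted = yes" would make
// a `polluted` property visible on every plain object.
```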
23  node_modules/ini/package.json  (generated, vendored)
Vendored dependency updated: ini is bumped from 1.3.5 to 1.3.8; the standard/tap scripts are replaced with eslint-based lint scripts and tap 14, the release scripts are simplified to `prepublishOnly`, and the empty engines and dependencies fields are dropped.
489  node_modules/query-string/index.d.ts  (generated, vendored, new file)
New vendored file: TypeScript declarations for query-string, defining ParseOptions and StringifyOptions (decode/encode, strict, arrayFormat with bracket/index/comma/separator/none, arrayFormatSeparator, sort, parseNumbers, parseBooleans, parseFragmentIdentifier, skipNull, skipEmptyString) together with the parse, parseUrl, stringify, stringifyUrl, extract, pick and exclude functions and their overloads.
404  node_modules/query-string/index.js  (generated, vendored, new file)
New vendored file: the query-string implementation, built on strict-uri-encode, decode-uri-component, split-on-first and filter-obj; it exports parse, stringify, extract, parseUrl, stringifyUrl, pick and exclude with the array formats and options described in the type declarations above.
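For orientation, a short usage sketch of the vendored query-string API; the behaviour shown follows the options documented in its type declarations above and is not code from this repository:

```js
const queryString = require("query-string");

// Parsing ignores a leading "?" and returns a prototype-less object;
// duplicate keys become arrays, and parseNumbers converts numeric values.
console.log(queryString.parse("?foo=1&foo=2&bar=baz", { parseNumbers: true }));
//=> { bar: 'baz', foo: [ 1, 2 ] } (keys are sorted by default)

// arrayFormat controls how arrays are serialised.
console.log(queryString.stringify({ foo: [1, 2, 3] }, { arrayFormat: "bracket" }));
//=> 'foo[]=1&foo[]=2&foo[]=3'
```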
9  node_modules/query-string/license  (generated, vendored, new file)
New vendored file: the standard MIT license text, Copyright (c) Sindre Sorhus.

node_modules/query-string/package.json (generated, vendored, new file, 54 lines)
@@ -0,0 +1,54 @@
{
	"name": "query-string",
	"version": "6.14.0",
	"description": "Parse and stringify URL query strings",
	"license": "MIT",
	"repository": "sindresorhus/query-string",
	"funding": "https://github.com/sponsors/sindresorhus",
	"author": {
		"name": "Sindre Sorhus",
		"email": "sindresorhus@gmail.com",
		"url": "https://sindresorhus.com"
	},
	"engines": {
		"node": ">=6"
	},
	"scripts": {
		"benchmark": "node benchmark.js",
		"test": "xo && ava && tsd"
	},
	"files": [
		"index.js",
		"index.d.ts"
	],
	"keywords": [
		"browser",
		"querystring",
		"query",
		"string",
		"qs",
		"param",
		"parameter",
		"url",
		"parse",
		"stringify",
		"encode",
		"decode",
		"searchparams",
		"filter"
	],
	"dependencies": {
		"decode-uri-component": "^0.2.0",
		"filter-obj": "^1.1.0",
		"split-on-first": "^1.0.0",
		"strict-uri-encode": "^2.0.0"
	},
	"devDependencies": {
		"ava": "^1.4.1",
		"benchmark": "^2.1.4",
		"deep-equal": "^1.0.1",
		"fast-check": "^1.5.0",
		"tsd": "^0.7.3",
		"xo": "^0.24.0"
	}
}

node_modules/query-string/readme.md (generated, vendored, new file, 527 lines)
@@ -0,0 +1,527 @@
# query-string

> Parse and stringify URL [query strings](https://en.wikipedia.org/wiki/Query_string)

<br>

---

<div align="center">
	<p>
		<p>
			<sup>
				<a href="https://github.com/sponsors/sindresorhus">My open source work is supported by the community</a>
			</sup>
		</p>
		<sup>Special thanks to:</sup>
		<br>
		<br>
		<a href="https://standardresume.co/tech">
			<img src="https://sindresorhus.com/assets/thanks/standard-resume-logo.svg" width="200"/>
		</a>
	</p>
</div>

---

<br>

## Install

```
$ npm install query-string
```

This module targets Node.js 6 or later and the latest version of Chrome, Firefox, and Safari. If you want support for older browsers, or, if your project is using create-react-app v1, use version 5: `npm install query-string@5`.

## Usage

```js
const queryString = require('query-string');

console.log(location.search);
//=> '?foo=bar'

const parsed = queryString.parse(location.search);
console.log(parsed);
//=> {foo: 'bar'}

console.log(location.hash);
//=> '#token=bada55cafe'

const parsedHash = queryString.parse(location.hash);
console.log(parsedHash);
//=> {token: 'bada55cafe'}

parsed.foo = 'unicorn';
parsed.ilike = 'pizza';

const stringified = queryString.stringify(parsed);
//=> 'foo=unicorn&ilike=pizza'

location.search = stringified;
// note that `location.search` automatically prepends a question mark
console.log(location.search);
//=> '?foo=unicorn&ilike=pizza'
```

## API

### .parse(string, options?)

Parse a query string into an object. Leading `?` or `#` are ignored, so you can pass `location.search` or `location.hash` directly.

The returned object is created with [`Object.create(null)`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/create) and thus does not have a `prototype`.
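
A minimal sketch of what that means in practice: inherited `Object.prototype` methods such as `hasOwnProperty` are not available on the result, so use the `in` operator or `Object.keys()` instead.

```js
const queryString = require('query-string');

const parsed = queryString.parse('foo=bar');

parsed.hasOwnProperty;
//=> undefined (no prototype chain)

'foo' in parsed;
//=> true
```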

#### options

Type: `object`

##### decode

Type: `boolean`\
Default: `true`

Decode the keys and values. URL components are decoded with [`decode-uri-component`](https://github.com/SamVerschueren/decode-uri-component).

##### arrayFormat

Type: `string`\
Default: `'none'`

- `'bracket'`: Parse arrays with bracket representation:

	```js
	const queryString = require('query-string');

	queryString.parse('foo[]=1&foo[]=2&foo[]=3', {arrayFormat: 'bracket'});
	//=> {foo: ['1', '2', '3']}
	```

- `'index'`: Parse arrays with index representation:

	```js
	const queryString = require('query-string');

	queryString.parse('foo[0]=1&foo[1]=2&foo[3]=3', {arrayFormat: 'index'});
	//=> {foo: ['1', '2', '3']}
	```

- `'comma'`: Parse arrays with elements separated by comma:

	```js
	const queryString = require('query-string');

	queryString.parse('foo=1,2,3', {arrayFormat: 'comma'});
	//=> {foo: ['1', '2', '3']}
	```

- `'separator'`: Parse arrays with elements separated by a custom character:

	```js
	const queryString = require('query-string');

	queryString.parse('foo=1|2|3', {arrayFormat: 'separator', arrayFormatSeparator: '|'});
	//=> {foo: ['1', '2', '3']}
	```

- `'none'`: Parse arrays with elements using duplicate keys:

	```js
	const queryString = require('query-string');

	queryString.parse('foo=1&foo=2&foo=3');
	//=> {foo: ['1', '2', '3']}
	```

##### arrayFormatSeparator

Type: `string`\
Default: `','`

The character used to separate array elements when using `{arrayFormat: 'separator'}`.

##### sort

Type: `Function | boolean`\
Default: `true`

Supports a `Function` as a custom sorting function, or `false` to disable sorting.
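
A sketch of the effect on key order (object literals below are shown in insertion order for illustration):

```js
const queryString = require('query-string');

// Keys are sorted by default...
queryString.parse('b=2&c=3&a=1');
//=> {a: '1', b: '2', c: '3'}

// ...and keep their original order when sorting is disabled.
queryString.parse('b=2&c=3&a=1', {sort: false});
//=> {b: '2', c: '3', a: '1'}
```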

##### parseNumbers

Type: `boolean`\
Default: `false`

```js
const queryString = require('query-string');

queryString.parse('foo=1', {parseNumbers: true});
//=> {foo: 1}
```

Parse the value as a number type instead of string type if it's a number.

##### parseBooleans

Type: `boolean`\
Default: `false`

```js
const queryString = require('query-string');

queryString.parse('foo=true', {parseBooleans: true});
//=> {foo: true}
```

Parse the value as a boolean type instead of string type if it's a boolean.

### .stringify(object, options?)

Stringify an object into a query string, sorting the keys.

#### options

Type: `object`

##### strict

Type: `boolean`\
Default: `true`

Strictly encode URI components with [strict-uri-encode](https://github.com/kevva/strict-uri-encode). It uses [encodeURIComponent](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) if set to false. You probably [don't care](https://github.com/sindresorhus/query-string/issues/42) about this option.
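
A minimal sketch of the difference, using `!` (one of the characters `!'()*` that `encodeURIComponent` leaves unescaped but strict encoding escapes):

```js
const queryString = require('query-string');

queryString.stringify({a: 'foo!'});
//=> 'a=foo%21'

queryString.stringify({a: 'foo!'}, {strict: false});
//=> 'a=foo!'
```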

##### encode

Type: `boolean`\
Default: `true`

[URL encode](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) the keys and values.
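
For example, a minimal sketch:

```js
const queryString = require('query-string');

queryString.stringify({foo: 'hello world'});
//=> 'foo=hello%20world'

queryString.stringify({foo: 'hello world'}, {encode: false});
//=> 'foo=hello world'
```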

##### arrayFormat

Type: `string`\
Default: `'none'`

- `'bracket'`: Serialize arrays using bracket representation:

	```js
	const queryString = require('query-string');

	queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'bracket'});
	//=> 'foo[]=1&foo[]=2&foo[]=3'
	```

- `'index'`: Serialize arrays using index representation:

	```js
	const queryString = require('query-string');

	queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'index'});
	//=> 'foo[0]=1&foo[1]=2&foo[2]=3'
	```

- `'comma'`: Serialize arrays by separating elements with comma:

	```js
	const queryString = require('query-string');

	queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'comma'});
	//=> 'foo=1,2,3'
	```

- `'none'`: Serialize arrays by using duplicate keys:

	```js
	const queryString = require('query-string');

	queryString.stringify({foo: [1, 2, 3]});
	//=> 'foo=1&foo=2&foo=3'
	```

##### arrayFormatSeparator

Type: `string`\
Default: `','`

The character used to separate array elements when using `{arrayFormat: 'separator'}`.
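
The `'separator'` format shown for `.parse()` above is assumed to be accepted here as well (it is not listed explicitly in this readme); a sketch:

```js
const queryString = require('query-string');

queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'separator', arrayFormatSeparator: '|'});
//=> 'foo=1|2|3'
```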

##### sort

Type: `Function | boolean`

Supports a `Function` as a custom sorting function, or `false` to disable sorting.

```js
const queryString = require('query-string');

const order = ['c', 'a', 'b'];

queryString.stringify({a: 1, b: 2, c: 3}, {
	sort: (a, b) => order.indexOf(a) - order.indexOf(b)
});
//=> 'c=3&a=1&b=2'
```

```js
const queryString = require('query-string');

queryString.stringify({b: 1, c: 2, a: 3}, {sort: false});
//=> 'b=1&c=2&a=3'
```

If omitted, keys are sorted using `Array#sort()`, which means converting them to strings and comparing them in Unicode code point order.

##### skipNull

Skip keys with `null` as the value.

Note that keys with `undefined` as the value are always skipped.

Type: `boolean`\
Default: `false`

```js
const queryString = require('query-string');

queryString.stringify({a: 1, b: undefined, c: null, d: 4}, {
	skipNull: true
});
//=> 'a=1&d=4'
```

```js
const queryString = require('query-string');

queryString.stringify({a: undefined, b: null}, {
	skipNull: true
});
//=> ''
```

##### skipEmptyString

Skip keys with an empty string as the value.

Type: `boolean`\
Default: `false`

```js
const queryString = require('query-string');

queryString.stringify({a: 1, b: '', c: '', d: 4}, {
	skipEmptyString: true
});
//=> 'a=1&d=4'
```

```js
const queryString = require('query-string');

queryString.stringify({a: '', b: ''}, {
	skipEmptyString: true
});
//=> ''
```

### .extract(string)

Extract a query string from a URL that can be passed into `.parse()`.

Note: This behaviour can be changed with the `skipNull` option.
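
A minimal sketch, assuming a plain URL without a fragment:

```js
const queryString = require('query-string');

queryString.extract('https://foo.bar?a=1&b=2');
//=> 'a=1&b=2'

queryString.parse(queryString.extract('https://foo.bar?a=1&b=2'));
//=> {a: '1', b: '2'}
```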

### .parseUrl(string, options?)

Extract the URL and the query string as an object.

Returns an object with a `url` and `query` property.

If the `parseFragmentIdentifier` option is `true`, the object will also contain a `fragmentIdentifier` property.

```js
const queryString = require('query-string');

queryString.parseUrl('https://foo.bar?foo=bar');
//=> {url: 'https://foo.bar', query: {foo: 'bar'}}

queryString.parseUrl('https://foo.bar?foo=bar#xyz', {parseFragmentIdentifier: true});
//=> {url: 'https://foo.bar', query: {foo: 'bar'}, fragmentIdentifier: 'xyz'}
```

#### options

Type: `object`

The options are the same as for `.parse()`.

Extra options are as below.

##### parseFragmentIdentifier

Parse the fragment identifier from the URL.

Type: `boolean`\
Default: `false`

```js
const queryString = require('query-string');

queryString.parseUrl('https://foo.bar?foo=bar#xyz', {parseFragmentIdentifier: true});
//=> {url: 'https://foo.bar', query: {foo: 'bar'}, fragmentIdentifier: 'xyz'}
```

### .stringifyUrl(object, options?)

Stringify an object into a URL with a query string, sorting the keys. The inverse of [`.parseUrl()`](https://github.com/sindresorhus/query-string#parseurlstring-options).

The `options` are the same as for `.stringify()`.

Returns a string with the URL and a query string.

Query items in the `query` property override queries in the `url` property.

The `fragmentIdentifier` property overrides the fragment identifier in the `url` property.

```js
queryString.stringifyUrl({url: 'https://foo.bar', query: {foo: 'bar'}});
//=> 'https://foo.bar?foo=bar'

queryString.stringifyUrl({url: 'https://foo.bar?foo=baz', query: {foo: 'bar'}});
//=> 'https://foo.bar?foo=bar'

queryString.stringifyUrl({
	url: 'https://foo.bar',
	query: {
		top: 'foo'
	},
	fragmentIdentifier: 'bar'
});
//=> 'https://foo.bar?top=foo#bar'
```

#### object

Type: `object`

##### url

Type: `string`

The URL to stringify.

##### query

Type: `object`

Query items to add to the URL.

### .pick(url, keys, options?)
### .pick(url, filter, options?)

Pick query parameters from a URL.

Returns a string with the new URL.

```js
const queryString = require('query-string');

queryString.pick('https://foo.bar?foo=1&bar=2#hello', ['foo']);
//=> 'https://foo.bar?foo=1#hello'

queryString.pick('https://foo.bar?foo=1&bar=2#hello', (name, value) => value === 2, {parseNumbers: true});
//=> 'https://foo.bar?bar=2#hello'
```

### .exclude(url, keys, options?)
### .exclude(url, filter, options?)

Exclude query parameters from a URL.

Returns a string with the new URL.

```js
const queryString = require('query-string');

queryString.exclude('https://foo.bar?foo=1&bar=2#hello', ['foo']);
//=> 'https://foo.bar?bar=2#hello'

queryString.exclude('https://foo.bar?foo=1&bar=2#hello', (name, value) => value === 2, {parseNumbers: true});
//=> 'https://foo.bar?foo=1#hello'
```

#### url

Type: `string`

The URL containing the query parameters to filter.

#### keys

Type: `string[]`

The names of the query parameters to keep (for `.pick()`) or remove (for `.exclude()`).

#### filter

Type: `(key, value) => boolean`

A filter predicate that will be provided the name of each query parameter and its value. The `parseNumbers` and `parseBooleans` options also affect `value`.

#### options

Type: `object`

[Parse options](#options) and [stringify options](#options-1).

## Nesting

This module intentionally doesn't support nesting as it's not spec'd and varies between implementations, which causes a lot of [edge cases](https://github.com/visionmedia/node-querystring/issues).

You're much better off just converting the object to a JSON string:

```js
const queryString = require('query-string');

queryString.stringify({
	foo: 'bar',
	nested: JSON.stringify({
		unicorn: 'cake'
	})
});
//=> 'foo=bar&nested=%7B%22unicorn%22%3A%22cake%22%7D'
```
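
Reading the value back is then just `JSON.parse` on the parsed field; a minimal sketch:

```js
const queryString = require('query-string');

const {nested} = queryString.parse('foo=bar&nested=%7B%22unicorn%22%3A%22cake%22%7D');

JSON.parse(nested);
//=> {unicorn: 'cake'}
```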

However, there is support for multiple instances of the same key:

```js
const queryString = require('query-string');

queryString.parse('likes=cake&name=bob&likes=icecream');
//=> {likes: ['cake', 'icecream'], name: 'bob'}

queryString.stringify({color: ['taupe', 'chartreuse'], id: '515'});
//=> 'color=taupe&color=chartreuse&id=515'
```

## Falsy values

Sometimes you want to unset a key, or maybe just make it present without assigning a value to it. Here is how falsy values are stringified:

```js
const queryString = require('query-string');

queryString.stringify({foo: false});
//=> 'foo=false'

queryString.stringify({foo: null});
//=> 'foo'

queryString.stringify({foo: undefined});
//=> ''
```

## query-string for enterprise

Available as part of the Tidelift Subscription.

The maintainers of query-string and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-query-string?utm_source=npm-query-string&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)
29
node_modules/split-on-first/index.d.ts
generated
vendored
Normal file
29
node_modules/split-on-first/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
/**
|
||||||
|
Split a string on the first occurrence of a given separator.
|
||||||
|
|
||||||
|
@param string - The string to split.
|
||||||
|
@param separator - The separator to split on.
|
||||||
|
|
||||||
|
@example
|
||||||
|
```
|
||||||
|
import splitOnFirst = require('split-on-first');
|
||||||
|
|
||||||
|
splitOnFirst('a-b-c', '-');
|
||||||
|
//=> ['a', 'b-c']
|
||||||
|
|
||||||
|
splitOnFirst('key:value:value2', ':');
|
||||||
|
//=> ['key', 'value:value2']
|
||||||
|
|
||||||
|
splitOnFirst('a---b---c', '---');
|
||||||
|
//=> ['a', 'b---c']
|
||||||
|
|
||||||
|
splitOnFirst('a-b-c', '+');
|
||||||
|
//=> ['a-b-c']
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
declare function splitOnFirst(
|
||||||
|
string: string,
|
||||||
|
separator: string
|
||||||
|
): [string, string?];
|
||||||
|
|
||||||
|
export = splitOnFirst;
|
||||||
22
node_modules/split-on-first/index.js
generated
vendored
Normal file
22
node_modules/split-on-first/index.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = (string, separator) => {
|
||||||
|
if (!(typeof string === 'string' && typeof separator === 'string')) {
|
||||||
|
throw new TypeError('Expected the arguments to be of type `string`');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (separator === '') {
|
||||||
|
return [string];
|
||||||
|
}
|
||||||
|
|
||||||
|
const separatorIndex = string.indexOf(separator);
|
||||||
|
|
||||||
|
if (separatorIndex === -1) {
|
||||||
|
return [string];
|
||||||
|
}
|
||||||
|
|
||||||
|
return [
|
||||||
|
string.slice(0, separatorIndex),
|
||||||
|
string.slice(separatorIndex + separator.length)
|
||||||
|
];
|
||||||
|
};
|
||||||
9
node_modules/split-on-first/license
generated
vendored
Normal file
9
node_modules/split-on-first/license
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
36
node_modules/split-on-first/package.json
generated
vendored
Normal file
36
node_modules/split-on-first/package.json
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
{
|
||||||
|
"name": "split-on-first",
|
||||||
|
"version": "1.1.0",
|
||||||
|
"description": "Split a string on the first occurance of a given separator",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "sindresorhus/split-on-first",
|
||||||
|
"author": {
|
||||||
|
"name": "Sindre Sorhus",
|
||||||
|
"email": "sindresorhus@gmail.com",
|
||||||
|
"url": "sindresorhus.com"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "xo && ava && tsd"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js",
|
||||||
|
"index.d.ts"
|
||||||
|
],
|
||||||
|
"keywords": [
|
||||||
|
"split",
|
||||||
|
"string",
|
||||||
|
"first",
|
||||||
|
"occurrence",
|
||||||
|
"separator",
|
||||||
|
"delimiter",
|
||||||
|
"text"
|
||||||
|
],
|
||||||
|
"devDependencies": {
|
||||||
|
"ava": "^1.4.1",
|
||||||
|
"tsd": "^0.7.2",
|
||||||
|
"xo": "^0.24.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
58
node_modules/split-on-first/readme.md
generated
vendored
Normal file
58
node_modules/split-on-first/readme.md
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
# split-on-first [](https://travis-ci.com/sindresorhus/split-on-first)
|
||||||
|
|
||||||
|
> Split a string on the first occurrence of a given separator
|
||||||
|
|
||||||
|
This is similar to [`String#split()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/split), but that one splits on all the occurrences, not just the first one.
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install split-on-first
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
const splitOnFirst = require('split-on-first');
|
||||||
|
|
||||||
|
splitOnFirst('a-b-c', '-');
|
||||||
|
//=> ['a', 'b-c']
|
||||||
|
|
||||||
|
splitOnFirst('key:value:value2', ':');
|
||||||
|
//=> ['key', 'value:value2']
|
||||||
|
|
||||||
|
splitOnFirst('a---b---c', '---');
|
||||||
|
//=> ['a', 'b---c']
|
||||||
|
|
||||||
|
splitOnFirst('a-b-c', '+');
|
||||||
|
//=> ['a-b-c']
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
### splitOnFirst(string, separator)
|
||||||
|
|
||||||
|
#### string
|
||||||
|
|
||||||
|
Type: `string`
|
||||||
|
|
||||||
|
The string to split.
|
||||||
|
|
||||||
|
#### separator
|
||||||
|
|
||||||
|
Type: `string`
|
||||||
|
|
||||||
|
The separator to split on.
|
||||||
|
|
||||||
|
|
||||||
|
## Related
|
||||||
|
|
||||||
|
- [split-at](https://github.com/sindresorhus/split-at) - Split a string at one or more indices
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||||
2
node_modules/strict-uri-encode/index.js
generated
vendored
Normal file
2
node_modules/strict-uri-encode/index.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
'use strict';
|
||||||
|
module.exports = str => encodeURIComponent(str).replace(/[!'()*]/g, x => `%${x.charCodeAt(0).toString(16).toUpperCase()}`);
|
||||||
21
node_modules/strict-uri-encode/license
generated
vendored
Normal file
21
node_modules/strict-uri-encode/license
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) Kevin Martensson <kevinmartensson@gmail.com> (github.com/kevva)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
31
node_modules/strict-uri-encode/package.json
generated
vendored
Normal file
31
node_modules/strict-uri-encode/package.json
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
{
|
||||||
|
"name": "strict-uri-encode",
|
||||||
|
"version": "2.0.0",
|
||||||
|
"description": "A stricter URI encode adhering to RFC 3986",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "kevva/strict-uri-encode",
|
||||||
|
"author": {
|
||||||
|
"name": "Kevin Mårtensson",
|
||||||
|
"email": "kevinmartensson@gmail.com",
|
||||||
|
"url": "github.com/kevva"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=4"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "xo && ava"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"keywords": [
|
||||||
|
"component",
|
||||||
|
"encode",
|
||||||
|
"RFC3986",
|
||||||
|
"uri"
|
||||||
|
],
|
||||||
|
"devDependencies": {
|
||||||
|
"ava": "*",
|
||||||
|
"xo": "*"
|
||||||
|
}
|
||||||
|
}
|
||||||
39
node_modules/strict-uri-encode/readme.md
generated
vendored
Normal file
39
node_modules/strict-uri-encode/readme.md
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
# strict-uri-encode [](https://travis-ci.org/kevva/strict-uri-encode)
|
||||||
|
|
||||||
|
> A stricter URI encode adhering to [RFC 3986](http://tools.ietf.org/html/rfc3986)
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install --save strict-uri-encode
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
const strictUriEncode = require('strict-uri-encode');
|
||||||
|
|
||||||
|
strictUriEncode('unicorn!foobar');
|
||||||
|
//=> 'unicorn%21foobar'
|
||||||
|
|
||||||
|
strictUriEncode('unicorn*foobar');
|
||||||
|
//=> 'unicorn%2Afoobar'
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
### strictUriEncode(string)
|
||||||
|
|
||||||
|
#### string
|
||||||
|
|
||||||
|
Type: `string`, `number`
|
||||||
|
|
||||||
|
String to URI encode.
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © [Kevin Mårtensson](http://github.com/kevva)
|
||||||
7
node_modules/y18n/CHANGELOG.md
generated
vendored
7
node_modules/y18n/CHANGELOG.md
generated
vendored
@@ -2,6 +2,13 @@
|
|||||||
|
|
||||||
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
|
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
|
||||||
|
|
||||||
|
|
||||||
|
### 4.0.1 (2020-11-30)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
* address prototype pollution issue ([#108](https://www.github.com/yargs/y18n/issues/108)) ([a9ac604](https://www.github.com/yargs/y18n/commit/a9ac604abf756dec9687be3843e2c93bfe581f25))
|
||||||
|
|
||||||
<a name="4.0.0"></a>
|
<a name="4.0.0"></a>
|
||||||
# [4.0.0](https://github.com/yargs/y18n/compare/v3.2.1...v4.0.0) (2017-10-10)
|
# [4.0.0](https://github.com/yargs/y18n/compare/v3.2.1...v4.0.0) (2017-10-10)
|
||||||
|
|
||||||
|
|||||||
2
node_modules/y18n/index.js
generated
vendored
2
node_modules/y18n/index.js
generated
vendored
@@ -11,7 +11,7 @@ function Y18N (opts) {
|
|||||||
this.fallbackToLanguage = typeof opts.fallbackToLanguage === 'boolean' ? opts.fallbackToLanguage : true
|
this.fallbackToLanguage = typeof opts.fallbackToLanguage === 'boolean' ? opts.fallbackToLanguage : true
|
||||||
|
|
||||||
// internal stuff.
|
// internal stuff.
|
||||||
this.cache = {}
|
this.cache = Object.create(null)
|
||||||
this.writeQueue = []
|
this.writeQueue = []
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
2
node_modules/y18n/package.json
generated
vendored
2
node_modules/y18n/package.json
generated
vendored
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "y18n",
|
"name": "y18n",
|
||||||
"version": "4.0.0",
|
"version": "4.0.1",
|
||||||
"description": "the bare-bones internationalization library used by yargs",
|
"description": "the bare-bones internationalization library used by yargs",
|
||||||
"main": "index.js",
|
"main": "index.js",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
|
|||||||
46
package-lock.json
generated
46
package-lock.json
generated
@@ -1348,6 +1348,11 @@
|
|||||||
"integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=",
|
"integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"decode-uri-component": {
|
||||||
|
"version": "0.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz",
|
||||||
|
"integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU="
|
||||||
|
},
|
||||||
"decompress-response": {
|
"decompress-response": {
|
||||||
"version": "3.3.0",
|
"version": "3.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz",
|
||||||
@@ -2077,8 +2082,7 @@
|
|||||||
"fast-deep-equal": {
|
"fast-deep-equal": {
|
||||||
"version": "3.1.3",
|
"version": "3.1.3",
|
||||||
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
|
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
|
||||||
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
|
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"fast-diff": {
|
"fast-diff": {
|
||||||
"version": "1.2.0",
|
"version": "1.2.0",
|
||||||
@@ -2153,6 +2157,11 @@
|
|||||||
"to-regex-range": "^5.0.1"
|
"to-regex-range": "^5.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"filter-obj": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz",
|
||||||
|
"integrity": "sha1-mzERErxsYSehbgFsbF1/GeCAXFs="
|
||||||
|
},
|
||||||
"find-up": {
|
"find-up": {
|
||||||
"version": "4.1.0",
|
"version": "4.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
|
||||||
@@ -2435,9 +2444,9 @@
|
|||||||
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
|
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
|
||||||
},
|
},
|
||||||
"ini": {
|
"ini": {
|
||||||
"version": "1.3.5",
|
"version": "1.3.8",
|
||||||
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
|
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
|
||||||
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==",
|
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"irregular-plurals": {
|
"irregular-plurals": {
|
||||||
@@ -3518,6 +3527,17 @@
|
|||||||
"escape-goat": "^2.0.0"
|
"escape-goat": "^2.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"query-string": {
|
||||||
|
"version": "6.14.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/query-string/-/query-string-6.14.0.tgz",
|
||||||
|
"integrity": "sha512-In3o+lUxlgejoVJgwEdYtdxrmlL0cQWJXj0+kkI7RWVo7hg5AhFtybeKlC9Dpgbr8eOC4ydpEh8017WwyfzqVQ==",
|
||||||
|
"requires": {
|
||||||
|
"decode-uri-component": "^0.2.0",
|
||||||
|
"filter-obj": "^1.1.0",
|
||||||
|
"split-on-first": "^1.0.0",
|
||||||
|
"strict-uri-encode": "^2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"rc": {
|
"rc": {
|
||||||
"version": "1.2.8",
|
"version": "1.2.8",
|
||||||
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
|
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
|
||||||
@@ -3957,11 +3977,21 @@
|
|||||||
"integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==",
|
"integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"split-on-first": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw=="
|
||||||
|
},
|
||||||
"sprintf-js": {
|
"sprintf-js": {
|
||||||
"version": "1.0.3",
|
"version": "1.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
|
||||||
"integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw="
|
"integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw="
|
||||||
},
|
},
|
||||||
|
"strict-uri-encode": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz",
|
||||||
|
"integrity": "sha1-ucczDHBChi9rFC3CdLvMWGbONUY="
|
||||||
|
},
|
||||||
"string-width": {
|
"string-width": {
|
||||||
"version": "4.2.0",
|
"version": "4.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz",
|
||||||
@@ -4476,9 +4506,9 @@
|
|||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"y18n": {
|
"y18n": {
|
||||||
"version": "4.0.0",
|
"version": "4.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz",
|
||||||
"integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==",
|
"integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"yargs": {
|
"yargs": {
|
||||||
|
|||||||
@@ -29,6 +29,7 @@
|
|||||||
"@octokit/types": "^5.5.0",
|
"@octokit/types": "^5.5.0",
|
||||||
"commander": "^6.0.0",
|
"commander": "^6.0.0",
|
||||||
"console-log-level": "^1.4.1",
|
"console-log-level": "^1.4.1",
|
||||||
|
"fast-deep-equal": "^3.1.3",
|
||||||
"file-url": "^3.0.0",
|
"file-url": "^3.0.0",
|
||||||
"fs": "0.0.1-security",
|
"fs": "0.0.1-security",
|
||||||
"js-yaml": "^3.13.1",
|
"js-yaml": "^3.13.1",
|
||||||
@@ -36,6 +37,7 @@
|
|||||||
"long": "^4.0.0",
|
"long": "^4.0.0",
|
||||||
"md5": "^2.2.1",
|
"md5": "^2.2.1",
|
||||||
"path": "^0.12.7",
|
"path": "^0.12.7",
|
||||||
|
"query-string": "^6.14.0",
|
||||||
"semver": "^7.3.2",
|
"semver": "^7.3.2",
|
||||||
"uuid": "^8.3.0",
|
"uuid": "^8.3.0",
|
||||||
"zlib": "^1.0.5"
|
"zlib": "^1.0.5"
|
||||||
|
|||||||
@@ -31,8 +31,7 @@ python3 -m pip install --user pipenv
|
|||||||
if command -v python2 &> /dev/null; then
|
if command -v python2 &> /dev/null; then
|
||||||
# Setup Python 2 dependency installation tools.
|
# Setup Python 2 dependency installation tools.
|
||||||
# The Ubuntu 20.04 GHA environment does not come with a Python 2 pip
|
# The Ubuntu 20.04 GHA environment does not come with a Python 2 pip
|
||||||
curl https://bootstrap.pypa.io/get-pip.py --output get-pip.py
|
curl --location --fail https://bootstrap.pypa.io/pip/2.7/get-pip.py | python2
|
||||||
python2 get-pip.py
|
|
||||||
|
|
||||||
python2 -m pip install --user --upgrade pip setuptools wheel
|
python2 -m pip install --user --upgrade pip setuptools wheel
|
||||||
|
|
||||||
|
|||||||
@@ -19,6 +19,21 @@ predicate isSafeActionLib(string lib) {
|
|||||||
lib.matches("@actions/exec/%")
|
lib.matches("@actions/exec/%")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Matches libraries that are not always safe to use outside of actions
|
||||||
|
* but can be made so by setting certain environment variables.
|
||||||
|
*/
|
||||||
|
predicate isSafeActionLibWithActionsEnvVars(string lib) {
|
||||||
|
lib = "@actions/tool-cache"
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Matches the names of runner commands that set action env vars
|
||||||
|
*/
|
||||||
|
predicate commandSetsActionsEnvVars(string commandName) {
|
||||||
|
commandName = "init" or commandName = "autobuild" or commandName = "analyze"
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* An import from a library that is meant for GitHub Actions and
|
* An import from a library that is meant for GitHub Actions and
|
||||||
* we do not want to be using outside of actions.
|
* we do not want to be using outside of actions.
|
||||||
@@ -45,6 +60,32 @@ class RunnerEntrypoint extends Function {
|
|||||||
RunnerEntrypoint() {
|
RunnerEntrypoint() {
|
||||||
getFile().getAbsolutePath().matches("%/runner.ts")
|
getFile().getAbsolutePath().matches("%/runner.ts")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Does this runner entry point set the RUNNER_TEMP and
|
||||||
|
* RUNNER_TOOL_CACHE env vars which make some actions libraries
|
||||||
|
* safe to use outside of actions.
|
||||||
|
* See "setupActionsVars" in "util.ts".
|
||||||
|
*/
|
||||||
|
predicate setsActionsEnvVars() {
|
||||||
|
// This is matching code of the following format, where "this"
|
||||||
|
// is the function being passed to the "action" method.
|
||||||
|
//
|
||||||
|
// program
|
||||||
|
// .command("init")
|
||||||
|
// ...
|
||||||
|
// .action(async (cmd: InitArgs) => {
|
||||||
|
// ...
|
||||||
|
// })
|
||||||
|
exists(MethodCallExpr actionCall,
|
||||||
|
MethodCallExpr commandCall |
|
||||||
|
commandCall.getMethodName() = "command" and
|
||||||
|
commandCall.getReceiver().(VarAccess).getVariable().getName() = "program" and
|
||||||
|
commandSetsActionsEnvVars(commandCall.getArgument(0).(StringLiteral).getValue()) and
|
||||||
|
actionCall.getMethodName() = "action" and
|
||||||
|
actionCall.getReceiver().getAChildExpr*() = commandCall and
|
||||||
|
actionCall.getArgument(0).getAChildExpr*() = this)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -114,6 +155,7 @@ Function calledBy(Function f) {
|
|||||||
from VarAccess v, ActionsLibImport actionsLib, RunnerEntrypoint runnerEntry
|
from VarAccess v, ActionsLibImport actionsLib, RunnerEntrypoint runnerEntry
|
||||||
where actionsLib.getAProvidedVariable() = v.getVariable()
|
where actionsLib.getAProvidedVariable() = v.getVariable()
|
||||||
and getAFunctionChildExpr(calledBy*(runnerEntry)) = v
|
and getAFunctionChildExpr(calledBy*(runnerEntry)) = v
|
||||||
|
and not (isSafeActionLibWithActionsEnvVars(actionsLib.getName()) and runnerEntry.setsActionsEnvVars())
|
||||||
select v, "$@ is imported from $@ and this code can be called from $@",
|
select v, "$@ is imported from $@ and this code can be called from $@",
|
||||||
v, v.getName(),
|
v, v.getName(),
|
||||||
actionsLib, actionsLib.getName(),
|
actionsLib, actionsLib.getName(),
|
||||||
|
|||||||
36
runner/package-lock.json
generated
36
runner/package-lock.json
generated
@@ -1194,24 +1194,24 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"elliptic": {
|
"elliptic": {
|
||||||
"version": "6.5.3",
|
"version": "6.5.4",
|
||||||
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.3.tgz",
|
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz",
|
||||||
"integrity": "sha512-IMqzv5wNQf+E6aHeIqATs0tOLeOTwj1QKbRcS3jBbYkl5oLAserA8yJTT7/VyHUYG91PRmPyeQDObKLPpeS4dw==",
|
"integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"requires": {
|
"requires": {
|
||||||
"bn.js": "^4.4.0",
|
"bn.js": "^4.11.9",
|
||||||
"brorand": "^1.0.1",
|
"brorand": "^1.1.0",
|
||||||
"hash.js": "^1.0.0",
|
"hash.js": "^1.0.0",
|
||||||
"hmac-drbg": "^1.0.0",
|
"hmac-drbg": "^1.0.1",
|
||||||
"inherits": "^2.0.1",
|
"inherits": "^2.0.4",
|
||||||
"minimalistic-assert": "^1.0.0",
|
"minimalistic-assert": "^1.0.1",
|
||||||
"minimalistic-crypto-utils": "^1.0.0"
|
"minimalistic-crypto-utils": "^1.0.1"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"bn.js": {
|
"bn.js": {
|
||||||
"version": "4.11.9",
|
"version": "4.12.0",
|
||||||
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz",
|
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
|
||||||
"integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==",
|
"integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==",
|
||||||
"dev": true
|
"dev": true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -2072,9 +2072,9 @@
|
|||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"ini": {
|
"ini": {
|
||||||
"version": "1.3.5",
|
"version": "1.3.8",
|
||||||
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
|
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
|
||||||
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==",
|
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"interpret": {
|
"interpret": {
|
||||||
@@ -4684,9 +4684,9 @@
|
|||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"y18n": {
|
"y18n": {
|
||||||
"version": "4.0.0",
|
"version": "4.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz",
|
||||||
"integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==",
|
"integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"yallist": {
|
"yallist": {
|
||||||
|
|||||||
@@ -1,9 +1,17 @@
|
|||||||
import test from "ava";
|
import test from "ava";
|
||||||
|
import * as yaml from "js-yaml";
|
||||||
import sinon from "sinon";
|
import sinon from "sinon";
|
||||||
|
|
||||||
import * as actionsutil from "./actions-util";
|
import * as actionsutil from "./actions-util";
|
||||||
import { setupTests } from "./testing-utils";
|
import { setupTests } from "./testing-utils";
|
||||||
|
|
||||||
|
function errorCodes(
|
||||||
|
actual: actionsutil.CodedError[],
|
||||||
|
expected: actionsutil.CodedError[]
|
||||||
|
): [string[], string[]] {
|
||||||
|
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||||
|
}
|
||||||
|
|
||||||
setupTests(test);
|
setupTests(test);
|
||||||
|
|
||||||
test("getRef() throws on the empty string", async (t) => {
|
test("getRef() throws on the empty string", async (t) => {
|
||||||
@@ -17,20 +25,40 @@ test("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t)
|
|||||||
process.env["GITHUB_REF"] = expectedRef;
|
process.env["GITHUB_REF"] = expectedRef;
|
||||||
process.env["GITHUB_SHA"] = currentSha;
|
process.env["GITHUB_SHA"] = currentSha;
|
||||||
|
|
||||||
sinon.stub(actionsutil, "getCommitOid").resolves(currentSha);
|
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||||
|
callback.withArgs("HEAD").resolves(currentSha);
|
||||||
|
|
||||||
const actualRef = await actionsutil.getRef();
|
const actualRef = await actionsutil.getRef();
|
||||||
t.deepEqual(actualRef, expectedRef);
|
t.deepEqual(actualRef, expectedRef);
|
||||||
|
callback.restore();
|
||||||
});
|
});
|
||||||
|
|
||||||
test("getRef() returns head PR ref if GITHUB_SHA not currently checked out", async (t) => {
|
test("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
||||||
|
const expectedRef = "refs/pull/1/merge";
|
||||||
|
process.env["GITHUB_REF"] = expectedRef;
|
||||||
|
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||||
|
const sha = "a".repeat(40);
|
||||||
|
|
||||||
|
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||||
|
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
||||||
|
callback.withArgs("HEAD").resolves(sha);
|
||||||
|
|
||||||
|
const actualRef = await actionsutil.getRef();
|
||||||
|
t.deepEqual(actualRef, expectedRef);
|
||||||
|
callback.restore();
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
||||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||||
|
|
||||||
sinon.stub(actionsutil, "getCommitOid").resolves("b".repeat(40));
|
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||||
|
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
|
||||||
|
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||||
|
|
||||||
const actualRef = await actionsutil.getRef();
|
const actualRef = await actionsutil.getRef();
|
||||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||||
|
callback.restore();
|
||||||
});
|
});
|
||||||
|
|
||||||
test("getAnalysisKey() when a local run", async (t) => {
|
test("getAnalysisKey() when a local run", async (t) => {
|
||||||
@@ -82,265 +110,302 @@ test("prepareEnvironment() when a local run", (t) => {
|
|||||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
|
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validateWorkflow() when on is missing", (t) => {
|
test("getWorkflowErrors() when on is empty", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({});
|
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
||||||
|
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validateWorkflow() when on.push is missing", (t) => {
|
test("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({ on: {} });
|
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
||||||
|
|
||||||
console.log(errors);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingHooks]);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validateWorkflow() when on.push is an array missing pull_request", (t) => {
|
test("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({ on: ["push"] });
|
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
||||||
|
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPullRequestHook]);
|
t.deepEqual(
|
||||||
|
...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook])
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validateWorkflow() when on.push is an array missing push", (t) => {
|
test("getWorkflowErrors() when on.push is valid", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({ on: ["pull_request"] });
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
|
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.MissingPushHook]);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("validateWorkflow() when on.push is valid", (t) => {
|
|
||||||
const errors = actionsutil.validateWorkflow({
|
|
||||||
on: ["push", "pull_request"],
|
on: ["push", "pull_request"],
|
||||||
});
|
});
|
||||||
|
|
||||||
t.deepEqual(errors, []);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validateWorkflow() when on.push is a valid superset", (t) => {
|
test("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: ["push", "pull_request", "schedule"],
|
on: ["push", "pull_request", "schedule"],
|
||||||
});
|
});
|
||||||
|
|
||||||
t.deepEqual(errors, []);
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validateWorkflow() when on.push should not have a path", (t) => {
|
test("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||||
const errors = actionsutil.validateWorkflow({
|
const errors = actionsutil.getWorkflowErrors({
|
||||||
on: {
|
on: {
|
||||||
push: { branches: ["main"], paths: ["test/*"] },
|
push: { branches: ["main"], paths: ["test/*"] },
|
||||||
pull_request: { branches: ["main"] },
|
pull_request: { branches: ["main"] },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
t.deepEqual(errors, [actionsutil.WorkflowErrors.PathsSpecified]);
|
t.deepEqual(
|
||||||
|
...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified])
|
||||||
|
);
|
||||||
});
|
});
|
||||||
 
-test("validateWorkflow() when on.push is a correct object", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.push is a correct object", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
   });
 
-  t.deepEqual(errors, []);
+  t.deepEqual(...errorCodes(errors, []));
 });
 
-test("validateWorkflow() when on.pull_requests is a string", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.pull_requests is a string", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
   });
 
-  t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+  t.deepEqual(
+    ...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
+  );
 });
 
-test("validateWorkflow() when on.pull_requests is a string and correct", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: { push: { branches: "*" }, pull_request: { branches: "*" } },
   });
 
-  t.deepEqual(errors, []);
+  t.deepEqual(...errorCodes(errors, []));
 });
 
-test("validateWorkflow() when on.push is correct with empty objects", (t) => {
-  const errors = actionsutil.validateWorkflow({
-    on: { push: undefined, pull_request: undefined },
-  });
+test("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
+  const errors = actionsutil.getWorkflowErrors(
+    yaml.safeLoad(`
+on:
+  push:
+  pull_request:
+`)
+  );
 
-  t.deepEqual(errors, []);
+  t.deepEqual(...errorCodes(errors, []));
 });
 
-test("validateWorkflow() when on.push is mismatched", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.push is mismatched", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: {
       push: { branches: ["main"] },
       pull_request: { branches: ["feature"] },
     },
   });
 
-  t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+  t.deepEqual(
+    ...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
+  );
 });
 
-test("validateWorkflow() when on.push is not mismatched", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.push is not mismatched", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: {
       push: { branches: ["main", "feature"] },
       pull_request: { branches: ["main"] },
     },
   });
 
-  t.deepEqual(errors, []);
+  t.deepEqual(...errorCodes(errors, []));
 });
 
-test("validateWorkflow() when on.push is mismatched for pull_request", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: {
       push: { branches: ["main"] },
       pull_request: { branches: ["main", "feature"] },
     },
   });
 
-  t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+  t.deepEqual(
+    ...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
+  );
 });
 
-test("validateWorkflow() for a range of malformed workflows", (t) => {
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: {
-        push: 1,
-        pull_request: 1,
-      },
-    } as any),
-    []
-  );
-
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: 1,
-    } as any),
-    [actionsutil.WorkflowErrors.MissingHooks]
-  );
-
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: 1,
-      jobs: 1,
-    } as any),
-    [actionsutil.WorkflowErrors.MissingHooks]
-  );
-
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: 1,
-      jobs: [1],
-    } as any),
-    [actionsutil.WorkflowErrors.MissingHooks]
-  );
-
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: 1,
-      jobs: { 1: 1 },
-    } as any),
-    [actionsutil.WorkflowErrors.MissingHooks]
-  );
-
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: 1,
-      jobs: { test: 1 },
-    } as any),
-    [actionsutil.WorkflowErrors.MissingHooks]
-  );
-
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: 1,
-      jobs: { test: [1] },
-    } as any),
-    [actionsutil.WorkflowErrors.MissingHooks]
-  );
-
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: 1,
-      jobs: { test: { steps: 1 } },
-    } as any),
-    [actionsutil.WorkflowErrors.MissingHooks]
-  );
-
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: 1,
-      jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
-    } as any),
-    [actionsutil.WorkflowErrors.MissingHooks]
-  );
-
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: 1,
-      jobs: { test: [undefined] },
-    } as any),
-    [actionsutil.WorkflowErrors.MissingHooks]
-  );
-
-  t.deepEqual(actionsutil.validateWorkflow(1 as any), [
-    actionsutil.WorkflowErrors.MissingHooks,
-  ]);
-
-  t.deepEqual(
-    actionsutil.validateWorkflow({
-      on: {
-        push: {
-          branches: 1,
-        },
-        pull_request: {
-          branches: 1,
-        },
-      },
-    } as any),
-    []
-  );
+test("getWorkflowErrors() for a range of malformed workflows", (t) => {
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: {
+          push: 1,
+          pull_request: 1,
+        },
+      } as any),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: 1,
+      } as any),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: 1,
+        jobs: 1,
+      } as any),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: 1,
+        jobs: [1],
+      } as any),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: 1,
+        jobs: { 1: 1 },
+      } as any),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: 1,
+        jobs: { test: 1 },
+      } as any),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: 1,
+        jobs: { test: [1] },
+      } as any),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: 1,
+        jobs: { test: { steps: 1 } },
+      } as any),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: 1,
+        jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
+      } as any),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: 1,
+        jobs: { test: [undefined] },
+      } as any),
+      []
+    )
+  );
+
+  t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1 as any), []));
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors({
+        on: {
+          push: {
+            branches: 1,
+          },
+          pull_request: {
+            branches: 1,
+          },
+        },
+      } as any),
+      []
+    )
+  );
 });
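These malformed-workflow cases pass arbitrary values through `as any`, so `getWorkflowErrors()` has to tolerate every field being absent or of the wrong type. The `Workflow` type itself is not reproduced in these hunks; the shape below is only inferred from the property accesses in the implementation further down (`doc.on`, `doc.on.push?.paths`, `doc.on.push?.branches`, `doc.jobs?.[jobName]?.steps`), and the real declaration may differ.

```ts
// Inferred shape only – the actual Workflow type is declared elsewhere in the codebase.
interface Workflow {
  name?: string;
  on?:
    | string
    | string[]
    | {
        push?: {
          branches?: string | string[] | null;
          paths?: string[];
          "paths-ignore"?: string[];
        };
        pull_request?: { branches?: string | string[] | null } | null;
        [event: string]: unknown;
      };
  jobs?: { [jobName: string]: { steps?: Array<{ run?: string }> } };
}
```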
 
-test("validateWorkflow() when on.pull_request for every branch but push specifies branches", (t) => {
-  const errors = actionsutil.validateWorkflow({
-    on: {
-      push: { branches: ["main"] },
-      pull_request: null,
-    },
-  });
+test("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
+  const errors = actionsutil.getWorkflowErrors(
+    yaml.safeLoad(`
+name: "CodeQL"
+on:
+  push:
+    branches: ["main"]
+  pull_request:
+`)
+  );
 
-  t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+  t.deepEqual(
+    ...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
+  );
 });
 
-test("validateWorkflow() when on.pull_request for wildcard branches", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: {
       push: { branches: ["feature/*"] },
       pull_request: { branches: "feature/moose" },
     },
   });
 
-  t.deepEqual(errors, []);
+  t.deepEqual(...errorCodes(errors, []));
 });
 
-test("validateWorkflow() when on.pull_request for mismatched wildcard branches", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
+  const errors = actionsutil.getWorkflowErrors({
     on: {
       push: { branches: ["feature/moose"] },
       pull_request: { branches: "feature/*" },
     },
   });
 
-  t.deepEqual(errors, [actionsutil.WorkflowErrors.MismatchedBranches]);
+  t.deepEqual(
+    ...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
+  );
 });
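The two wildcard tests above pin down the behaviour of `patternIsSuperset()` (its glob handling is visible further down via `patternToRegExp`): a push-side pattern only covers a pull_request branch if it matches everything that branch pattern could match. Roughly, the expectations those tests encode are:

```ts
// Behaviour implied by the two tests above – not a new API, just the expected results.
patternIsSuperset("feature/*", "feature/moose"); // true  -> no MismatchedBranches error
patternIsSuperset("feature/moose", "feature/*"); // false -> MismatchedBranches is reported
```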
 
-test("validateWorkflow() when HEAD^2 is checked out", (t) => {
-  const errors = actionsutil.validateWorkflow({
+test("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
+  process.env.GITHUB_JOB = "test";
+
+  const errors = actionsutil.getWorkflowErrors({
     on: ["push", "pull_request"],
     jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
   });
 
-  t.deepEqual(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]);
+  t.deepEqual(
+    ...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead])
+  );
 });
 
 test("formatWorkflowErrors() when there is one error", (t) => {
@@ -358,6 +423,12 @@ test("formatWorkflowErrors() when there are multiple errors", (t) => {
   t.true(message.startsWith("2 issues were detected with this workflow:"));
 });
 
+test("formatWorkflowCause() with no errors", (t) => {
+  const message = actionsutil.formatWorkflowCause([]);
+
+  t.deepEqual(message, undefined);
+});
+
 test("formatWorkflowCause()", (t) => {
   const message = actionsutil.formatWorkflowCause([
     actionsutil.WorkflowErrors.CheckoutWrongHead,
@@ -400,3 +471,171 @@ test("patternIsSuperset()", (t) => {
     )
   );
 });
+
+test("getWorkflowErrors() when branches contain dots", (t) => {
+  const errors = actionsutil.getWorkflowErrors(
+    yaml.safeLoad(`
+on:
+  push:
+    branches: [4.1, master]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [4.1, master]
+`)
+  );
+
+  t.deepEqual(...errorCodes(errors, []));
+});
+
+test("getWorkflowErrors() when on.push has a trailing comma", (t) => {
+  const errors = actionsutil.getWorkflowErrors(
+    yaml.safeLoad(`
+name: "CodeQL"
+on:
+  push:
+    branches: [master, ]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [master]
+`)
+  );
+
+  t.deepEqual(...errorCodes(errors, []));
+});
+
+test("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
+  process.env.GITHUB_JOB = "test";
+
+  const errors = actionsutil.getWorkflowErrors(
+    yaml.safeLoad(`
+name: "CodeQL"
+on:
+  push:
+    branches: [master]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [master]
+jobs:
+  test:
+    steps:
+      - run: "git checkout HEAD^2"
+
+  test2:
+    steps:
+      - run: "git checkout HEAD^2"
+
+  test3:
+    steps: []
+`)
+  );
+
+  t.deepEqual(
+    ...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead])
+  );
+});
+
+test("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
+  process.env.GITHUB_JOB = "test3";
+
+  const errors = actionsutil.getWorkflowErrors(
+    yaml.safeLoad(`
+name: "CodeQL"
+on:
+  push:
+    branches: [master]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [master]
+jobs:
+  test:
+    steps:
+      - run: "git checkout HEAD^2"
+
+  test2:
+    steps:
+      - run: "git checkout HEAD^2"
+
+  test3:
+    steps: []
+`)
+  );
+
+  t.deepEqual(...errorCodes(errors, []));
+});
+
+test("getWorkflowErrors() when on is missing", (t) => {
+  const errors = actionsutil.getWorkflowErrors(
+    yaml.safeLoad(`
+name: "CodeQL"
+`)
+  );
+
+  t.deepEqual(...errorCodes(errors, []));
+});
+
+test("getWorkflowErrors() with a different on setup", (t) => {
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors(
+        yaml.safeLoad(`
+name: "CodeQL"
+on: "workflow_dispatch"
+`)
+      ),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors(
+        yaml.safeLoad(`
+name: "CodeQL"
+on: [workflow_dispatch]
+`)
+      ),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors(
+        yaml.safeLoad(`
+name: "CodeQL"
+on:
+  workflow_dispatch: {}
+`)
+      ),
+      []
+    )
+  );
+});
+
+test("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors(
+        yaml.safeLoad(`
+name: "CodeQL"
+on:
+  push:
+    branches: [master]
+`)
+      ),
+      []
+    )
+  );
+
+  t.deepEqual(
+    ...errorCodes(
+      actionsutil.getWorkflowErrors(
+        yaml.safeLoad(`
+name: "CodeQL"
+on: ["push"]
+`)
+      ),
+      []
+    )
+  );
+});

@@ -45,6 +45,13 @@ export function getRequiredEnvParam(paramName: string): string {
   return value;
 }
 
+export function getTemporaryDirectory(): string {
+  const value = process.env["CODEQL_ACTION_TEMP"];
+  return value !== undefined && value !== ""
+    ? value
+    : getRequiredEnvParam("RUNNER_TEMP");
+}
+
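The new `getTemporaryDirectory()` helper prefers the action-managed `CODEQL_ACTION_TEMP` location and only falls back to the runner-provided `RUNNER_TEMP`. A small usage sketch; the call sites are not part of the hunks shown here, and the subdirectory name below is made up for illustration:

```ts
import * as path from "path";

// Resolve a scratch directory for intermediate files.
// => $CODEQL_ACTION_TEMP/codeql-scratch when that variable is set and non-empty,
//    otherwise $RUNNER_TEMP/codeql-scratch.
const scratchDir = path.join(getTemporaryDirectory(), "codeql-scratch");
```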
 /**
  * Ensures all required environment variables are set in the context of a local run.
  */
@@ -68,7 +75,7 @@ export function prepareLocalRunEnvironment() {
 /**
  * Gets the SHA of the commit that is currently checked out.
  */
-export const getCommitOid = async function (): Promise<string> {
+export const getCommitOid = async function (ref = "HEAD"): Promise<string> {
   // Try to use git to get the current commit SHA. If that fails then
   // log but otherwise silently fall back to using the SHA from the environment.
   // The only time these two values will differ is during analysis of a PR when
@@ -80,7 +87,7 @@ export const getCommitOid = async function (): Promise<string> {
   let commitOid = "";
   await new toolrunner.ToolRunner(
     await safeWhich.safeWhich("git"),
-    ["rev-parse", "HEAD"],
+    ["rev-parse", ref],
     {
       silent: true,
       listeners: {
@@ -143,6 +150,7 @@ function escapeRegExp(string) {
 function patternToRegExp(value) {
   return new RegExp(
     `^${value
+      .toString()
       .split(GLOB_PATTERN)
       .reduce(function (arr, cur) {
         if (cur === "**") {
@@ -176,44 +184,40 @@ function branchesToArray(branches?: string | null | string[]): string[] | "**" {
   }
   return "**";
 }
-enum MissingTriggers {
-  None = 0,
-  Push = 1,
-  PullRequest = 2,
-}
-
-interface CodedError {
+export interface CodedError {
   message: string;
   code: string;
 }
-function toCodedErrors(errors: {
-  [key: string]: string;
-}): { [key: string]: CodedError } {
+function toCodedErrors<T>(errors: T): Record<keyof T, CodedError> {
   return Object.entries(errors).reduce((acc, [key, value]) => {
     acc[key] = { message: value, code: key };
     return acc;
-  }, {} as ReturnType<typeof toCodedErrors>);
+  }, {} as Record<keyof T, CodedError>);
 }
 
+// code to send back via status report
+// message to add as a warning annotation to the run
 export const WorkflowErrors = toCodedErrors({
   MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
-  MissingHooks: `Please specify on.push and on.pull_request hooks so that Code Scanning can compare pull requests against the state of the base branch.`,
-  MissingPullRequestHook: `Please specify an on.pull_request hook so that Code Scanning is explicitly run against pull requests. This will be required to see results on pull requests from January 31 2021.`,
   MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
   PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
   PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
   CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
-  LintFailed: `Unable to lint workflow for CodeQL.`,
 });
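With the generic `toCodedErrors<T>()`, every entry of `WorkflowErrors` keeps its object key as a machine-readable `code` next to the human-readable `message`; the `errorCodes()` assertions and `formatWorkflowCause()` both rely on that. For example, using the values from the table above:

```ts
const err = WorkflowErrors.MismatchedBranches;
// The key becomes the stable code that is sent back via the status report ...
console.log(err.code); // "MismatchedBranches"
// ... while the message is what gets surfaced as a warning annotation on the run.
console.log(err.message); // "Please make sure that every branch in on.pull_request is also in on.push ..."
```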
 
-export function validateWorkflow(doc: Workflow): CodedError[] {
+export function getWorkflowErrors(doc: Workflow): CodedError[] {
   const errors: CodedError[] = [];
 
-  // .jobs[key].steps[].run
-  for (const job of Object.values(doc?.jobs || {})) {
-    if (Array.isArray(job?.steps)) {
-      for (const step of job?.steps) {
+  const jobName = process.env.GITHUB_JOB;
+
+  if (jobName) {
+    const job = doc?.jobs?.[jobName];
+
+    const steps = job?.steps;
+
+    if (Array.isArray(steps)) {
+      for (const step of steps) {
         // this was advice that we used to give in the README
         // we actually want to run the analysis on the merge commit
         // to produce results that are more inline with expectations
@@ -221,41 +225,37 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
         // and avoid some race conditions
         if (step?.run === "git checkout HEAD^2") {
           errors.push(WorkflowErrors.CheckoutWrongHead);
+          break;
         }
       }
     }
   }
 
-  let missing = MissingTriggers.None;
+  let missingPush = false;
 
   if (doc.on === undefined) {
-    missing = MissingTriggers.Push | MissingTriggers.PullRequest;
+    // this is not a valid config
   } else if (typeof doc.on === "string") {
-    switch (doc.on) {
-      case "push":
-        missing = MissingTriggers.PullRequest;
-        break;
-      case "pull_request":
-        missing = MissingTriggers.Push;
-        break;
-      default:
-        missing = MissingTriggers.Push | MissingTriggers.PullRequest;
-        break;
+    if (doc.on === "pull_request") {
+      missingPush = true;
     }
   } else if (Array.isArray(doc.on)) {
-    if (!doc.on.includes("push")) {
-      missing = missing | MissingTriggers.Push;
-    }
-    if (!doc.on.includes("pull_request")) {
-      missing = missing | MissingTriggers.PullRequest;
+    const hasPush = doc.on.includes("push");
+    const hasPullRequest = doc.on.includes("pull_request");
+    if (hasPullRequest && !hasPush) {
+      missingPush = true;
     }
   } else if (isObject(doc.on)) {
-    if (!Object.prototype.hasOwnProperty.call(doc.on, "pull_request")) {
-      missing = missing | MissingTriggers.PullRequest;
-    }
-    if (!Object.prototype.hasOwnProperty.call(doc.on, "push")) {
-      missing = missing | MissingTriggers.Push;
-    } else {
+    const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
+    const hasPullRequest = Object.prototype.hasOwnProperty.call(
+      doc.on,
+      "pull_request"
+    );
+
+    if (!hasPush && hasPullRequest) {
+      missingPush = true;
+    }
+    if (hasPush && hasPullRequest) {
       const paths = doc.on.push?.paths;
       // if you specify paths or paths-ignore you can end up with commits that have no baseline
      // if they didn't change any files
@@ -269,59 +269,65 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
      }
    }
 
-    const push = branchesToArray(doc.on.push?.branches);
-
-    if (push !== "**") {
-      const pull_request = branchesToArray(doc.on.pull_request?.branches);
-
-      if (pull_request !== "**") {
-        const difference = pull_request.filter(
-          (value) => !push.some((o) => patternIsSuperset(o, value))
-        );
-        if (difference.length > 0) {
-          // there are branches in pull_request that may not have a baseline
-          // because we are not building them on push
-          errors.push(WorkflowErrors.MismatchedBranches);
-        }
-      } else if (push.length > 0) {
-        // push is set up to run on a subset of branches
-        // and you could open a PR against a branch with no baseline
-        errors.push(WorkflowErrors.MismatchedBranches);
-      }
-    }
-  } else {
-    // on is not a known type
-    // this workflow is likely malformed
-    missing = MissingTriggers.Push | MissingTriggers.PullRequest;
+    // if doc.on.pull_request is null that means 'all branches'
+    // if doc.on.pull_request is undefined that means 'off'
+    // we only want to check for mismatched branches if pull_request is on.
+    if (doc.on.pull_request !== undefined) {
+      const push = branchesToArray(doc.on.push?.branches);
+
+      if (push !== "**") {
+        const pull_request = branchesToArray(doc.on.pull_request?.branches);
+
+        if (pull_request !== "**") {
+          const difference = pull_request.filter(
+            (value) => !push.some((o) => patternIsSuperset(o, value))
+          );
+          if (difference.length > 0) {
+            // there are branches in pull_request that may not have a baseline
+            // because we are not building them on push
+            errors.push(WorkflowErrors.MismatchedBranches);
+          }
+        } else if (push.length > 0) {
+          // push is set up to run on a subset of branches
+          // and you could open a PR against a branch with no baseline
+          errors.push(WorkflowErrors.MismatchedBranches);
+        }
+      }
+    }
   }
 
-  switch (missing) {
-    case MissingTriggers.PullRequest | MissingTriggers.Push:
-      errors.push(WorkflowErrors.MissingHooks);
-      break;
-    case MissingTriggers.PullRequest:
-      errors.push(WorkflowErrors.MissingPullRequestHook);
-      break;
-    case MissingTriggers.Push:
-      errors.push(WorkflowErrors.MissingPushHook);
-      break;
+  if (missingPush) {
+    errors.push(WorkflowErrors.MissingPushHook);
   }
 
   return errors;
 }
 
-export async function getWorkflowErrors(): Promise<CodedError[]> {
+export async function validateWorkflow(): Promise<undefined | string> {
+  let workflow: Workflow;
   try {
-    const workflow = await getWorkflow();
-
-    if (workflow === undefined) {
-      return [];
-    }
-
-    return validateWorkflow(workflow);
+    workflow = await getWorkflow();
   } catch (e) {
-    return [WorkflowErrors.LintFailed];
+    return `error: getWorkflow() failed: ${e.toString()}`;
   }
+  let workflowErrors: CodedError[];
+  try {
+    workflowErrors = getWorkflowErrors(workflow);
+  } catch (e) {
+    return `error: getWorkflowErrors() failed: ${e.toString()}`;
+  }
+
+  if (workflowErrors.length > 0) {
+    let message: string;
+    try {
+      message = formatWorkflowErrors(workflowErrors);
+    } catch (e) {
+      return `error: formatWorkflowErrors() failed: ${e.toString()}`;
    }
+    core.warning(message);
+  }
+
+  return formatWorkflowCause(workflowErrors);
 }
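The reworked `validateWorkflow()` now returns a short cause string (or `undefined`) rather than throwing or returning the raw error list, so a caller can both warn in the log and record the cause. A hedged sketch of a possible call site; the real caller and its status-report field name are not shown in this diff:

```ts
// Hypothetical caller – the field name `workflow_error` is illustrative only.
async function buildStatusReport(base: Record<string, unknown>) {
  const workflowCause = await validateWorkflow(); // undefined when nothing was flagged
  return {
    ...base,
    workflow_error: workflowCause, // e.g. "MissingPushHook" or "CheckoutWrongHead,MismatchedBranches"
  };
}
```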
 
 export function formatWorkflowErrors(errors: CodedError[]): string {
@@ -339,19 +345,14 @@ export function formatWorkflowCause(errors: CodedError[]): undefined | string {
   return errors.map((e) => e.code).join(",");
 }
 
-export async function getWorkflow(): Promise<Workflow | undefined> {
+export async function getWorkflow(): Promise<Workflow> {
   const relativePath = await getWorkflowPath();
   const absolutePath = path.join(
     getRequiredEnvParam("GITHUB_WORKSPACE"),
     relativePath
   );
 
-  try {
-    return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
-  } catch (e) {
-    core.warning(`Could not read workflow: ${e.toString()}`);
-    return undefined;
-  }
+  return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
 }
 
 /**
@@ -395,7 +396,7 @@ export function getWorkflowRunID(): number {
 }
 
 /**
- * Get the analysis key paramter for the current job.
+ * Get the analysis key parameter for the current job.
  *
 * This will combine the workflow path and current job name.
 * Computing this the first time requires making requests to
@@ -424,19 +425,35 @@ export async function getRef(): Promise<string> {
   // Will be in the form "refs/heads/master" on a push event
   // or in the form "refs/pull/N/merge" on a pull_request event
   const ref = getRequiredEnvParam("GITHUB_REF");
+  const sha = getRequiredEnvParam("GITHUB_SHA");
 
   // For pull request refs we want to detect whether the workflow
   // has run `git checkout HEAD^2` to analyze the 'head' ref rather
   // than the 'merge' ref. If so, we want to convert the ref that
   // we report back.
   const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
-  const checkoutSha = await getCommitOid();
+  if (!pull_ref_regex.test(ref)) {
+    return ref;
+  }
 
-  if (
-    pull_ref_regex.test(ref) &&
-    checkoutSha !== getRequiredEnvParam("GITHUB_SHA")
-  ) {
-    return ref.replace(pull_ref_regex, "refs/pull/$1/head");
+  const head = await getCommitOid("HEAD");
+
+  // in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
+  // in actions/checkout@v1 this may not be true as it checks out the repository
+  // using GITHUB_REF. There is a subtle race condition where
+  // git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
+  // git git-parse GITHUB_REF == git rev-parse HEAD instead.
+  const hasChangedRef =
+    sha !== head &&
+    (await getCommitOid(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
+      head;
+
+  if (hasChangedRef) {
+    const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
+    core.debug(
+      `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`
+    );
+    return newRef;
   } else {
     return ref;
   }
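A worked example of the rewritten `getRef()` condition, under the assumption that the workflow ran `git checkout HEAD^2` after `actions/checkout` (the SHAs are illustrative):

```ts
// GITHUB_REF = "refs/pull/123/merge", GITHUB_SHA = "aaa111" (the merge commit).
// After `git checkout HEAD^2`, `git rev-parse HEAD` resolves to "bbb222" (the PR head).
//   sha !== head                                        -> true
//   rev-parse("refs/remotes/pull/123/merge") !== head   -> true (that ref is still the merge commit)
//   hasChangedRef                                       -> true
// getRef() therefore reports "refs/pull/123/head" instead of "refs/pull/123/merge".
// If the workflow never leaves the merge commit, both checks fail and the original ref is returned.
```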
@@ -555,12 +572,22 @@ export async function createStatusReportBase(
 
 interface HTTPError {
   status: number;
+  message?: string;
 }
 
 function isHTTPError(arg: any): arg is HTTPError {
   return arg?.status !== undefined && Number.isInteger(arg.status);
 }
 
+const GENERIC_403_MSG =
+  "The repo on which this action is running is not opted-in to CodeQL code scanning.";
+const GENERIC_404_MSG =
+  "Not authorized to used the CodeQL code scanning feature on this repo.";
+const OUT_OF_DATE_MSG =
+  "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
+const INCOMPATIBLE_MSG =
+  "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
+
 /**
  * Send a status report to the code_scanning/analysis/status endpoint.
  *
@@ -597,32 +624,33 @@ export async function sendStatusReport<S extends StatusReportBase>(
 
     return true;
   } catch (e) {
+    console.log(e);
     if (isHTTPError(e)) {
       switch (e.status) {
         case 403:
-          core.setFailed(
-            "The repo on which this action is running is not opted-in to CodeQL code scanning."
-          );
+          if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
+            core.setFailed(
+              'Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
+                "Uploading Code Scanning results requires write access. " +
+                'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
+                "See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events."
+            );
+          } else {
+            core.setFailed(e.message || GENERIC_403_MSG);
+          }
           return false;
         case 404:
-          core.setFailed(
-            "Not authorized to used the CodeQL code scanning feature on this repo."
-          );
+          core.setFailed(GENERIC_404_MSG);
           return false;
         case 422:
           // schema incompatibility when reporting status
           // this means that this action version is no longer compatible with the API
           // we still want to continue as it is likely the analysis endpoint will work
           if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) {
-            core.debug(
-              "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action."
-            );
+            core.debug(INCOMPATIBLE_MSG);
           } else {
-            core.debug(
-              "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action."
-            );
+            core.debug(OUT_OF_DATE_MSG);
           }
 
           return true;
       }
     }
@@ -630,12 +658,22 @@ export async function sendStatusReport<S extends StatusReportBase>(
     // something else has gone wrong and the request/response will be logged by octokit
     // it's possible this is a transient error and we should continue scanning
     core.error(
-      "An unexpected error occured when sending code scanning status report."
+      "An unexpected error occurred when sending code scanning status report."
     );
     return true;
   }
 }
 
+// Was the workflow run triggered by a `push` event, for example as opposed to a `pull_request` event.
+function workflowIsTriggeredByPushEvent() {
+  return process.env["GITHUB_EVENT_NAME"] === "push";
+}
+
+// Is dependabot the actor that triggered the current workflow run.
+function isDependabotActor() {
+  return process.env["GITHUB_ACTOR"] === "dependabot[bot]";
+}
+
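The two helpers above drive the new 403 handling in `sendStatusReport()`: the tailored Dependabot message is only used when the run was triggered by `push` and the actor is `dependabot[bot]`; any other 403 falls back to `e.message` or `GENERIC_403_MSG`. A small illustration of when the guard fires:

```ts
// Illustration only – the helpers read these environment variables at call time.
process.env.GITHUB_EVENT_NAME = "push";
process.env.GITHUB_ACTOR = "dependabot[bot]";
console.log(workflowIsTriggeredByPushEvent() && isDependabotActor()); // true  -> Dependabot-specific failure message
process.env.GITHUB_EVENT_NAME = "pull_request";
console.log(workflowIsTriggeredByPushEvent() && isDependabotActor()); // false -> generic 403 handling
```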
 // Is the current action executing a local copy (i.e. we're running a workflow on the codeql-action repo itself)
 // as opposed to running a remote action (i.e. when another repo references us)
 export function isRunningLocalAction(): boolean {

@@ -19,7 +19,7 @@ test("emptyPaths", async (t) => {
     tempDir: tmpDir,
     toolCacheDir: tmpDir,
     codeQLCmd: "",
-    gitHubVersion: { type: "dotcom" } as util.GitHubVersion,
+    gitHubVersion: { type: util.GitHubVariant.DOTCOM } as util.GitHubVersion,
   };
   analysisPaths.includeAndExcludeAnalysisPaths(config);
   t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -39,7 +39,7 @@ test("nonEmptyPaths", async (t) => {
     tempDir: tmpDir,
     toolCacheDir: tmpDir,
     codeQLCmd: "",
-    gitHubVersion: { type: "dotcom" } as util.GitHubVersion,
+    gitHubVersion: { type: util.GitHubVariant.DOTCOM } as util.GitHubVersion,
   };
   analysisPaths.includeAndExcludeAnalysisPaths(config);
   t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -63,7 +63,7 @@ test("exclude temp dir", async (t) => {
     tempDir,
     toolCacheDir,
     codeQLCmd: "",
-    gitHubVersion: { type: "dotcom" } as util.GitHubVersion,
+    gitHubVersion: { type: util.GitHubVariant.DOTCOM } as util.GitHubVersion,
   };
   analysisPaths.includeAndExcludeAnalysisPaths(config);
   t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
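The test fixtures above now build `gitHubVersion` from a `util.GitHubVariant` enum instead of the bare string `"dotcom"`. The enum is not part of the hunks shown here; a minimal sketch of the assumed shape (the real definition in `util` may carry more variants or fields):

```ts
// Assumed sketch of the enum referenced as util.GitHubVariant.DOTCOM.
export enum GitHubVariant {
  DOTCOM,
  GHES,
}

export type GitHubVersion =
  | { type: GitHubVariant.DOTCOM }
  | { type: GitHubVariant.GHES; version: string };
```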
@@ -34,7 +34,7 @@ export function printPathFiltersWarning(
     !config.languages.every(isInterpretedLanguage)
   ) {
     logger.warning(
-      'The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python'
+      'The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python'
     );
   }
 }
Some files were not shown because too many files have changed in this diff.