mirror of https://github.com/github/codeql-action.git
synced 2025-12-12 02:34:39 +08:00

Compare commits: 197 commits, codeql-bun ... aeisenberg
Commit SHA1s in this compare (author and date columns omitted):

43d3eddc73 bee5aac8d7 04a87f068f ecffc3ce6b fc15a137e0 fd20d82b8d c49fe2506c
1990ded3d2 f5e572fb4b a56f7259c1 5f37061461 286a237e20 43ea2ce0c9 3df217266d
e2f39a6d71 6ca99f72e9 1ebb0a0a35 f2159737dd 5153cc4c8d faa3cfa2e9 fd3190bba5
4a45fbee96 0bbf22e3b7 d362b66e03 9e7b37f8e4 944ae88ffe ace3701b4a dbb326152c
03e7dda1b9 ee9e32621e 3bfe8ab742 515f16b728 8b12e8c5e1 10f284c032 19d3f6c536
7d56b0cd44 69ddeed7d5 2c73ad667d b5a522dfb8 1ddd8a5632 d96049eeaf 168327a4aa
ef3290ce11 f04acbbdc3 cd1b9df1e3 292203e8b6 f151a3cfe6 ee57c2ed58 d521b0e630
5aac657d98 ed36ec077c bb0850a826 f16cf435c3 167d7583fe 1b37538d9c 334262d6cf
1f4460b9fb e40e887968 6e631b99c4 5a80cb1408 014d3ea60a 8caa0808ee fd4659dfd4
7112cdaa06 58056a4b12 18cda24a3b c93e88dc4b a52e4c3e23 9db151d251 6e8752e4d5
adfea7610d 40568daca8 5b28adb7ed 008b2cc71c 58d2adec4b b5a94e0978 31ae172f4b
d0b95baf8b aa1baf5b84 bc190d2af9 c0971eed35 ac8bd3f417 4854dd23d5 c5468fcaea
b0b34e58f9 83d5f38dee 444316b6c6 7128833392 455ee1f217 54fc9c575e 3525a164c3
aeeb3c3e88 88d30845f1 89d78ba457 e72d9f4a72 4455bf8cd9 f27dc32aeb bf6f871097
cda7fe109b 12300ac88e 11db623ba8 3f515d3140 a57642e1a0 bf85baea70 62ef9f5eb2
e22a6cd3c9 f9b0b9cb7b 5fc16ebcf6 7e2215bc92 3a016ebea8 2ba7208ff2 72399ae69a
c0a58782b6 d1ff4d6297 80a6b3a41e 1c27c52804 e833d6e84c 8a0c541e24 e2d592dc8f
2e71e02553 b29bf7b05a 1785bbb7d8 a44b61d961 a062fc9bf5 50de2e4d1e 132e08a05f
720884501a 6dc5d80b98 542390c023 fd45eac830 e70ec1d70e 70d2efc353 fb77829455
4ba53e33d7 f0a2954c12 bf91ac91d4 e3151ae8df a2f0227de2 2639547a69 e86cff2acb
2eb8300e80 78cda05e38 b0e70410b4 3254fa5859 01d17eaf42 0c4306b672 acd9964b7a
bc33041cc2 c7203c94d9 7f1659f0d8 962925a448 4e477f1b95 a068a286e6 b19f1f91f0
f1c75fc816 1e0763ca30 5fd8ca8122 873a76a1a7 4a0d3378b1 46043e8a9e 75aad219f8
fe83f965e4 3ebf97724d 0d884244be 1263b9c651 93c9da2c2e 060eb52d32 8c8a933d75
743d8dfb6e 75d42bea8d 46ddfc6186 07fa17da87 61fb5d7202 89e4b4fff3 9ad3f820af
5ba154a3b4 bb0eba15f5 db7158f9ba 94013c25cf 20de0f01a3 21753283b1 44c8bd3e63
952b2690da d6a5bf5c1c 068ade0b31 183487b717 38bb211981 a0ab4842b5 9e304b92ff
5e1d24657c 2b6d86c672 5bcf9d9972 8068352efa bef7eecb10 1df71f8baa 53cfc95284
b22fb4674b
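The compare endpoints shown in the header are truncated, so the refs below are placeholders rather than the real branch names. As a hedged sketch, the same 197-commit listing could be reproduced against the mirror with plain git:

    # Placeholder refs; the real compare endpoints are truncated in the page header above.
    BASE_REF="codeql-bun"      # truncated in the header, will need the full name
    HEAD_REF="aeisenberg"
    git clone --mirror https://github.com/github/codeql-action.git
    git -C codeql-action.git rev-list --count "$BASE_REF..$HEAD_REF"   # expect 197 for this compare
    git -C codeql-action.git log --oneline "$BASE_REF..$HEAD_REF"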
@@ -14,11 +14,14 @@
 ],
 "rules": {
 "filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],
+"i18n-text/no-en": "off",
 "import/extensions": "error",
 "import/no-amd": "error",
 "import/no-commonjs": "error",
 "import/no-dynamic-require": "error",
-"import/no-extraneous-dependencies": ["error"],
+// Disable the rule that checks that devDependencies aren't imported since we use a single
+// linting configuration file for both source and test code.
+"import/no-extraneous-dependencies": ["error", {"devDependencies": true}],
 "import/no-namespace": "off",
 "import/no-unresolved": "error",
 "import/no-webpack-loader-syntax": "error",
@@ -48,7 +51,8 @@
 "@typescript-eslint/prefer-regexp-exec": "off",
 "@typescript-eslint/require-await": "off",
 "@typescript-eslint/restrict-template-expressions": "off",
-"func-style": "off"
+"func-style": "off",
+"sort-imports": "off"
 }
 }]
 }
.gitattributes (vendored, 1 line changed)
@@ -1,4 +1,5 @@
 lib/*.js linguist-generated=true
+.github/workflows/__* linguist-generated=true

 # Reduce incidence of needless merge conflicts on CHANGELOG.md
 # The man page at
.github/dependabot.yml (vendored, 6 lines changed)
@@ -7,8 +7,14 @@ updates:
       day: "thursday" # Gives us a working day to merge this before our typical release
     labels:
       - "Update dependencies"
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
  - package-ecosystem: "npm"
    directory: "/runner"
    schedule:
      interval: "weekly"
      day: "thursday" # Gives us a working day to merge this before our typical release
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
.github/prepare-test/action.yml (vendored, new file, 39 lines)
@@ -0,0 +1,39 @@
name: "Prepare test"
description: Performs some preparation to run tests
inputs:
  version:
    required: true
outputs:
  tools-url:
    value: ${{ steps.get-url.outputs.tools-url }}
runs:
  using: composite
  steps:
    - name: Move codeql-action
      shell: bash
      run: |
        mkdir ../action
        mv * .github ../action/
        mv ../action/tests/multi-language-repo/{*,.github} .
        mv ../action/.github/workflows .github
    - id: get-url
      name: Determine URL
      shell: bash
      run: |
        if [[ ${{ inputs.version }} == "nightly-latest" ]]; then
          export LATEST=`gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3`
          echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST/codeql-bundle.tar.gz"
        elif [[ ${{ inputs.version }} == *"nightly"* ]]; then
          export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
          echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
        elif [[ ${{ inputs.version }} == *"stable"* ]]; then
          export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
          echo "Hello $VERSION"
          echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
        elif [[ ${{ inputs.version }} == "latest" ]]; then
          echo "::set-output name=tools-url::latest"
        elif [[ ${{ inputs.version }} == "cached" ]]; then
          echo "::set-output name=tools-url::"
        else
          echo "::error Unrecognized version specified!"
        fi
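The get-url step above maps the composite action's `version` input to a CodeQL bundle URL. As a rough local sketch of that mapping outside of Actions (the `version` value below is a made-up stand-in, and only the stable/nightly date-suffixed branches are mirrored here), the same sed-based rewrite can be exercised directly:

    #!/bin/bash
    # Hypothetical stand-in for the action's `version` input.
    version="stable-20210809"

    # Strip everything up to the last '-' to recover the date suffix, as the step above does.
    suffix=$(echo "$version" | sed -e 's/^.*\-//')

    if [[ "$version" == *"nightly"* ]]; then
      echo "https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$suffix-manual/codeql-bundle.tar.gz"
    elif [[ "$version" == *"stable"* ]]; then
      echo "https://github.com/github/codeql-action/releases/download/codeql-bundle-$suffix/codeql-bundle.tar.gz"
    fi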
.github/workflows/__go-custom-queries.yml (generated, vendored, new file, 62 lines)
@@ -0,0 +1,62 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Go: Custom queries'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  go-custom-queries:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os:
          - ubuntu-latest
          - macos-latest
          - windows-latest
    name: 'Go: Custom queries'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: actions/setup-go@v2
        with:
          go-version: ^1.13.1
      - uses: ./../action/init
        with:
          languages: go
          config-file: ./.github/codeql/custom-queries.yml
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__go-custom-tracing-autobuild.yml (generated, vendored, new file, 64 lines)
@@ -0,0 +1,64 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Go: Autobuild custom tracing'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  go-custom-tracing-autobuild:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest]
    name: 'Go: Autobuild custom tracing'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: actions/setup-go@v2
        with:
          go-version: ^1.13.1
      - uses: ./../action/init
        with:
          languages: go
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - uses: ./../action/autobuild
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
      - shell: bash
        run: |
          cd "$RUNNER_TEMP/codeql_databases"
          if [[ ! -d go ]]; then
            echo "Did not find a Go database"
            exit 1
          fi
    env:
      CODEQL_EXTRACTOR_GO_BUILD_TRACING: 'true'
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__go-custom-tracing.yml (generated, vendored, new file, 62 lines)
@@ -0,0 +1,62 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Go: Custom tracing'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  go-custom-tracing:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os:
          - ubuntu-latest
          - macos-latest
          - windows-latest
    name: 'Go: Custom tracing'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: actions/setup-go@v2
        with:
          go-version: ^1.13.1
      - uses: ./../action/init
        with:
          languages: go
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: go build main.go
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
    env:
      CODEQL_EXTRACTOR_GO_BUILD_TRACING: 'true'
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__javascript-source-root.yml (generated, vendored, new file, 61 lines)
@@ -0,0 +1,61 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Custom source root
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  javascript-source-root:
    strategy:
      matrix:
        version: [latest, cached, nightly-latest] # This feature is not compatible with old CLIs
        os: [ubuntu-latest]
    name: Custom source root
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - name: Move codeql-action
        shell: bash
        run: |
          mkdir ../new-source-root
          mv * ../new-source-root
      - uses: ./../action/init
        with:
          languages: javascript
          source-root: ../new-source-root
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - uses: ./../action/analyze
        with:
          skip-queries: true
          upload: false
      - name: Assert database exists
        shell: bash
        run: |
          cd "$RUNNER_TEMP/codeql_databases"
          if [[ ! -d javascript ]]; then
            echo "Did not find a JavaScript database"
            exit 1
          fi
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__multi-language-autodetect.yml (generated, vendored, new file, 88 lines)
@@ -0,0 +1,88 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Multi-language repository
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  multi-language-autodetect:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest]
    name: Multi-language repository
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          db-location: ${{ runner.temp }}/customDbLocation
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        id: analysis
        env:
          TEST_MODE: true
      - shell: bash
        run: |
          CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
          if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for CPP, or created it in the wrong location."
            exit 1
          fi
          CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
          if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for C Sharp, or created it in the wrong location."
            exit 1
          fi
          GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
          if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Go, or created it in the wrong location."
            exit 1
          fi
          JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
          if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Java, or created it in the wrong location."
            exit 1
          fi
          JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
          if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Javascript, or created it in the wrong location."
            exit 1
          fi
          PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
          if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Python, or created it in the wrong location."
            exit 1
          fi
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
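The final step above repeats the same existence check once per language. A more compact equivalent is sketched below; it is hedged in that it assumes the analyze step's `db-locations` output has first been written to a hypothetical `db-locations.json` file, which the workflow itself does not do:

    #!/bin/bash
    set -e
    # Assumes db-locations.json holds the JSON from steps.analysis.outputs.db-locations.
    for lang in cpp csharp go java javascript python; do
      db=$(jq -r --arg l "$lang" '.[$l]' db-locations.json)
      if [[ ! -d "$db" ]]; then
        echo "Did not create a database for $lang, or created it in the wrong location."
        exit 1
      fi
    done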
.github/workflows/__packaging-config-inputs-js.yml (generated, vendored, new file, 67 lines)
@@ -0,0 +1,67 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Packaging: Config and input'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  packaging-config-inputs-js:
    strategy:
      matrix:
        version: [nightly-20210831] # This CLI version is known to work with package used in this test
        os: [ubuntu-latest, macos-latest]
    name: 'Packaging: Config and input'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          config-file: .github/codeql/codeql-config-packaging3.yml
          packs: +dsp-testing/codeql-pack1@0.1.0
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        with:
          output: ${{ runner.temp }}/results
        env:
          TEST_MODE: true
      - name: Assert Results
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          # We should have 3 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"

          # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
          echo "Found matching rules '$RULES'"
          if [ "$RULES" != "$EXPECTED_RULES" ]; then
            echo "Did not match expected rules '$EXPECTED_RULES'."
            exit 1
          fi
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__packaging-config-js.yml (generated, vendored, new file, 66 lines)
@@ -0,0 +1,66 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Packaging: Config file'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  packaging-config-js:
    strategy:
      matrix:
        version: [nightly-20210831] # This CLI version is known to work with package used in this test
        os: [ubuntu-latest, macos-latest]
    name: 'Packaging: Config file'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          config-file: .github/codeql/codeql-config-packaging.yml
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        with:
          output: ${{ runner.temp }}/results
        env:
          TEST_MODE: true
      - name: Assert Results
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          # We should have 3 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"

          # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
          echo "Found matching rules '$RULES'"
          if [ "$RULES" != "$EXPECTED_RULES" ]; then
            echo "Did not match expected rules '$EXPECTED_RULES'."
            exit 1
          fi
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__packaging-inputs-js.yml (generated, vendored, new file, 67 lines)
@@ -0,0 +1,67 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Packaging: Action input'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  packaging-inputs-js:
    strategy:
      matrix:
        version: [nightly-20210831] # This CLI version is known to work with package used in this test
        os: [ubuntu-latest, macos-latest]
    name: 'Packaging: Action input'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          config-file: .github/codeql/codeql-config-packaging2.yml
          languages: javascript
          packs: dsp-testing/codeql-pack1@0.1.0, dsp-testing/codeql-pack2
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        with:
          output: ${{ runner.temp }}/results
        env:
          TEST_MODE: true
      - name: Assert Results
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          # We should have 3 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"

          # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
          echo "Found matching rules '$RULES'"
          if [ "$RULES" != "$EXPECTED_RULES" ]; then
            echo "Did not match expected rules '$EXPECTED_RULES'."
            exit 1
          fi
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__remote-config.yml (generated, vendored, new file, 60 lines)
@@ -0,0 +1,60 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Remote config file
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  remote-config:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os:
          - ubuntu-latest
          - macos-latest
          - windows-latest
    name: Remote config file
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
          languages: cpp,csharp,java,javascript,python
          config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
            github.sha }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__rubocop-multi-language.yml (generated, vendored, new file, 67 lines)
@@ -0,0 +1,67 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - RuboCop multi-language
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  rubocop-multi-language:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest]
    name: RuboCop multi-language
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - name: Set up Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 2.6
      - name: Install Code Scanning integration
        shell: bash
        run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
      - name: Install dependencies
        shell: bash
        run: bundle install
      - name: RuboCop run
        shell: bash
        run: |
          bash -c "
            bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
            [[ $? -ne 2 ]]
          "
      - uses: ./../action/upload-sarif
        with:
          sarif_file: rubocop.sarif
        env:
          TEST_MODE: true
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
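The RuboCop job uploads rubocop.sarif without asserting on its contents. A quick hand inspection of that file is sketched below, assuming jq is available and a rubocop.sarif exists in the working directory; it mirrors the `.runs[0].results[].ruleId` query used by the packaging checks above:

    # Count results per rule in the SARIF produced by the RuboCop step.
    jq -r '.runs[0].results[].ruleId' rubocop.sarif | sort | uniq -c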
.github/workflows/__split-workflow.yml (generated, vendored, new file, 80 lines)
@@ -0,0 +1,80 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Split workflow
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  split-workflow:
    strategy:
      matrix:
        version: [nightly-20210831] # This CLI version is known to work with package used in this test
        os: [ubuntu-latest, macos-latest]
    name: Split workflow
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          config-file: .github/codeql/codeql-config-packaging3.yml
          packs: +dsp-testing/codeql-pack1@0.1.0
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        with:
          skip-queries: true
          output: ${{ runner.temp }}/results
        env:
          TEST_MODE: true
      - name: Assert No Results
        shell: bash
        run: |
          if [ "$(ls -A $RUNNER_TEMP/results)" ]; then
            echo "Expected results directory to be empty after skipping query execution!"
            exit 1
          fi
      - uses: ./../action/analyze
        with:
          output: ${{ runner.temp }}/results
        env:
          TEST_MODE: true
      - name: Assert Results
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          # We should have 3 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"

          # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
          echo "Found matching rules '$RULES'"
          if [ "$RULES" != "$EXPECTED_RULES" ]; then
            echo "Did not match expected rules '$EXPECTED_RULES'."
            exit 1
          fi
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__test-local-codeql.yml (generated, vendored, new file, 54 lines)
@@ -0,0 +1,54 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Local CodeQL bundle
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  test-local-codeql:
    strategy:
      matrix:
        version: [nightly-latest]
        os: [ubuntu-latest]
    name: Local CodeQL bundle
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - name: Fetch a CodeQL bundle
        shell: bash
        env:
          CODEQL_URL: ${{ steps.prepare-test.outputs.tools-url }}
        run: |
          wget "$CODEQL_URL"
      - uses: ./../action/init
        with:
          tools: ./codeql-bundle.tar.gz
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
    env:
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__test-proxy.yml (generated, vendored, new file, 55 lines)
@@ -0,0 +1,55 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Proxy test
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  test-proxy:
    strategy:
      matrix:
        version: [latest]
        os: [ubuntu-latest]
    name: Proxy test
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
    env:
      https_proxy: http://squid-proxy:3128
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
    container:
      image: ubuntu:18.04
      options: --dns 127.0.0.1
    services:
      squid-proxy:
        image: datadog/squid:latest
        ports:
          - 3128:3128
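The proxy job relies on the `https_proxy` variable pointing at the Squid sidecar. A hedged manual check of that wiring from inside the job container is sketched below; it assumes curl is installed there, which the workflow itself does not guarantee:

    # Route a single request through the sidecar and fail loudly if it is unreachable.
    https_proxy=http://squid-proxy:3128 curl -sSf -o /dev/null https://github.com && echo "proxy reachable"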
.github/workflows/__test-ruby.yml (generated, vendored, new file, 56 lines)
@@ -0,0 +1,56 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Ruby analysis
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  test-ruby:
    strategy:
      matrix:
        version: [latest, cached, nightly-latest]
        os: [ubuntu-latest, macos-latest]
    name: Ruby analysis
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          languages: ruby
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - uses: ./../action/analyze
        id: analysis
        env:
          TEST_MODE: true
      - name: Check database
        shell: bash
        run: |
          RUBY_DB="${{ fromJson(steps.analysis.outputs.db-locations).ruby }}"
          if [[ ! -d "$RUBY_DB" ]]; then
            echo "Did not create a database for Ruby."
            exit 1
          fi
    env:
      CODEQL_ENABLE_EXPERIMENTAL_FEATURES: 'true'
      INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/codeql.yml (vendored, 16 lines changed)
@@ -17,8 +17,6 @@ jobs:
       versions: ${{ steps.compare.outputs.versions }}

     permissions:
-      actions: read
-      contents: read
       security-events: write

     steps:
@@ -48,13 +46,19 @@ jobs:
         CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)"
         echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT"
         echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST"
-        if [[ "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
-          # Just use `tools: null` to avoid duplication in the analysis job.
+        # If we're running on a pull request, run with both bundles, even if `tools: latest` would
+        # be the same as `tools: null`. This allows us to make the job for each of the bundles a
+        # required status check.
+        #
+        # If we're running on push, then we can skip running with `tools: latest` when it would be
+        # the same as running with `tools: null`.
+        if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
           VERSIONS_JSON='[null]'
         else
-          # Use both `tools: null` and `tools: latest` in the analysis job.
           VERSIONS_JSON='[null, "latest"]'
         fi

         # Output a JSON-encoded list with the distinct versions to test against.
         echo "Suggested matrix config for analysis job: $VERSIONS_JSON"
         echo "::set-output name=versions::${VERSIONS_JSON}"
@@ -68,8 +72,6 @@ jobs:
     runs-on: ${{ matrix.os }}

     permissions:
-      actions: read
-      contents: read
       security-events: write

     steps:
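The new condition above only collapses the matrix to `[null]` on non-pull-request events when the default and latest bundles coincide. A hedged local dry run of that logic follows; the version strings and event name are made-up values for illustration:

    #!/bin/bash
    # Made-up inputs standing in for the values computed in the workflow step.
    GITHUB_EVENT_NAME="pull_request"
    CODEQL_VERSION_DEFAULT="2.6.3"
    CODEQL_VERSION_LATEST="2.6.3"

    if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
      VERSIONS_JSON='[null]'
    else
      VERSIONS_JSON='[null, "latest"]'
    fi
    echo "Suggested matrix config for analysis job: $VERSIONS_JSON"   # prints [null, "latest"] for these inputs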
.github/workflows/pr-checks.yml (vendored, 1044 lines changed)
File diff suppressed because it is too large.
.github/workflows/python-deps.yml (vendored, 8 lines changed)
@@ -58,7 +58,7 @@ jobs:

       - name: Verify packages installed
         run: |
-          $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh ${PYTHON_VERSION}
+          $GITHUB_WORKSPACE/python-setup/tests/check_requests_2_26_0.sh ${PYTHON_VERSION}

   # This one shouldn't fail, but also won't install packages
   test-setup-python-scripts-non-standard-location:
@@ -123,6 +123,10 @@ jobs:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
       - uses: actions/checkout@v2

+      - uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+
       - name: Initialize CodeQL
         uses: ./init
         with:
@@ -149,5 +153,5 @@ jobs:

       - name: Verify packages installed
         run: |
-          $cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\tests\\check_requests_123.ps1"
+          $cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\tests\\check_requests_2_26_0.ps1"
           powershell -File $cmd $Env:PYTHON_VERSION
.github/workflows/script/verify-pr-checks.sh (vendored, new executable file, 25 lines)
@@ -0,0 +1,25 @@
#!/bin/bash
set -eu

# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
  # If we get a fail here then this workflow needs attention...
  >&2 echo "Failed: Repo should be clean before testing!"
  exit 1
fi

# Wipe the generated PR checks in case there are extra unnecessary files in there
rm -rf .github/workflows/__*

# Generate the PR checks
cd pr-checks && python3 sync.py

# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
  # If we get a fail here then the PR needs attention
  git diff
  git status
  >&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && python3 sync.py' to update"
  exit 1
fi
echo "Success: PR checks are up to date"
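The script above assumes it is run from the repository root. A minimal sketch of running the same check locally, assuming Python 3 is on PATH and using the ruamel.yaml dependency named in the generated-file headers:

    # Install the generator dependency, then verify the generated PR checks are current.
    pip install ruamel.yaml
    bash .github/workflows/script/verify-pr-checks.sh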
.github/workflows/update-dependencies.yml (vendored, 19 lines changed)
@@ -4,10 +4,27 @@ on:
     types: [opened, synchronize, reopened, ready_for_review, labeled]

 jobs:
+  check:
+    name: Check for relevance
+    runs-on: ubuntu-latest
+    if: |
+      contains(github.event.pull_request.labels.*.name, 'Update dependencies') &&
+      (github.actor == 'dependabot[bot]' || github.actor == 'github-actions[bot]') &&
+      github.repository == 'github/codeql-action' &&
+      github.head.repo.full_name == 'github/codeql-action' &&
+      github.base.repo.full_name == 'github/codeql-action'
+    env:
+      ACTOR: '${{ github.actor }}'
+
+    steps:
+      - name: Check Actor
+        run: echo "This PR should run the Update Dependencies workflow because the actor is $ACTOR, there is no fork involved, and the 'Update dependencies' label exists."
+
   update:
+    needs: check
+    environment: Update dependencies
     name: Update dependencies
     runs-on: macos-latest
-    if: contains(github.event.pull_request.labels.*.name, 'Update dependencies')
     steps:
       - name: Checkout repository
         uses: actions/checkout@v2
CHANGELOG.md (38 lines changed)
@@ -4,6 +4,44 @@

 No user facing changes.

+## 1.0.19 - 18 Oct 2021
+
+No user facing changes.
+
+## 1.0.18 - 08 Oct 2021
+
+- Fixed a bug where some builds were no longer being traced correctly. [#766](https://github.com/github/codeql-action/pull/766)
+
+## 1.0.17 - 07 Oct 2021
+
+- Update default CodeQL bundle version to 2.6.3. [#761](https://github.com/github/codeql-action/pull/761)
+
+## 1.0.16 - 05 Oct 2021
+
+No user facing changes.
+
+## 1.0.15 - 22 Sep 2021
+
+- Update default CodeQL bundle version to 2.6.2. [#746](https://github.com/github/codeql-action/pull/746)
+
+## 1.0.14 - 09 Sep 2021
+
+- Update default CodeQL bundle version to 2.6.1. [#733](https://github.com/github/codeql-action/pull/733)
+
+## 1.0.13 - 06 Sep 2021
+
+- Update default CodeQL bundle version to 2.6.0. [#712](https://github.com/github/codeql-action/pull/712)
+- Update baseline lines of code counter for python. All multi-line strings are counted as code. [#714](https://github.com/github/codeql-action/pull/714)
+- Remove old baseline LoC injection [#715](https://github.com/github/codeql-action/pull/715)
+
+## 1.0.12 - 16 Aug 2021
+
+- Update README to include a sample permissions block. [#689](https://github.com/github/codeql-action/pull/689)
+
+## 1.0.11 - 09 Aug 2021
+
+- Update default CodeQL bundle version to 2.5.9. [#687](https://github.com/github/codeql-action/pull/687)
+
 ## 1.0.10 - 03 Aug 2021

 - Fix an issue where a summary of diagnostics information from CodeQL was not output to the logs of the `analyze` step of the Action. [#672](https://github.com/github/codeql-action/pull/672)
@@ -58,6 +58,20 @@ Here are a few things you can do that will increase the likelihood of your pull
 - Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
 - Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).

+## Releasing (write access required)
+
+1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
+   This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v1` release branch.
+
+   A release is automatically started every Monday via a scheduled run of this workflow, however you can start a release manually by triggering a run via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
+1. The workflow run will open a pull request titled "Merge main into v1". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
+1. Review the checklist items in the pull request description.
+   Once you've checked off all but the last of these, approve the PR and automerge it.
+1. When the "Merge main into v1" pull request is merged into the `v1` branch, the "Tag release and merge back" workflow will create a mergeback PR.
+   This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into v1" pull request, and bumps the patch version of the CodeQL Action.
+
+   Approve the mergeback PR and automerge it. Once the mergeback has been merged into main, the release is complete.
+
 ## Resources

 - [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
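The first release step added above starts from a workflow dispatch of the "Update release branch" workflow. A hedged command-line equivalent, assuming the GitHub CLI (`gh`) is installed and authenticated with write access to the repository:

    # Manually start a release by dispatching the workflow referenced above.
    gh workflow run update-release-branch.yml --repo github/codeql-action --ref main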
README.md (16 lines changed)
@@ -42,6 +42,14 @@ jobs:
     # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
     runs-on: ubuntu-latest

+    permissions:
+      # required for all workflows
+      security-events: write
+
+      # only required for workflows in private repositories
+      actions: read
+      contents: read
+
     steps:
       - name: Checkout repository
         uses: actions/checkout@v2
@@ -66,8 +74,8 @@ jobs:
     # project uses a compiled language

     #- run: |
     #   make bootstrap
     #   make release

     - name: Perform CodeQL Analysis
       uses: github/codeql-action/analyze@v1
@@ -83,8 +91,8 @@ If you prefer to integrate this within an existing CI workflow, it should end up

     # Here is where you build your code
     - run: |
       make bootstrap
       make release

     - name: Perform CodeQL Analysis
       uses: github/codeql-action/analyze@v1
lib/actions-util.js (generated, 40 lines changed)
@@ -60,14 +60,14 @@ function getTemporaryDirectory() {
     const value = process.env["CODEQL_ACTION_TEMP"];
     return value !== undefined && value !== ""
         ? value
-        : util_1.getRequiredEnvParam("RUNNER_TEMP");
+        : (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
 }
 exports.getTemporaryDirectory = getTemporaryDirectory;
 function getToolCacheDirectory() {
     const value = process.env["CODEQL_ACTION_TOOL_CACHE"];
     return value !== undefined && value !== ""
         ? value
-        : util_1.getRequiredEnvParam("RUNNER_TOOL_CACHE");
+        : (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE");
 }
 exports.getToolCacheDirectory = getToolCacheDirectory;
 /**
@@ -98,7 +98,7 @@ const getCommitOid = async function (ref = "HEAD") {
     }
     catch (e) {
         core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
-        return util_1.getRequiredEnvParam("GITHUB_SHA");
+        return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
     }
 };
 exports.getCommitOid = getCommitOid;
@@ -251,14 +251,14 @@ async function validateWorkflow() {
         workflow = await getWorkflow();
     }
     catch (e) {
-        return `error: getWorkflow() failed: ${e.toString()}`;
+        return `error: getWorkflow() failed: ${String(e)}`;
     }
     let workflowErrors;
     try {
         workflowErrors = getWorkflowErrors(workflow);
     }
     catch (e) {
-        return `error: getWorkflowErrors() failed: ${e.toString()}`;
+        return `error: getWorkflowErrors() failed: ${String(e)}`;
     }
     if (workflowErrors.length > 0) {
         let message;
@@ -266,7 +266,7 @@ async function validateWorkflow() {
         message = formatWorkflowErrors(workflowErrors);
     }
     catch (e) {
-        return `error: formatWorkflowErrors() failed: ${e.toString()}`;
+        return `error: formatWorkflowErrors() failed: ${String(e)}`;
     }
     core.warning(message);
 }
@@ -288,7 +288,7 @@ function formatWorkflowCause(errors) {
 exports.formatWorkflowCause = formatWorkflowCause;
 async function getWorkflow() {
     const relativePath = await getWorkflowPath();
-    const absolutePath = path.join(util_1.getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
+    const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
     return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
 }
 exports.getWorkflow = getWorkflow;
@@ -296,10 +296,10 @@ exports.getWorkflow = getWorkflow;
* Get the path of the currently executing workflow.
|
* Get the path of the currently executing workflow.
|
||||||
*/
|
*/
|
||||||
async function getWorkflowPath() {
|
async function getWorkflowPath() {
|
||||||
const repo_nwo = util_1.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
|
||||||
const owner = repo_nwo[0];
|
const owner = repo_nwo[0];
|
||||||
const repo = repo_nwo[1];
|
const repo = repo_nwo[1];
|
||||||
const run_id = Number(util_1.getRequiredEnvParam("GITHUB_RUN_ID"));
|
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
||||||
const apiClient = api.getActionsApiClient();
|
const apiClient = api.getActionsApiClient();
|
||||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
|
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
|
||||||
owner,
|
owner,
|
||||||
@@ -314,7 +314,7 @@ async function getWorkflowPath() {
|
|||||||
* Get the workflow run ID.
|
* Get the workflow run ID.
|
||||||
*/
|
*/
|
||||||
function getWorkflowRunID() {
|
function getWorkflowRunID() {
|
||||||
const workflowRunID = parseInt(util_1.getRequiredEnvParam("GITHUB_RUN_ID"), 10);
|
const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
|
||||||
if (Number.isNaN(workflowRunID)) {
|
if (Number.isNaN(workflowRunID)) {
|
||||||
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
||||||
}
|
}
|
||||||
@@ -335,7 +335,7 @@ async function getAnalysisKey() {
|
|||||||
return analysisKey;
|
return analysisKey;
|
||||||
}
|
}
|
||||||
const workflowPath = await getWorkflowPath();
|
const workflowPath = await getWorkflowPath();
|
||||||
const jobName = util_1.getRequiredEnvParam("GITHUB_JOB");
|
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||||
analysisKey = `${workflowPath}:${jobName}`;
|
analysisKey = `${workflowPath}:${jobName}`;
|
||||||
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
||||||
return analysisKey;
|
return analysisKey;
|
||||||
@@ -372,8 +372,8 @@ exports.computeAutomationID = computeAutomationID;
|
|||||||
async function getRef() {
|
async function getRef() {
|
||||||
// Will be in the form "refs/heads/master" on a push event
|
// Will be in the form "refs/heads/master" on a push event
|
||||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||||
const ref = util_1.getRequiredEnvParam("GITHUB_REF");
|
const ref = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
|
||||||
const sha = util_1.getRequiredEnvParam("GITHUB_SHA");
|
const sha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||||
// For pull request refs we want to detect whether the workflow
|
// For pull request refs we want to detect whether the workflow
|
||||||
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
||||||
// than the 'merge' ref. If so, we want to convert the ref that
|
// than the 'merge' ref. If so, we want to convert the ref that
|
||||||
@@ -382,14 +382,14 @@ async function getRef() {
|
|||||||
if (!pull_ref_regex.test(ref)) {
|
if (!pull_ref_regex.test(ref)) {
|
||||||
return ref;
|
return ref;
|
||||||
}
|
}
|
||||||
const head = await exports.getCommitOid("HEAD");
|
const head = await (0, exports.getCommitOid)("HEAD");
|
||||||
// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
|
// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
|
||||||
// in actions/checkout@v1 this may not be true as it checks out the repository
|
// in actions/checkout@v1 this may not be true as it checks out the repository
|
||||||
// using GITHUB_REF. There is a subtle race condition where
|
// using GITHUB_REF. There is a subtle race condition where
|
||||||
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
||||||
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
|
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
|
||||||
const hasChangedRef = sha !== head &&
|
const hasChangedRef = sha !== head &&
|
||||||
(await exports.getCommitOid(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
|
(await (0, exports.getCommitOid)(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
|
||||||
head;
|
head;
|
||||||
if (hasChangedRef) {
|
if (hasChangedRef) {
|
||||||
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
||||||
@@ -478,7 +478,7 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
|
|||||||
async function sendStatusReport(statusReport) {
|
async function sendStatusReport(statusReport) {
|
||||||
const statusReportJSON = JSON.stringify(statusReport);
|
const statusReportJSON = JSON.stringify(statusReport);
|
||||||
core.debug(`Sending status report: ${statusReportJSON}`);
|
core.debug(`Sending status report: ${statusReportJSON}`);
|
||||||
const nwo = util_1.getRequiredEnvParam("GITHUB_REPOSITORY");
|
const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
|
||||||
const [owner, repo] = nwo.split("/");
|
const [owner, repo] = nwo.split("/");
|
||||||
const client = api.getActionsApiClient();
|
const client = api.getActionsApiClient();
|
||||||
try {
|
try {
|
||||||
@@ -491,7 +491,7 @@ async function sendStatusReport(statusReport) {
|
|||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
console.log(e);
|
console.log(e);
|
||||||
if (util_1.isHTTPError(e)) {
|
if ((0, util_1.isHTTPError)(e)) {
|
||||||
switch (e.status) {
|
switch (e.status) {
|
||||||
case 403:
|
case 403:
|
||||||
if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
|
if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
|
||||||
@@ -511,7 +511,7 @@ async function sendStatusReport(statusReport) {
|
|||||||
// schema incompatibility when reporting status
|
// schema incompatibility when reporting status
|
||||||
// this means that this action version is no longer compatible with the API
|
// this means that this action version is no longer compatible with the API
|
||||||
// we still want to continue as it is likely the analysis endpoint will work
|
// we still want to continue as it is likely the analysis endpoint will work
|
||||||
if (util_1.getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
|
if ((0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
|
||||||
core.debug(INCOMPATIBLE_MSG);
|
core.debug(INCOMPATIBLE_MSG);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
@@ -545,14 +545,14 @@ exports.isRunningLocalAction = isRunningLocalAction;
|
|||||||
// Get the location where the action is running from.
|
// Get the location where the action is running from.
|
||||||
// This can be used to get the actions name or tell if we're running a local action.
|
// This can be used to get the actions name or tell if we're running a local action.
|
||||||
function getRelativeScriptPath() {
|
function getRelativeScriptPath() {
|
||||||
const runnerTemp = util_1.getRequiredEnvParam("RUNNER_TEMP");
|
const runnerTemp = (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
|
||||||
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
|
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
|
||||||
return path.relative(actionsDirectory, __filename);
|
return path.relative(actionsDirectory, __filename);
|
||||||
}
|
}
|
||||||
exports.getRelativeScriptPath = getRelativeScriptPath;
|
exports.getRelativeScriptPath = getRelativeScriptPath;
|
||||||
// Reads the contents of GITHUB_EVENT_PATH as a JSON object
|
// Reads the contents of GITHUB_EVENT_PATH as a JSON object
|
||||||
function getWorkflowEvent() {
|
function getWorkflowEvent() {
|
||||||
const eventJsonFile = util_1.getRequiredEnvParam("GITHUB_EVENT_PATH");
|
const eventJsonFile = (0, util_1.getRequiredEnvParam)("GITHUB_EVENT_PATH");
|
||||||
try {
|
try {
|
||||||
return JSON.parse(fs.readFileSync(eventJsonFile, "utf-8"));
|
return JSON.parse(fs.readFileSync(eventJsonFile, "utf-8"));
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
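Most of the churn in `lib/actions-util.js` above is not a behavioural change in the Action itself: the rewrite from `util_1.getRequiredEnvParam(...)` to `(0, util_1.getRequiredEnvParam)(...)` is the indirect-call pattern that newer TypeScript compilers emit for calls to imported functions, so the callee no longer receives the module namespace object as `this`. Likewise, `${e.toString()}` becoming `${String(e)}` reflects caught values now being typed as `unknown`. A small sketch of the `(0, ...)` behaviour (the `helpers` object below is hypothetical, not part of the Action):

```typescript
// Illustrative only: why `(0, obj.fn)(...)` differs from `obj.fn(...)`.
// The `helpers` object is hypothetical.
const helpers = {
  whoAmI(this: unknown): string {
    // A plain `helpers.whoAmI()` call binds `this` to `helpers`;
    // `(0, helpers.whoAmI)()` calls the function as a bare value,
    // so in strict mode `this` is undefined.
    return this === helpers ? "called as a method" : "called as a plain function";
  },
};

console.log(helpers.whoAmI());       // "called as a method"
console.log((0, helpers.whoAmI)());  // "called as a plain function"
```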
`lib/actions-util.test.js` (generated, 94 changed lines)

@@ -26,52 +26,52 @@ const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const ava_1 = __importDefault(require("ava"));
 const yaml = __importStar(require("js-yaml"));
-const sinon_1 = __importDefault(require("sinon"));
+const sinon = __importStar(require("sinon"));
 const actionsutil = __importStar(require("./actions-util"));
 const testing_utils_1 = require("./testing-utils");
 const util_1 = require("./util");
 function errorCodes(actual, expected) {
 return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
 }
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
-ava_1.default("getRef() throws on the empty string", async (t) => {
+(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
 process.env["GITHUB_REF"] = "";
 await t.throwsAsync(actionsutil.getRef);
 });
-ava_1.default("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
+(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
 const expectedRef = "refs/pull/1/merge";
 const currentSha = "a".repeat(40);
 process.env["GITHUB_REF"] = expectedRef;
 process.env["GITHUB_SHA"] = currentSha;
-const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
+const callback = sinon.stub(actionsutil, "getCommitOid");
 callback.withArgs("HEAD").resolves(currentSha);
 const actualRef = await actionsutil.getRef();
 t.deepEqual(actualRef, expectedRef);
 callback.restore();
 });
-ava_1.default("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
+(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
 const expectedRef = "refs/pull/1/merge";
 process.env["GITHUB_REF"] = expectedRef;
 process.env["GITHUB_SHA"] = "b".repeat(40);
 const sha = "a".repeat(40);
-const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
+const callback = sinon.stub(actionsutil, "getCommitOid");
 callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
 callback.withArgs("HEAD").resolves(sha);
 const actualRef = await actionsutil.getRef();
 t.deepEqual(actualRef, expectedRef);
 callback.restore();
 });
-ava_1.default("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
+(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
 process.env["GITHUB_REF"] = "refs/pull/1/merge";
 process.env["GITHUB_SHA"] = "a".repeat(40);
-const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
+const callback = sinon.stub(actionsutil, "getCommitOid");
 callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
 callback.withArgs("HEAD").resolves("b".repeat(40));
 const actualRef = await actionsutil.getRef();
 t.deepEqual(actualRef, "refs/pull/1/head");
 callback.restore();
 });
-ava_1.default("computeAutomationID()", async (t) => {
+(0, ava_1.default)("computeAutomationID()", async (t) => {
 let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
 t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
 // check the environment sorting

@@ -87,31 +87,31 @@ ava_1.default("computeAutomationID()", async (t) => {
 actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
 t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
 });
-ava_1.default("getWorkflowErrors() when on is empty", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on is empty", (t) => {
 const errors = actionsutil.getWorkflowErrors({ on: {} });
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
 const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on.push is an array missing push", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing push", (t) => {
 const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
 t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
 });
-ava_1.default("getWorkflowErrors() when on.push is valid", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push is valid", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: ["push", "pull_request"],
 });
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on.push is a valid superset", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push is a valid superset", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: ["push", "pull_request", "schedule"],
 });
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on.push should not have a path", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push should not have a path", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: {
 push: { branches: ["main"], paths: ["test/*"] },

@@ -120,25 +120,25 @@ ava_1.default("getWorkflowErrors() when on.push should not have a path", (t) =>
 });
 t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
 });
-ava_1.default("getWorkflowErrors() when on.push is a correct object", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push is a correct object", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
 });
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on.pull_requests is a string", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
 });
 t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
 });
-ava_1.default("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: { push: { branches: "*" }, pull_request: { branches: "*" } },
 });
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
 const errors = actionsutil.getWorkflowErrors(yaml.load(`
 on:
 push:

@@ -146,7 +146,7 @@ on:
 `));
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on.push is mismatched", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: {
 push: { branches: ["main"] },

@@ -155,7 +155,7 @@ ava_1.default("getWorkflowErrors() when on.push is mismatched", (t) => {
 });
 t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
 });
-ava_1.default("getWorkflowErrors() when on.push is not mismatched", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push is not mismatched", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: {
 push: { branches: ["main", "feature"] },

@@ -164,7 +164,7 @@ ava_1.default("getWorkflowErrors() when on.push is not mismatched", (t) => {
 });
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: {
 push: { branches: ["main"] },

@@ -173,7 +173,7 @@ ava_1.default("getWorkflowErrors() when on.push is mismatched for pull_request",
 });
 t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
 });
-ava_1.default("getWorkflowErrors() for a range of malformed workflows", (t) => {
+(0, ava_1.default)("getWorkflowErrors() for a range of malformed workflows", (t) => {
 t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
 on: {
 push: 1,

@@ -227,7 +227,7 @@ ava_1.default("getWorkflowErrors() for a range of malformed workflows", (t) => {
 },
 }), []));
 });
-ava_1.default("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
 const errors = actionsutil.getWorkflowErrors(yaml.load(`
 name: "CodeQL"
 on:

@@ -237,7 +237,7 @@ on:
 `));
 t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
 });
-ava_1.default("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: {
 push: { branches: ["feature/*"] },

@@ -246,7 +246,7 @@ ava_1.default("getWorkflowErrors() when on.pull_request for wildcard branches",
 });
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
 const errors = actionsutil.getWorkflowErrors({
 on: {
 push: { branches: ["feature/moose"] },

@@ -255,7 +255,7 @@ ava_1.default("getWorkflowErrors() when on.pull_request for mismatched wildcard
 });
 t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
 });
-ava_1.default("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
 process.env.GITHUB_JOB = "test";
 const errors = actionsutil.getWorkflowErrors({
 on: ["push", "pull_request"],

@@ -263,24 +263,24 @@ ava_1.default("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
 });
 t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
 });
-ava_1.default("formatWorkflowErrors() when there is one error", (t) => {
+(0, ava_1.default)("formatWorkflowErrors() when there is one error", (t) => {
 const message = actionsutil.formatWorkflowErrors([
 actionsutil.WorkflowErrors.CheckoutWrongHead,
 ]);
 t.true(message.startsWith("1 issue was detected with this workflow:"));
 });
-ava_1.default("formatWorkflowErrors() when there are multiple errors", (t) => {
+(0, ava_1.default)("formatWorkflowErrors() when there are multiple errors", (t) => {
 const message = actionsutil.formatWorkflowErrors([
 actionsutil.WorkflowErrors.CheckoutWrongHead,
 actionsutil.WorkflowErrors.PathsSpecified,
 ]);
 t.true(message.startsWith("2 issues were detected with this workflow:"));
 });
-ava_1.default("formatWorkflowCause() with no errors", (t) => {
+(0, ava_1.default)("formatWorkflowCause() with no errors", (t) => {
 const message = actionsutil.formatWorkflowCause([]);
 t.deepEqual(message, undefined);
 });
-ava_1.default("formatWorkflowCause()", (t) => {
+(0, ava_1.default)("formatWorkflowCause()", (t) => {
 const message = actionsutil.formatWorkflowCause([
 actionsutil.WorkflowErrors.CheckoutWrongHead,
 actionsutil.WorkflowErrors.PathsSpecified,

@@ -288,7 +288,7 @@ ava_1.default("formatWorkflowCause()", (t) => {
 t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
 t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
 });
-ava_1.default("patternIsSuperset()", (t) => {
+(0, ava_1.default)("patternIsSuperset()", (t) => {
 t.false(actionsutil.patternIsSuperset("main-*", "main"));
 t.true(actionsutil.patternIsSuperset("*", "*"));
 t.true(actionsutil.patternIsSuperset("*", "main-*"));

@@ -310,7 +310,7 @@ ava_1.default("patternIsSuperset()", (t) => {
 t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
 t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
 });
-ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when branches contain dots", (t) => {
 const errors = actionsutil.getWorkflowErrors(yaml.load(`
 on:
 push:

@@ -321,7 +321,7 @@ ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
 `));
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on.push has a trailing comma", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on.push has a trailing comma", (t) => {
 const errors = actionsutil.getWorkflowErrors(yaml.load(`
 name: "CodeQL"
 on:

@@ -333,7 +333,7 @@ on:
 `));
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
+(0, ava_1.default)("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
 process.env.GITHUB_JOB = "test";
 const errors = actionsutil.getWorkflowErrors(yaml.load(`
 name: "CodeQL"

@@ -357,7 +357,7 @@ jobs:
 `));
 t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
 });
-ava_1.default("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
+(0, ava_1.default)("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
 process.env.GITHUB_JOB = "test3";
 const errors = actionsutil.getWorkflowErrors(yaml.load(`
 name: "CodeQL"

@@ -381,13 +381,13 @@ jobs:
 `));
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() when on is missing", (t) => {
+(0, ava_1.default)("getWorkflowErrors() when on is missing", (t) => {
 const errors = actionsutil.getWorkflowErrors(yaml.load(`
 name: "CodeQL"
 `));
 t.deepEqual(...errorCodes(errors, []));
 });
-ava_1.default("getWorkflowErrors() with a different on setup", (t) => {
+(0, ava_1.default)("getWorkflowErrors() with a different on setup", (t) => {
 t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
 name: "CodeQL"
 on: "workflow_dispatch"

@@ -402,7 +402,7 @@ on:
 workflow_dispatch: {}
 `)), []));
 });
-ava_1.default("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
+(0, ava_1.default)("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
 t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
 name: "CodeQL"
 on:

@@ -414,16 +414,16 @@ name: "CodeQL"
 on: ["push"]
 `)), []));
 });
-ava_1.default("initializeEnvironment", (t) => {
+(0, ava_1.default)("initializeEnvironment", (t) => {
-util_1.initializeEnvironment(util_1.Mode.actions, "1.2.3");
+(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
-t.deepEqual(util_1.getMode(), util_1.Mode.actions);
+t.deepEqual((0, util_1.getMode)(), util_1.Mode.actions);
 t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
-util_1.initializeEnvironment(util_1.Mode.runner, "4.5.6");
+(0, util_1.initializeEnvironment)(util_1.Mode.runner, "4.5.6");
-t.deepEqual(util_1.getMode(), util_1.Mode.runner);
+t.deepEqual((0, util_1.getMode)(), util_1.Mode.runner);
 t.deepEqual(process.env.CODEQL_ACTION_VERSION, "4.5.6");
 });
-ava_1.default("isAnalyzingDefaultBranch()", async (t) => {
+(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
-await util_1.withTmpDir(async (tmpDir) => {
+await (0, util_1.withTmpDir)(async (tmpDir) => {
 const envFile = path.join(tmpDir, "event.json");
 fs.writeFileSync(envFile, JSON.stringify({
 repository: {

File diff suppressed because one or more lines are too long
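Besides the same indirect-call rewrites, the substantive change in `lib/actions-util.test.js` above is how sinon is imported: the emitted `__importDefault(require("sinon"))` (used as `sinon_1.default`) becomes `__importStar(require("sinon"))` (used as `sinon`), i.e. a default import has been replaced by a namespace import in the TypeScript source. Roughly, in source terms (a sketch, not the repository's actual test file):

```typescript
// Sketch of the import change implied by the emitted JavaScript above.
// Before (emitted as `sinon_1.default.stub(...)`):
//   import sinon from "sinon";
// After (emitted as `sinon.stub(...)` on a namespace object):
import * as sinon from "sinon";
import * as actionsutil from "./actions-util";

// Stubbing is unchanged by the import style:
const stub = sinon.stub(actionsutil, "getCommitOid");
stub.withArgs("HEAD").resolves("a".repeat(40));
// ...exercise the code under test...
stub.restore();
```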
`lib/analysis-paths.test.js` (generated, 8 changed lines)

@@ -27,8 +27,8 @@ const ava_1 = __importDefault(require("ava"));
 const analysisPaths = __importStar(require("./analysis-paths"));
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
-ava_1.default("emptyPaths", async (t) => {
+(0, ava_1.default)("emptyPaths", async (t) => {
 return await util.withTmpDir(async (tmpDir) => {
 const config = {
 languages: [],

@@ -49,7 +49,7 @@ ava_1.default("emptyPaths", async (t) => {
 t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
 });
 });
-ava_1.default("nonEmptyPaths", async (t) => {
+(0, ava_1.default)("nonEmptyPaths", async (t) => {
 return await util.withTmpDir(async (tmpDir) => {
 const config = {
 languages: [],

@@ -70,7 +70,7 @@ ava_1.default("nonEmptyPaths", async (t) => {
 t.is(process.env["LGTM_INDEX_FILTERS"], "include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**");
 });
 });
-ava_1.default("exclude temp dir", async (t) => {
+(0, ava_1.default)("exclude temp dir", async (t) => {
 return await util.withTmpDir(async (toolCacheDir) => {
 const tempDir = path.join(process.cwd(), "codeql-runner-temp");
 const config = {
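The `nonEmptyPaths` assertion above also documents the filter format that ends up in `LGTM_INDEX_FILTERS`: one `include:` line per configured path and one `exclude:` line per ignored path, joined with newlines. As a rough illustration of that format (not the Action's actual implementation), such a value could be built like this:

```typescript
// Illustrative helper: build an LGTM_INDEX_FILTERS-style value from
// configured paths. Not the Action's real code; it only mirrors the
// format asserted in the test above.
function buildIndexFilters(paths: string[], pathsIgnore: string[]): string {
  const lines = [
    ...paths.map((p) => `include:${p}`),
    ...pathsIgnore.map((p) => `exclude:${p}`),
  ];
  return lines.join("\n");
}

// Produces the shape expected by the "nonEmptyPaths" test:
console.log(buildIndexFilters(
  ["path1", "path2", "**/path3"],
  ["path4", "path5", "path6/**"],
));
// include:path1
// include:path2
// include:**/path3
// exclude:path4
// exclude:path5
// exclude:path6/**
```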
`lib/analysis-paths.test.js` sourcemap (generated)

@@ -1 +1 @@
(single-line sourcemap for `analysis-paths.test.js` regenerated to match the changes above; the `mappings` blob is not reproduced)
`lib/analyze-action.js` (generated, 20 changed lines)

@@ -24,6 +24,7 @@ const path = __importStar(require("path"));
 const core = __importStar(require("@actions/core"));
 const actionsUtil = __importStar(require("./actions-util"));
 const analyze_1 = require("./analyze");
+const codeql_1 = require("./codeql");
 const config_utils_1 = require("./config-utils");
 const database_upload_1 = require("./database-upload");
 const logging_1 = require("./logging");

@@ -53,23 +54,25 @@ async function run() {
 if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
 return;
 }
-const logger = logging_1.getActionsLogger();
+const logger = (0, logging_1.getActionsLogger)();
-config = await config_utils_1.getConfig(actionsUtil.getTemporaryDirectory(), logger);
+config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
 if (config === undefined) {
 throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
 }
+await util.enrichEnvironment(util.Mode.actions, await (0, codeql_1.getCodeQL)(config.codeQLCmd));
 const apiDetails = {
 auth: actionsUtil.getRequiredInput("token"),
 url: util.getRequiredEnvParam("GITHUB_SERVER_URL"),
 };
 const outputDir = actionsUtil.getRequiredInput("output");
 const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger);
-await analyze_1.runFinalize(outputDir, threads, config, logger);
+const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram"));
+await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
 if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
-runStats = await analyze_1.runQueries(outputDir, util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
+runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
 }
 if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
-await analyze_1.runCleanup(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
+await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
 }
 const dbLocations = {};
 for (const language of config.languages) {

@@ -82,10 +85,11 @@ async function run() {
 else {
 logger.info("Not uploading results");
 }
-const repositoryNwo = repository_1.parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
+const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
-await database_upload_1.uploadDatabases(repositoryNwo, config, apiDetails, logger);
+await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
 }
-catch (error) {
+catch (origError) {
+const error = origError instanceof Error ? origError : new Error(String(origError));
 core.setFailed(error.message);
 console.log(error);
 if (error instanceof analyze_1.CodeQLAnalysisError) {
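The `catch (origError)` rewrite at the end of `lib/analyze-action.js` is the source-level counterpart of the `String(e)` changes earlier: with `useUnknownInCatchVariables` (enabled under `strict` in TypeScript 4.4 and later) a caught value is typed `unknown`, so it must be narrowed to an `Error` before `error.message` can be read. A minimal sketch of that pattern, independent of the Action's code:

```typescript
// Minimal sketch of narrowing an unknown catch variable before use.
async function runAndReport(task: () => Promise<void>): Promise<void> {
  try {
    await task();
  } catch (origError) {
    // `origError` is `unknown`, so wrap anything that is not already an Error.
    const error =
      origError instanceof Error ? origError : new Error(String(origError));
    // Now `error.message` and `error.stack` are safe to use.
    console.error(`Task failed: ${error.message}`);
    throw error;
  }
}
```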
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAMmB;AACnB,iDAAmD;AACnD,uDAAoD;AACpD,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAE3C,6CAA+B;AAE/B,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAUvC,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;IAEb,MAAM,MAAM,GACV,CAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,WAAW,GAAmC,SAAS,CAAC;IAC5D,IAAI,QAAQ,GAAoC,SAAS,CAAC;IAC1D,IAAI,MAAM,GAAuB,SAAS,CAAC;IAC3C,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAE3D,IAAI;QACF,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,GAAG,MAAM,wBAAS,CAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;QACtE,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SACnD,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,OAAO,GAAG,IAAI,CAAC,cAAc,CACjC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EACvC,MAAM,CACP,CAAC;QACF,MAAM,qBAAW,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,KAAK,MAAM,EAAE;YAC3D,QAAQ,GAAG,MAAM,oBAAU,CACzB,SAAS,EACT,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,OAAO,EACP,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,MAAM,EACN,MAAM,CACP,CAAC;SACH;QAED,IAAI,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,KAAK,MAAM,EAAE;YAC5D,MAAM,oBAAU,CACd,MAAM,EACN,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,IAAI,QAAQ,EACzD,MAAM,CACP,CAAC;SACH;QAED,MAAM,WAAW,GAA+B,EAAE,CAAC;QACnD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,WAAW,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SACtE;QACD,IAAI,CAAC,SAAS,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;QAE5C,IAAI,QAAQ,IAAI,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EAAE;YACjE,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CAC9C,SAAS,EACT,MAAM,CAAC,aAAa,EACpB,UAAU,EACV,MAAM,CACP,CAAC;SACH;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;SACtC;QAED,MAAM,aAAa,GAAG,+BAAkB,CACtC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAC9C,CAAC;QACF,MAAM,iCAAe,CAAC,aAAa,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;KAClE;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,MAAM,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;YAC/C,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SACjD;aAAM;YACL,MAAM,gBAAgB,CAAC,SAAS,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC;SACrD;QAED,OAAO;KACR;YAAS;QACR,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,MAAM,KAAK,SAAS,EAAE;YAC1C,IAAI,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;YAC7D,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,MAAM,iBAAiB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;gBACvE,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;gBAE1D,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE;oBACnC,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAA
E,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;oBAC7D,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;wBAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;4BAClB,IAAI,CAAC,UAAU,CACb,uBAAuB,QAAQ,MAAM,KAAK,CAAC,IAAI,EAAE,CAClD,CAAC;4BACF,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAC/C,CAAC;4BACF,IAAI,CAAC,QAAQ,EAAE,CAAC;yBACjB;6BAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;4BAC9B,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;yBAC7C;qBACF;gBACH,CAAC,CAAC;gBACF,YAAY,CAAC,aAAa,CAAC,CAAC;aAC7B;SACF;KACF;IAED,IAAI,QAAQ,IAAI,WAAW,EAAE;QAC3B,MAAM,gBAAgB,CAAC,SAAS,EAAE,EAAE,GAAG,QAAQ,EAAE,GAAG,WAAW,EAAE,CAAC,CAAC;KACpE;SAAM,IAAI,QAAQ,EAAE;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,EAAE,GAAG,QAAQ,EAAE,CAAC,CAAC;KACpD;SAAM;QACL,MAAM,gBAAgB,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;KAC9C;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAMmB;AACnB,qCAAqC;AACrC,iDAAmD;AACnD,uDAAoD;AACpD,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAE3C,6CAA+B;AAE/B,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAUvC,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;IAEb,MAAM,MAAM,GACV,CAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,WAAW,GAAmC,SAAS,CAAC;IAC5D,IAAI,QAAQ,GAAoC,SAAS,CAAC;IAC1D,IAAI,MAAM,GAAuB,SAAS,CAAC;IAC3C,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAE3D,IAAI;QACF,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;QACtE,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,MAAM,IAAI,CAAC,iBAAiB,CAC1B,IAAI,CAAC,IAAI,CAAC,OAAO,EACjB,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAClC,CAAC;QAEF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SACnD,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,OAAO,GAAG,IAAI,CAAC,cAAc,CACjC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EACvC,MAAM,CACP,CAAC;QACF,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,CAAC;QACvE,MAAM,IAAA,qBAAW,EAAC,SAAS,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAI,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,KAAK,MAAM,EAAE;YAC3D,QAAQ,GAAG,MAAM,IAAA,oBAAU,EACzB,SAAS,EACT,MAAM,EACN,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,OAAO,EACP,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,MAAM,EACN,MAAM,CACP,CAAC;SACH;QAED,IAAI,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,KAAK,MAAM,EAAE;YAC5D,MAAM,IAAA,oBAAU,EACd,MAAM,EACN,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,IAAI,QAAQ,EACzD,MAAM,CACP,CAAC;SACH;QAED,MAAM,WAAW,GAA+B,EAAE,CAAC;QACnD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,WAAW,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SACtE;QACD,IAAI,CAAC,SAAS,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;QAE5C,IAAI,QAAQ,IAAI,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EAAE;YACjE,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CAC9C,SAAS,EACT,MAAM,CAAC,aAAa,EACpB,UAAU,EACV,MAAM,CACP,CAAC;SACH;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;SACtC;QAED,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAC9C,CAAC;QACF,MAAM,IAAA,iCAAe,EAAC,aAAa,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;KAClE;IAAC,OAAO,SAAS,EAAE;QAClB,MAAM,KAAK,GACT,SAAS,YAAY,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC;QACxE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,MAAM,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;YAC/C,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SACjD;aAAM;YACL,MAAM,gBAAgB,CAAC,SAAS,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC;SACrD;QAED,OAAO;KACR;YAAS;QACR,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,MAAM,KAAK,SAAS,EAAE;
YAC1C,IAAI,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;YAC7D,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,MAAM,iBAAiB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;gBACvE,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;gBAE1D,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE;oBACnC,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;oBAC7D,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;wBAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;4BAClB,IAAI,CAAC,UAAU,CACb,uBAAuB,QAAQ,MAAM,KAAK,CAAC,IAAI,EAAE,CAClD,CAAC;4BACF,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAC/C,CAAC;4BACF,IAAI,CAAC,QAAQ,EAAE,CAAC;yBACjB;6BAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;4BAC9B,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;yBAC7C;qBACF;gBACH,CAAC,CAAC;gBACF,YAAY,CAAC,aAAa,CAAC,CAAC;aAC7B;SACF;KACF;IAED,IAAI,QAAQ,IAAI,WAAW,EAAE;QAC3B,MAAM,gBAAgB,CAAC,SAAS,EAAE,EAAE,GAAG,QAAQ,EAAE,GAAG,WAAW,EAAE,CAAC,CAAC;KACpE;SAAM,IAAI,QAAQ,EAAE;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,EAAE,GAAG,QAAQ,EAAE,CAAC,CAAC;KACpD;SAAM;QACL,MAAM,gBAAgB,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;KAC9C;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
75 lib/analyze.js generated
@@ -67,9 +67,9 @@ async function createdDBForScannedLanguages(config, logger) {
 // Insert the LGTM_INDEX_X env vars at this point so they are set when
 // we extract any scanned languages.
 analysisPaths.includeAndExcludeAnalysisPaths(config);
-const codeql = codeql_1.getCodeQL(config.codeQLCmd);
+const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
 for (const language of config.languages) {
-if (languages_1.isScannedLanguage(language) &&
+if ((0, languages_1.isScannedLanguage)(language) &&
 !dbIsFinalized(config, language, logger)) {
 logger.startGroup(`Extracting ${language}`);
 if (language === languages_1.Language.python) {
@@ -91,16 +91,16 @@ function dbIsFinalized(config, language, logger) {
 return false;
 }
 }
-async function finalizeDatabaseCreation(config, threadsFlag, logger) {
+async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger) {
 await createdDBForScannedLanguages(config, logger);
-const codeql = codeql_1.getCodeQL(config.codeQLCmd);
+const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
 for (const language of config.languages) {
 if (dbIsFinalized(config, language, logger)) {
 logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`);
 }
 else {
 logger.startGroup(`Finalizing ${language}`);
-await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config, language), threadsFlag);
+await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config, language), threadsFlag, memoryFlag);
 logger.endGroup();
 }
 }
@@ -108,12 +108,19 @@ async function finalizeDatabaseCreation(config, threadsFlag, logger) {
 // Runs queries and creates sarif files in the given folder
 async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger) {
 const statusReport = {};
-// count the number of lines in the background
+let locPromise = Promise.resolve({});
-const locPromise = count_loc_1.countLoc(path.resolve(),
+const cliCanCountBaseline = await cliCanCountLoC();
-// config.paths specifies external directories. the current
+const debugMode = process.env["INTERNAL_CODEQL_ACTION_DEBUG_LOC"] ||
-// directory is included in the analysis by default. Replicate
+process.env["ACTIONS_RUNNER_DEBUG"] ||
-// that here.
+process.env["ACTIONS_STEP_DEBUG"];
-config.paths, config.pathsIgnore, config.languages, logger);
+if (!cliCanCountBaseline || debugMode) {
+// count the number of lines in the background
+locPromise = (0, count_loc_1.countLoc)(path.resolve(),
+// config.paths specifies external directories. the current
+// directory is included in the analysis by default. Replicate
+// that here.
+config.paths, config.pathsIgnore, config.languages, logger);
+}
 for (const language of config.languages) {
 const queries = config.queries[language];
 const packsWithVersion = config.packs[language] || [];
@@ -130,7 +137,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
 logger.info("And should not be used in production yet.");
 logger.info("*************");
 logger.startGroup(`Downloading custom packs for ${language}`);
-const codeql = codeql_1.getCodeQL(config.codeQLCmd);
+const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
 const results = await codeql.packDownload(packsWithVersion);
 logger.info(`Downloaded packs: ${results.packs
 .map((r) => `${r.name}@${r.version || "latest"}`)
@@ -166,16 +173,22 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
 const startTimeInterpretResults = new Date().getTime();
 const sarifFile = path.join(sarifFolder, `${language}.sarif`);
 const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile);
-await injectLinesOfCode(sarifFile, language, locPromise);
+if (!cliCanCountBaseline)
+await injectLinesOfCode(sarifFile, language, locPromise);
 statusReport[`interpret_results_${language}_duration_ms`] =
 new Date().getTime() - startTimeInterpretResults;
 logger.endGroup();
 logger.info(analysisSummary);
-printLinesOfCodeSummary(logger, language, await locPromise);
+if (!cliCanCountBaseline || debugMode)
+printLinesOfCodeSummary(logger, language, await locPromise);
+if (cliCanCountBaseline)
+logger.info(await runPrintLinesOfCode(language));
 }
 catch (e) {
-logger.info(e);
+logger.info(String(e));
-logger.info(e.stack);
+if (e instanceof Error) {
+logger.info(e.stack);
+}
 statusReport.analyze_failure_language = language;
 throw new CodeQLAnalysisError(statusReport, `Error running analysis for ${language}: ${e}`);
 }
@@ -183,9 +196,17 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
 return statusReport;
 async function runInterpretResults(language, queries, sarifFile) {
 const databasePath = util.getCodeQLDatabasePath(config, language);
-const codeql = codeql_1.getCodeQL(config.codeQLCmd);
+const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
 return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId);
 }
+async function cliCanCountLoC() {
+return await util.codeQlVersionAbove(await (0, codeql_1.getCodeQL)(config.codeQLCmd), codeql_1.CODEQL_VERSION_COUNTS_LINES);
+}
+async function runPrintLinesOfCode(language) {
+const databasePath = util.getCodeQLDatabasePath(config, language);
+const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
+return await codeql.databasePrintBaseline(databasePath);
+}
 async function runQueryGroup(language, type, querySuiteContents, searchPath) {
 const databasePath = util.getCodeQLDatabasePath(config, language);
 // Pass the queries to codeql using a file instead of using the command
@@ -193,7 +214,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
 const querySuitePath = `${databasePath}-queries-${type}.qls`;
 fs.writeFileSync(querySuitePath, querySuiteContents);
 logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
-const codeql = codeql_1.getCodeQL(config.codeQLCmd);
+const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
 await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, memoryFlag, threadsFlag);
 logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
 return querySuitePath;
@@ -213,17 +234,17 @@ function packWithVersionToQuerySuiteEntry(pack) {
 }
 return text;
 }
-async function runFinalize(outputDir, threadsFlag, config, logger) {
+async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
 // Delete the tracer config env var to avoid tracing ourselves
 delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
 fs.mkdirSync(outputDir, { recursive: true });
-await finalizeDatabaseCreation(config, threadsFlag, logger);
+await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
 }
 exports.runFinalize = runFinalize;
 async function runCleanup(config, cleanupLevel, logger) {
 logger.startGroup("Cleaning up databases");
 for (const language of config.languages) {
-const codeql = codeql_1.getCodeQL(config.codeQLCmd);
+const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
 const databasePath = util.getCodeQLDatabasePath(config, language);
 await codeql.databaseCleanup(databasePath, cleanupLevel);
 }
@@ -233,24 +254,14 @@ exports.runCleanup = runCleanup;
 async function injectLinesOfCode(sarifFile, language, locPromise) {
 var _a;
 const lineCounts = await locPromise;
-const idPrefix = count_loc_1.getIdPrefix(language);
 if (language in lineCounts) {
 const sarif = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
 if (Array.isArray(sarif.runs)) {
 for (const run of sarif.runs) {
-// Old style: Baseline is inserted when rule ID has suffix /summary/lines-of-code
-const ruleId = `${idPrefix}/summary/lines-of-code`;
 run.properties = run.properties || {};
 run.properties.metricResults = run.properties.metricResults || [];
-const rule = run.properties.metricResults.find(
-// the rule id can be in either of two places
-(r) => { var _a; return r.ruleId === ruleId || ((_a = r.rule) === null || _a === void 0 ? void 0 : _a.id) === ruleId; });
-// only add the baseline value if the rule already exists
-if (rule) {
-rule.baseline = lineCounts[language];
-}
-// New style: Baseline is inserted when matching rule has tag lines-of-code
 for (const metric of run.properties.metricResults) {
+// Baseline is inserted when matching rule has tag lines-of-code
 if (metric.rule && metric.rule.toolComponent) {
 const matchingRule = run.tool.extensions[metric.rule.toolComponent.index].rules[metric.rule.index];
 if ((_a = matchingRule.properties.tags) === null || _a === void 0 ? void 0 : _a.includes("lines-of-code")) {
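The lib/analyze.js hunks above make the Action skip its own line-of-code counting whenever the CodeQL CLI is new enough to report a baseline itself (CODEQL_VERSION_COUNTS_LINES, 2.6.2 in the lib/codeql.js section further down), unless a debug variable forces the old path. Below is a minimal TypeScript sketch of that decision only, not the actual source: the countLoc callback stands in for the Action's own counter, and the flag names mirror the compiled output above.

    // Hedged sketch of the baseline fallback decided in runQueries above.
    type LineCounts = Partial<Record<string, number>>;

    async function resolveBaselineCounts(
      cliCanCountBaseline: boolean,         // CLI can report the baseline itself
      debugMode: boolean,                   // INTERNAL_CODEQL_ACTION_DEBUG_LOC / ACTIONS_*_DEBUG
      countLoc: () => Promise<LineCounts>   // stand-in for the Action-side counter
    ): Promise<LineCounts> {
      if (cliCanCountBaseline && !debugMode) {
        // Nothing to inject into the SARIF; the CLI prints the baseline instead.
        return {};
      }
      return await countLoc();
    }

A caller would then only inject counts into the SARIF when this map is non-empty, which matches the `if (!cliCanCountBaseline)` guard around injectLinesOfCode in the diff.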
File diff suppressed because one or more lines are too long
70 lib/analyze.test.js generated
@@ -27,29 +27,28 @@ const path = __importStar(require("path"));
 const ava_1 = __importDefault(require("ava"));
 const yaml = __importStar(require("js-yaml"));
 const semver_1 = require("semver");
-const sinon_1 = __importDefault(require("sinon"));
+const sinon = __importStar(require("sinon"));
 const analyze_1 = require("./analyze");
 const codeql_1 = require("./codeql");
-const count_loc_1 = require("./count-loc");
 const count = __importStar(require("./count-loc"));
 const languages_1 = require("./languages");
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
 // Checks that the duration fields are populated for the correct language
 // and correct case of builtin or custom. Also checks the correct search
 // paths are set in the database analyze invocation.
-ava_1.default("status report fields and search path setting", async (t) => {
+(0, ava_1.default)("status report fields and search path setting", async (t) => {
 const mockLinesOfCode = Object.values(languages_1.Language).reduce((obj, lang, i) => {
 // use a different line count for each language
 obj[lang] = i + 1;
 return obj;
 }, {});
-sinon_1.default.stub(count, "countLoc").resolves(mockLinesOfCode);
+sinon.stub(count, "countLoc").resolves(mockLinesOfCode);
 let searchPathsUsed = [];
 return await util.withTmpDir(async (tmpDir) => {
-testing_utils_1.setupActionsVars(tmpDir, tmpDir);
+(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
 const memoryFlag = "";
 const addSnippetsFlag = "";
 const threadsFlag = "";
@@ -57,18 +56,18 @@ ava_1.default("status report fields and search path setting", async (t) => {
 [languages_1.Language.cpp]: [
 {
 packName: "a/b",
-version: semver_1.clean("1.0.0"),
+version: (0, semver_1.clean)("1.0.0"),
 },
 ],
 [languages_1.Language.java]: [
 {
 packName: "c/d",
-version: semver_1.clean("2.0.0"),
+version: (0, semver_1.clean)("2.0.0"),
 },
 ],
 };
 for (const language of Object.values(languages_1.Language)) {
-codeql_1.setCodeQL({
+(0, codeql_1.setCodeQL)({
 packDownload: async () => ({ packs: [] }),
 databaseRunQueries: async (_db, searchPath) => {
 searchPathsUsed.push(searchPath);
@@ -76,31 +75,7 @@ ava_1.default("status report fields and search path setting", async (t) => {
 databaseInterpretResults: async (_db, _queriesRun, sarifFile) => {
 fs.writeFileSync(sarifFile, JSON.stringify({
 runs: [
-// variant 1 uses ruleId
+// references a rule with the lines-of-code tag, so baseline should be injected
-{
-properties: {
-metricResults: [
-{
-ruleId: `${count_loc_1.getIdPrefix(language)}/summary/lines-of-code`,
-value: 123,
-},
-],
-},
-},
-// variant 2 uses rule.id
-{
-properties: {
-metricResults: [
-{
-rule: {
-id: `${count_loc_1.getIdPrefix(language)}/summary/lines-of-code`,
-},
-value: 123,
-},
-],
-},
-},
-// variant 3 references a rule with the lines-of-code tag
 {
 tool: {
 extensions: [
@@ -158,7 +133,7 @@ ava_1.default("status report fields and search path setting", async (t) => {
 builtin: ["foo.ql"],
 custom: [],
 };
-const builtinStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, logging_1.getRunnerLogger(true));
+const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
 const hasPacks = language in packs;
 const statusReportKeys = Object.keys(builtinStatusReport).sort();
 if (hasPacks) {
@@ -184,7 +159,7 @@ ava_1.default("status report fields and search path setting", async (t) => {
 },
 ],
 };
-const customStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, logging_1.getRunnerLogger(true));
+const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
 t.deepEqual(Object.keys(customStatusReport).length, 2);
 t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
 const expectedSearchPathsUsed = hasPacks
@@ -199,29 +174,12 @@ ava_1.default("status report fields and search path setting", async (t) => {
 function verifyLineCounts(tmpDir) {
 // eslint-disable-next-line github/array-foreach
 Object.keys(languages_1.Language).forEach((lang, i) => {
-verifyLineCountForFile(lang, path.join(tmpDir, `${lang}.sarif`), i + 1);
+verifyLineCountForFile(path.join(tmpDir, `${lang}.sarif`), i + 1);
 });
 }
-function verifyLineCountForFile(lang, filePath, lineCount) {
+function verifyLineCountForFile(filePath, lineCount) {
-const idPrefix = count_loc_1.getIdPrefix(lang);
 const sarif = JSON.parse(fs.readFileSync(filePath, "utf8"));
 t.deepEqual(sarif.runs[0].properties.metricResults, [
-{
-ruleId: `${idPrefix}/summary/lines-of-code`,
-value: 123,
-baseline: lineCount,
-},
-]);
-t.deepEqual(sarif.runs[1].properties.metricResults, [
-{
-rule: {
-id: `${idPrefix}/summary/lines-of-code`,
-},
-value: 123,
-baseline: lineCount,
-},
-]);
-t.deepEqual(sarif.runs[2].properties.metricResults, [
 {
 rule: {
 index: 0,
@@ -234,7 +192,7 @@ ava_1.default("status report fields and search path setting", async (t) => {
 },
 ]);
 // when the rule doesn't exist, it should not be added
-t.deepEqual(sarif.runs[3].properties.metricResults, []);
+t.deepEqual(sarif.runs[1].properties.metricResults, []);
 }
 function verifyQuerySuites(tmpDir) {
 const qlsContent = [
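Besides dropping the two removed SARIF variants from the mocked interpret results, the main change to this test is the sinon import: the compiled `sinon_1.default.stub(...)` becomes `sinon.stub(...)`, which corresponds to switching from a default import to a namespace import in the TypeScript source. A small sketch of that assumed source-level change, not the verbatim src/analyze.test.ts edit:

    // Before (emits sinon_1.default under CommonJS):
    //   import sinon from "sinon";
    // After (emits a plain namespace object named sinon):
    import * as sinon from "sinon";

    // Call sites stay the same:
    const stub = sinon.stub().returns(42);
    console.log(stub()); // 42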
File diff suppressed because one or more lines are too long
10 lib/api-client.js generated
@@ -41,8 +41,8 @@ const getApiClient = function (apiDetails, { allowExternal = false } = {}) {
 const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
 return new retryingOctokit(githubUtils.getOctokitOptions(auth, {
 baseUrl: getApiUrl(apiDetails.url),
-userAgent: `CodeQL-${util_1.getMode()}/${pkg.version}`,
+userAgent: `CodeQL-${(0, util_1.getMode)()}/${pkg.version}`,
-log: console_log_level_1.default({ level: "debug" }),
+log: (0, console_log_level_1.default)({ level: "debug" }),
 }));
 };
 exports.getApiClient = getApiClient;
@@ -62,10 +62,10 @@ function getApiUrl(githubUrl) {
 // and called only from the action entrypoints.
 function getActionsApiClient() {
 const apiDetails = {
-auth: actions_util_1.getRequiredInput("token"),
+auth: (0, actions_util_1.getRequiredInput)("token"),
-url: util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
+url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
 };
-return exports.getApiClient(apiDetails);
+return (0, exports.getApiClient)(apiDetails);
 }
 exports.getActionsApiClient = getActionsApiClient;
 //# sourceMappingURL=api-client.js.map
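Most of the remaining churn in these generated files, here and in the sections above, is the `(0, ns.fn)(...)` call form. Newer TypeScript emits imported-function calls this way so the callee does not receive the module namespace object as `this`; it behaves the same as `ns.fn(...)` in this code. A hypothetical two-file illustration (module and caller are invented for the example, not part of the Action):

    // mode.ts (hypothetical module, for illustration only)
    export function getMode(): string {
      return "actions";
    }

    // caller.ts
    import { getMode } from "./mode";

    // Written as a plain call in TypeScript...
    const userAgent = `CodeQL-${getMode()}/1.0.0`;
    // ...the CommonJS output invokes it indirectly, e.g. (0, mode_1.getMode)(),
    // which is the pattern that accounts for most of the lines in this diff.
    console.log(userAgent);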
@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,iCAAsD;AAEtD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,cAAO,EAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,+BAAgB,CAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,0BAAmB,CAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,oBAAY,CAAC,UAAU,CAAC,CAAC;AAClC,CAAC;AAPD,kDAOC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,iCAAsD;AAEtD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,IAAA,cAAO,GAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,IAAA,oBAAY,EAAC,UAAU,CAAC,CAAC;AAClC,CAAC;AAPD,kDAOC"}
20 lib/api-client.test.js generated
@@ -24,22 +24,22 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 const githubUtils = __importStar(require("@actions/github/lib/utils"));
 const ava_1 = __importDefault(require("ava"));
-const sinon_1 = __importDefault(require("sinon"));
+const sinon = __importStar(require("sinon"));
 const api_client_1 = require("./api-client");
 const testing_utils_1 = require("./testing-utils");
 const util_1 = require("./util");
 // eslint-disable-next-line import/no-commonjs
 const pkg = require("../package.json");
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
 let pluginStub;
 let githubStub;
 ava_1.default.beforeEach(() => {
-pluginStub = sinon_1.default.stub(githubUtils.GitHub, "plugin");
+pluginStub = sinon.stub(githubUtils.GitHub, "plugin");
-githubStub = sinon_1.default.stub();
+githubStub = sinon.stub();
 pluginStub.returns(githubStub);
-util_1.initializeEnvironment(util_1.Mode.actions, pkg.version);
+(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
 });
-ava_1.default("Get the client API", async (t) => {
+(0, ava_1.default)("Get the client API", async (t) => {
 doTest(t, {
 auth: "xyz",
 externalRepoAuth: "abc",
@@ -50,7 +50,7 @@ ava_1.default("Get the client API", async (t) => {
 userAgent: `CodeQL-Action/${pkg.version}`,
 });
 });
-ava_1.default("Get the client API external", async (t) => {
+(0, ava_1.default)("Get the client API external", async (t) => {
 doTest(t, {
 auth: "xyz",
 externalRepoAuth: "abc",
@@ -61,7 +61,7 @@ ava_1.default("Get the client API external", async (t) => {
 userAgent: `CodeQL-Action/${pkg.version}`,
 });
 });
-ava_1.default("Get the client API external not present", async (t) => {
+(0, ava_1.default)("Get the client API external not present", async (t) => {
 doTest(t, {
 auth: "xyz",
 url: "http://hucairz",
@@ -71,7 +71,7 @@ ava_1.default("Get the client API external not present", async (t) => {
 userAgent: `CodeQL-Action/${pkg.version}`,
 });
 });
-ava_1.default("Get the client API with github url", async (t) => {
+(0, ava_1.default)("Get the client API with github url", async (t) => {
 doTest(t, {
 auth: "xyz",
 url: "https://github.com/some/invalid/url",
@@ -82,7 +82,7 @@ ava_1.default("Get the client API with github url", async (t) => {
 });
 });
 function doTest(t, clientArgs, clientOptions, expected) {
-api_client_1.getApiClient(clientArgs, clientOptions);
+(0, api_client_1.getApiClient)(clientArgs, clientOptions);
 const firstCallArgs = githubStub.args[0];
 // log is a function, so we don't need to test for equality of it
 delete firstCallArgs[0].log;
@@ -1 +1 @@
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,kDAA0B;AAE1B,6CAA4C;AAC5C,mDAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,eAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,eAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,4BAAqB,CAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;AACnD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,yBAAY,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"}
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,6CAA+B;AAE/B,6CAA4C;AAC5C,mDAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;AACnD,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,IAAA,yBAAY,EAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"}
@@ -1 +1 @@
{ "maximumVersion": "3.2", "minimumVersion": "2.22" }
|
{ "maximumVersion": "3.3", "minimumVersion": "3.0" }
|
||||||
20 lib/autobuild-action.js generated
@@ -28,39 +28,39 @@ const util_1 = require("./util");
 // eslint-disable-next-line import/no-commonjs
 const pkg = require("../package.json");
 async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
-util_1.initializeEnvironment(util_1.Mode.actions, pkg.version);
+(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
 const status = failingLanguage !== undefined || cause !== undefined
 ? "failure"
 : "success";
-const statusReportBase = await actions_util_1.createStatusReportBase("autobuild", status, startedAt, cause === null || cause === void 0 ? void 0 : cause.message, cause === null || cause === void 0 ? void 0 : cause.stack);
+const statusReportBase = await (0, actions_util_1.createStatusReportBase)("autobuild", status, startedAt, cause === null || cause === void 0 ? void 0 : cause.message, cause === null || cause === void 0 ? void 0 : cause.stack);
 const statusReport = {
 ...statusReportBase,
 autobuild_languages: allLanguages.join(","),
 autobuild_failure: failingLanguage,
 };
-await actions_util_1.sendStatusReport(statusReport);
+await (0, actions_util_1.sendStatusReport)(statusReport);
 }
 async function run() {
-const logger = logging_1.getActionsLogger();
+const logger = (0, logging_1.getActionsLogger)();
 const startedAt = new Date();
 let language = undefined;
 try {
-if (!(await actions_util_1.sendStatusReport(await actions_util_1.createStatusReportBase("autobuild", "starting", startedAt)))) {
+if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
 return;
 }
-const config = await config_utils.getConfig(actions_util_1.getTemporaryDirectory(), logger);
+const config = await config_utils.getConfig((0, actions_util_1.getTemporaryDirectory)(), logger);
 if (config === undefined) {
 throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
 }
-language = autobuild_1.determineAutobuildLanguage(config, logger);
+language = (0, autobuild_1.determineAutobuildLanguage)(config, logger);
 if (language !== undefined) {
-await autobuild_1.runAutobuild(language, config, logger);
+await (0, autobuild_1.runAutobuild)(language, config, logger);
 }
 }
 catch (error) {
-core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error.message}`);
+core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error instanceof Error ? error.message : String(error)}`);
 console.log(error);
-await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error);
+await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error instanceof Error ? error : new Error(String(error)));
 return;
 }
 await sendCompletedStatusReport(startedAt, language ? [language] : []);
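The catch blocks above stop assuming the thrown value is an Error: `.message` is only read after an `instanceof Error` check, and anything else is wrapped via `String(...)`. A self-contained sketch of the same narrowing pattern (the helper name is illustrative, not from the Action):

    // Narrow an unknown throwable before touching .message or .stack.
    function toError(e: unknown): Error {
      return e instanceof Error ? e : new Error(String(e));
    }

    try {
      throw "a plain string, not an Error";
    } catch (error) {
      const wrapped = toError(error);
      console.log(`autobuild failed: ${wrapped.message}`);
    }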
@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,4BAAqB,CAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,qCAAsB,CACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,+BAAgB,CAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,+BAAgB,CACtB,MAAM,qCAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,oCAAqB,EAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
2 lib/autobuild.js generated
@@ -25,7 +25,7 @@ function determineAutobuildLanguage(config, logger) {
 exports.determineAutobuildLanguage = determineAutobuildLanguage;
 async function runAutobuild(language, config, logger) {
 logger.startGroup(`Attempting to automatically build ${language} code`);
-const codeQL = codeql_1.getCodeQL(config.codeQLCmd);
+const codeQL = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
 await codeQL.runAutobuild(language);
 logger.endGroup();
 }
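This one-line change follows from getCodeQL becoming async (see the lib/codeql.js section below, where it awaits getCodeQLForCmd and a version check): every caller now has to await it before using the returned object. A reduced sketch of the call-site impact, with the CodeQL interface trimmed to the single method used here and the bodies stubbed out:

    // Sketch only: the real getCodeQL resolves and version-checks the CLI.
    interface CodeQL {
      runAutobuild(language: string): Promise<void>;
    }

    async function getCodeQL(cmd: string): Promise<CodeQL> {
      return {
        runAutobuild: async (language) => {
          console.log(`autobuilding ${language} with ${cmd}`);
        },
      };
    }

    async function runAutobuild(language: string, cmd: string): Promise<void> {
      const codeQL = await getCodeQL(cmd); // previously: const codeQL = getCodeQL(cmd);
      await codeQL.runAutobuild(language);
    }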
@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
126 lib/codeql.js generated
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.getExtraOptions = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CommandInvocationError = void 0;
|
exports.getExtraOptions = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||||
@@ -33,6 +33,7 @@ const actions_util_1 = require("./actions-util");
|
|||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||||
const error_matcher_1 = require("./error-matcher");
|
const error_matcher_1 = require("./error-matcher");
|
||||||
|
const languages_1 = require("./languages");
|
||||||
const toolcache = __importStar(require("./toolcache"));
|
const toolcache = __importStar(require("./toolcache"));
|
||||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
@@ -51,6 +52,42 @@ exports.CommandInvocationError = CommandInvocationError;
|
|||||||
let cachedCodeQL = undefined;
|
let cachedCodeQL = undefined;
|
||||||
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
|
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
|
||||||
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||||
|
/**
|
||||||
|
* The oldest version of CodeQL that the Action will run with. This should be
|
||||||
|
* at least three minor versions behind the current version. The version flags
|
||||||
|
* below can be used to conditionally enable certain features on versions newer
|
||||||
|
* than this. Please record the reason we cannot support an older version.
|
||||||
|
*
|
||||||
|
* Reason: Changes to how the tracing environment is set up.
|
||||||
|
*/
|
||||||
|
const CODEQL_MINIMUM_VERSION = "2.3.1";
|
||||||
|
/**
|
||||||
|
* Versions of CodeQL that version-flag certain functionality in the Action.
|
||||||
|
* For convenience, please keep these in descending order. Once a version
|
||||||
|
* flag is older than the oldest supported version above, it may be removed.
|
||||||
|
*/
|
||||||
|
const CODEQL_VERSION_RAM_FINALIZE = "2.5.8";
|
||||||
|
const CODEQL_VERSION_DIAGNOSTICS = "2.5.6";
|
||||||
|
const CODEQL_VERSION_METRICS = "2.5.5";
|
||||||
|
const CODEQL_VERSION_GROUP_RULES = "2.5.5";
|
||||||
|
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
|
||||||
|
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
|
||||||
|
/**
|
||||||
|
* Version above which we use the CLI's indirect build tracing and
|
||||||
|
* multi-language tracing features.
|
||||||
|
*
|
||||||
|
* There are currently three blockers on the CLI's side to enabling this:
|
||||||
|
* (1) The logs directory should be created for a DB cluster, as some
|
||||||
|
* autobuilders expect it to be present.
|
||||||
|
* (2) The SEMMLE_PRELOAD_libtrace{32,64}? env variables need to be set.
|
||||||
|
* (3) The .environment and .win32env files need to be created next to
|
||||||
|
* the DB spec.
|
||||||
|
*
|
||||||
|
* Once _all_ of these are fixed, we can enable this by setting the
|
||||||
|
* version flag below to the earliest version of the CLI that resolved
|
||||||
|
* the above issues.
|
||||||
|
*/
|
||||||
|
exports.CODEQL_VERSION_NEW_TRACING = "99.99.99";
|
||||||
function getCodeQLBundleName() {
|
function getCodeQLBundleName() {
|
||||||
let platform;
|
let platform;
|
||||||
if (process.platform === "win32") {
|
if (process.platform === "win32") {
|
||||||
@@ -82,14 +119,14 @@ function getActionsCodeQLActionRepository(logger) {
|
|||||||
}
|
}
|
||||||
// The Actions Runner used with GitHub Enterprise Server 2.22 did not set the GITHUB_ACTION_REPOSITORY variable.
|
// The Actions Runner used with GitHub Enterprise Server 2.22 did not set the GITHUB_ACTION_REPOSITORY variable.
|
||||||
// This fallback logic can be removed after the end-of-support for 2.22 on 2021-09-23.
|
// This fallback logic can be removed after the end-of-support for 2.22 on 2021-09-23.
|
||||||
if (actions_util_1.isRunningLocalAction()) {
|
if ((0, actions_util_1.isRunningLocalAction)()) {
|
||||||
// This handles the case where the Action does not come from an Action repository,
|
// This handles the case where the Action does not come from an Action repository,
|
||||||
// e.g. our integration tests which use the Action code from the current checkout.
|
// e.g. our integration tests which use the Action code from the current checkout.
|
||||||
logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository.");
|
logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository.");
|
||||||
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
||||||
}
|
}
|
||||||
logger.info("GITHUB_ACTION_REPOSITORY environment variable was not set. Falling back to legacy method of finding the GitHub Action.");
|
logger.info("GITHUB_ACTION_REPOSITORY environment variable was not set. Falling back to legacy method of finding the GitHub Action.");
|
||||||
const relativeScriptPathParts = actions_util_1.getRelativeScriptPath().split(path.sep);
|
const relativeScriptPathParts = (0, actions_util_1.getRelativeScriptPath)().split(path.sep);
|
||||||
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
|
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
|
||||||
}
|
}
|
||||||
 async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
@@ -105,7 +142,7 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
 // We now filter out any duplicates.
 // Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
 const uniqueDownloadSources = potentialDownloadSources.filter((source, index, self) => {
-return !self.slice(0, index).some((other) => fast_deep_equal_1.default(source, other));
+return !self.slice(0, index).some((other) => (0, fast_deep_equal_1.default)(source, other));
 });
 const codeQLBundleName = getCodeQLBundleName();
 if (variant === util.GitHubVariant.GHAE) {
@@ -159,7 +196,7 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
 }
 return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
 }
-async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger) {
+async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, checkVersion) {
 try {
 // We use the special value of 'latest' to prioritize the version in the
 // defaults over any pinned cached version.
@@ -227,11 +264,11 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
 else if (process.platform !== "linux" && process.platform !== "darwin") {
 throw new Error(`Unsupported platform: ${process.platform}`);
 }
-cachedCodeQL = getCodeQLForCmd(codeqlCmd);
+cachedCodeQL = await getCodeQLForCmd(codeqlCmd, checkVersion);
 return { codeql: cachedCodeQL, toolsVersion: codeqlURLVersion };
 }
 catch (e) {
-logger.error(e);
+logger.error(e instanceof Error ? e : new Error(String(e)));
 throw new Error("Unable to download and extract CodeQL CLI");
 }
 }
@@ -259,9 +296,9 @@ exports.convertToSemVer = convertToSemVer;
 /**
 * Use the CodeQL executable located at the given path.
 */
-function getCodeQL(cmd) {
+async function getCodeQL(cmd) {
 if (cachedCodeQL === undefined) {
-cachedCodeQL = getCodeQLForCmd(cmd);
+cachedCodeQL = await getCodeQLForCmd(cmd, true);
 }
 return cachedCodeQL;
 }
@@ -287,9 +324,11 @@ function resolveFunction(partialCodeql, methodName, defaultImplementation) {
 function setCodeQL(partialCodeql) {
 cachedCodeQL = {
 getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"),
+getVersion: resolveFunction(partialCodeql, "getVersion", () => new Promise((resolve) => resolve("1.0.0"))),
 printVersion: resolveFunction(partialCodeql, "printVersion"),
 getTracerEnv: resolveFunction(partialCodeql, "getTracerEnv"),
 databaseInit: resolveFunction(partialCodeql, "databaseInit"),
+databaseInitCluster: resolveFunction(partialCodeql, "databaseInitCluster"),
 runAutobuild: resolveFunction(partialCodeql, "runAutobuild"),
 extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
 finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
@@ -300,6 +339,7 @@ function setCodeQL(partialCodeql) {
 databaseBundle: resolveFunction(partialCodeql, "databaseBundle"),
 databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
 databaseInterpretResults: resolveFunction(partialCodeql, "databaseInterpretResults"),
+databasePrintBaseline: resolveFunction(partialCodeql, "databasePrintBaseline"),
 };
 return cachedCodeQL;
 }
@@ -318,11 +358,17 @@ function getCachedCodeQL() {
 return cachedCodeQL;
 }
 exports.getCachedCodeQL = getCachedCodeQL;
-function getCodeQLForCmd(cmd) {
+async function getCodeQLForCmd(cmd, checkVersion) {
-return {
+let cachedVersion = undefined;
+const codeql = {
 getPath() {
 return cmd;
 },
+async getVersion() {
+if (cachedVersion === undefined)
+cachedVersion = runTool(cmd, ["version", "--format=terse"]);
+return await cachedVersion;
+},
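The memoized `getVersion()` above runs `codeql version --format=terse` once per process and is presumably what backs the `util.codeQlVersionAbove(...)` checks used in later hunks; a sketch of such a comparison, assuming a semver-style check (the `semver` package is already a dependency used by these tests):

// Hypothetical sketch of a version gate built on getVersion(); the real
// util.codeQlVersionAbove is assumed to do something equivalent.
const semver = require("semver");
async function isAtLeast(codeql, minimumVersion) {
    const actual = await codeql.getVersion(); // cached after the first call
    return semver.gte(actual, minimumVersion);
}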
 async printVersion() {
 await runTool(cmd, ["version", "--format=json"]);
 },
@@ -376,6 +422,27 @@ function getCodeQLForCmd(cmd) {
 ...getExtraOptionsFromEnv(["database", "init"]),
 ]);
 },
+async databaseInitCluster(databasePath, languages, sourceRoot, processName, processLevel) {
+const extraArgs = languages.map((language) => `--language=${language}`);
+if (languages.filter(languages_1.isTracedLanguage).length > 0) {
+extraArgs.push("--begin-tracing");
+if (processName !== undefined) {
+extraArgs.push(`--trace-process-name=${processName}`);
+}
+else {
+extraArgs.push(`--trace-process-level=${processLevel || 3}`);
+}
+}
+await runTool(cmd, [
+"database",
+"init",
+"--db-cluster",
+databasePath,
+`--source-root=${sourceRoot}`,
+...extraArgs,
+...getExtraOptionsFromEnv(["database", "init"]),
+]);
+},
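A hedged usage sketch of the new `databaseInitCluster` method above; the values are illustrative only, not taken from the Action's real configuration:

// Illustrative call; paths, languages and process level are made up.
async function initClusterExample(codeql) {
    await codeql.databaseInitCluster(
        "/tmp/codeql_databases",   // cluster root passed with --db-cluster
        ["javascript", "cpp"],     // each becomes a --language=<lang> flag
        process.cwd(),             // --source-root
        undefined,                 // no processName, so the else branch applies...
        3                          // ...and --trace-process-level=3 is used for traced languages
    );
}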
 async runAutobuild(language) {
 const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
 const autobuildCmd = path.join(path.dirname(cmd), language, "tools", cmdName);
@@ -416,7 +483,7 @@ function getCodeQLForCmd(cmd) {
 const ext = process.platform === "win32" ? ".cmd" : ".sh";
 const traceCommand = path.resolve(JSON.parse(extractorPath), "tools", `autobuild${ext}`);
 // Run trace command
-await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [
+await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, [
 "database",
 "trace-command",
 ...getExtraOptionsFromEnv(["database", "trace-command"]),
@@ -425,15 +492,18 @@ function getCodeQLForCmd(cmd) {
 traceCommand,
 ], error_matcher_1.errorMatchers);
 },
-async finalizeDatabase(databasePath, threadsFlag) {
+async finalizeDatabase(databasePath, threadsFlag, memoryFlag) {
-await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [
+const args = [
 "database",
 "finalize",
 "--finalize-dataset",
 threadsFlag,
 ...getExtraOptionsFromEnv(["database", "finalize"]),
 databasePath,
-], error_matcher_1.errorMatchers);
+];
+if (await util.codeQlVersionAbove(this, CODEQL_VERSION_RAM_FINALIZE))
+args.push(memoryFlag);
+await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, args, error_matcher_1.errorMatchers);
 },
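`finalizeDatabase` now accepts a `memoryFlag` that is only appended when the CLI is at least CODEQL_VERSION_RAM_FINALIZE ("2.5.8"); a sketch of a call site with illustrative flag strings (the Action derives these from its own inputs):

// Illustrative only; flag values are assumptions, not the Action's real inputs.
async function finalizeExample(codeql) {
    const threadsFlag = "--threads=2";
    const memoryFlag = "--ram=4096"; // silently dropped on CLIs older than 2.5.8
    await codeql.finalizeDatabase("/tmp/codeql_databases/javascript", threadsFlag, memoryFlag);
}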
 async resolveLanguages() {
 const codeqlArgs = ["resolve", "languages", "--format=json"];
@@ -487,21 +557,34 @@ function getCodeQLForCmd(cmd) {
 "interpret-results",
 threadsFlag,
 "--format=sarif-latest",
-"--print-diagnostics-summary",
-"--print-metrics-summary",
-"--sarif-group-rules-by-pack",
 "-v",
 `--output=${sarifFile}`,
 addSnippetsFlag,
 ...getExtraOptionsFromEnv(["database", "interpret-results"]),
 ];
-if (automationDetailsId !== undefined) {
+if (await util.codeQlVersionAbove(this, CODEQL_VERSION_DIAGNOSTICS))
+codeqlArgs.push("--print-diagnostics-summary");
+if (await util.codeQlVersionAbove(this, CODEQL_VERSION_METRICS))
+codeqlArgs.push("--print-metrics-summary");
+if (await util.codeQlVersionAbove(this, CODEQL_VERSION_GROUP_RULES))
+codeqlArgs.push("--sarif-group-rules-by-pack");
+if (automationDetailsId !== undefined &&
+(await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP))) {
 codeqlArgs.push("--sarif-category", automationDetailsId);
 }
 codeqlArgs.push(databasePath, ...querySuitePaths);
 // capture stdout, which contains analysis summaries
 return await runTool(cmd, codeqlArgs);
 },
+async databasePrintBaseline(databasePath) {
+const codeqlArgs = [
+"database",
+"print-baseline",
+...getExtraOptionsFromEnv(["database", "print-baseline"]),
+databasePath,
+];
+return await runTool(cmd, codeqlArgs);
+},
 /**
 * Download specified packs into the package cache. If the specified
 * package and version already exists (e.g., from a previous analysis run),
@@ -557,6 +640,11 @@ function getCodeQLForCmd(cmd) {
 await new toolrunner.ToolRunner(cmd, args).exec();
 },
 };
+if (checkVersion &&
+!(await util.codeQlVersionAbove(codeql, CODEQL_MINIMUM_VERSION))) {
+throw new Error(`Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${await codeql.getVersion()}`);
+}
+return codeql;
 }
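With `checkVersion`, `getCodeQLForCmd` now rejects CLIs older than CODEQL_MINIMUM_VERSION unless the caller opts out; a sketch of the two call shapes that appear in this diff (`true` from `getCodeQL`, `false` threaded through the tests' `setupCodeQL` calls):

// Illustrative; mirrors the call sites visible elsewhere in this diff.
async function versionCheckExample(codeqlCmd) {
    // Throws if `codeql version --format=terse` reports something below the minimum.
    const checked = await getCodeQLForCmd(codeqlCmd, true);
    // Skips the check, e.g. for the dummy bundles used in codeql.test.js.
    const unchecked = await getCodeQLForCmd(codeqlCmd, false);
    return { checked, unchecked };
}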
 function packWithVersionToString(pack) {
 return pack.version ? `${pack.packName}@${pack.version}` : pack.packName;

File diff suppressed because one or more lines are too long

94 lib/codeql.test.js generated
@@ -32,7 +32,7 @@ const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
 const util_1 = require("./util");
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
 const sampleApiDetails = {
 auth: "token",
 url: "https://github.com",
@@ -42,97 +42,97 @@ const sampleGHAEApiDetails = {
 url: "https://example.githubenterprise.com",
 };
 ava_1.default.beforeEach(() => {
-util_1.initializeEnvironment(util_1.Mode.actions, "1.2.3");
+(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
 });
-ava_1.default("download codeql bundle cache", async (t) => {
+(0, ava_1.default)("download codeql bundle cache", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
-testing_utils_1.setupActionsVars(tmpDir, tmpDir);
+(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
 const versions = ["20200601", "20200610"];
 for (let i = 0; i < versions.length; i++) {
 const version = versions[i];
-nock_1.default("https://example.com")
+(0, nock_1.default)("https://example.com")
 .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
 t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
 }
 const cachedVersions = toolcache.findAllVersions("CodeQL");
 t.is(cachedVersions.length, 2);
 });
 });
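Every `setupCodeQL` call in these tests now passes a trailing `false` for the new `checkVersion` parameter, since the test fixtures ship a dummy CLI; a sketch of the updated call shape with illustrative arguments:

// Illustrative; argument values are placeholders except for the parameter order.
async function setupExample(tmpDir) {
    const { codeql: cli, toolsVersion } = await codeql.setupCodeQL(
        "latest",                              // codeqlURL (or undefined, or a bundle URL)
        sampleApiDetails,                      // apiDetails
        tmpDir,                                // tempDir
        tmpDir,                                // toolCacheDir
        util.GitHubVariant.DOTCOM,             // variant
        (0, logging_1.getRunnerLogger)(true),  // logger
        false                                  // checkVersion: skip the minimum-version check
    );
    return { cli, toolsVersion };
}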
ava_1.default("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
|
(0, ava_1.default)("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
nock_1.default("https://example.com")
|
(0, nock_1.default)("https://example.com")
|
||||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||||
nock_1.default("https://example.com")
|
(0, nock_1.default)("https://example.com")
|
||||||
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
|
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
|
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("don't download codeql bundle cache with pinned different version cached", async (t) => {
|
(0, ava_1.default)("don't download codeql bundle cache with pinned different version cached", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
nock_1.default("https://example.com")
|
(0, nock_1.default)("https://example.com")
|
||||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
t.is(cachedVersions.length, 1);
|
t.is(cachedVersions.length, 1);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("download codeql bundle cache with different version cached (not pinned)", async (t) => {
|
(0, ava_1.default)("download codeql bundle cache with different version cached (not pinned)", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
nock_1.default("https://example.com")
|
(0, nock_1.default)("https://example.com")
|
||||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||||
const platform = process.platform === "win32"
|
const platform = process.platform === "win32"
|
||||||
? "win64"
|
? "win64"
|
||||||
: process.platform === "linux"
|
: process.platform === "linux"
|
||||||
? "linux64"
|
? "linux64"
|
||||||
: "osx64";
|
: "osx64";
|
||||||
nock_1.default("https://github.com")
|
(0, nock_1.default)("https://github.com")
|
||||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
t.is(cachedVersions.length, 2);
|
t.is(cachedVersions.length, 2);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
|
(0, ava_1.default)('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
nock_1.default("https://example.com")
|
(0, nock_1.default)("https://example.com")
|
||||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||||
const platform = process.platform === "win32"
|
const platform = process.platform === "win32"
|
||||||
? "win64"
|
? "win64"
|
||||||
: process.platform === "linux"
|
: process.platform === "linux"
|
||||||
? "linux64"
|
? "linux64"
|
||||||
: "osx64";
|
: "osx64";
|
||||||
nock_1.default("https://github.com")
|
(0, nock_1.default)("https://github.com")
|
||||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||||
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
t.is(cachedVersions.length, 2);
|
t.is(cachedVersions.length, 2);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("download codeql bundle from github ae endpoint", async (t) => {
|
(0, ava_1.default)("download codeql bundle from github ae endpoint", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
const bundleAssetID = 10;
|
const bundleAssetID = 10;
|
||||||
const platform = process.platform === "win32"
|
const platform = process.platform === "win32"
|
||||||
? "win64"
|
? "win64"
|
||||||
@@ -140,28 +140,28 @@ ava_1.default("download codeql bundle from github ae endpoint", async (t) => {
|
|||||||
? "linux64"
|
? "linux64"
|
||||||
: "osx64";
|
: "osx64";
|
||||||
const codeQLBundleName = `codeql-bundle-${platform}.tar.gz`;
|
const codeQLBundleName = `codeql-bundle-${platform}.tar.gz`;
|
||||||
nock_1.default("https://example.githubenterprise.com")
|
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||||
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/find/${defaults.bundleVersion}`)
|
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/find/${defaults.bundleVersion}`)
|
||||||
.reply(200, {
|
.reply(200, {
|
||||||
assets: { [codeQLBundleName]: bundleAssetID },
|
assets: { [codeQLBundleName]: bundleAssetID },
|
||||||
});
|
});
|
||||||
nock_1.default("https://example.githubenterprise.com")
|
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||||
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/download/${bundleAssetID}`)
|
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/download/${bundleAssetID}`)
|
||||||
.reply(200, {
|
.reply(200, {
|
||||||
url: `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`,
|
url: `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`,
|
||||||
});
|
});
|
||||||
nock_1.default("https://example.githubenterprise.com")
|
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, tmpDir, util.GitHubVariant.GHAE, logging_1.getRunnerLogger(true));
|
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, tmpDir, util.GitHubVariant.GHAE, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
t.is(cachedVersions.length, 1);
|
t.is(cachedVersions.length, 1);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("parse codeql bundle url version", (t) => {
|
(0, ava_1.default)("parse codeql bundle url version", (t) => {
|
||||||
t.deepEqual(codeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
|
t.deepEqual(codeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
|
||||||
});
|
});
|
||||||
ava_1.default("convert to semver", (t) => {
|
(0, ava_1.default)("convert to semver", (t) => {
|
||||||
const tests = {
|
const tests = {
|
||||||
"20200601": "0.0.0-20200601",
|
"20200601": "0.0.0-20200601",
|
||||||
"20200601.0": "0.0.0-20200601.0",
|
"20200601.0": "0.0.0-20200601.0",
|
||||||
@@ -172,23 +172,23 @@ ava_1.default("convert to semver", (t) => {
|
|||||||
};
|
};
|
||||||
for (const [version, expectedVersion] of Object.entries(tests)) {
|
for (const [version, expectedVersion] of Object.entries(tests)) {
|
||||||
try {
|
try {
|
||||||
const parsedVersion = codeql.convertToSemVer(version, logging_1.getRunnerLogger(true));
|
const parsedVersion = codeql.convertToSemVer(version, (0, logging_1.getRunnerLogger)(true));
|
||||||
t.deepEqual(parsedVersion, expectedVersion);
|
t.deepEqual(parsedVersion, expectedVersion);
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
t.fail(e.message);
|
t.fail(e instanceof Error ? e.message : String(e));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
ava_1.default("getExtraOptions works for explicit paths", (t) => {
|
(0, ava_1.default)("getExtraOptions works for explicit paths", (t) => {
|
||||||
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
|
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
|
||||||
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
|
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
|
||||||
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ["foo", "bar"], []), ["42"]);
|
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ["foo", "bar"], []), ["42"]);
|
||||||
});
|
});
|
||||||
ava_1.default("getExtraOptions works for wildcards", (t) => {
|
(0, ava_1.default)("getExtraOptions works for wildcards", (t) => {
|
||||||
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
|
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
|
||||||
});
|
});
|
||||||
ava_1.default("getExtraOptions works for wildcards and explicit paths", (t) => {
|
(0, ava_1.default)("getExtraOptions works for wildcards and explicit paths", (t) => {
|
||||||
const o1 = { "*": [42], foo: [87] };
|
const o1 = { "*": [42], foo: [87] };
|
||||||
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
|
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
|
||||||
const o2 = { "*": [42], foo: [87] };
|
const o2 = { "*": [42], foo: [87] };
|
||||||
@@ -197,17 +197,17 @@ ava_1.default("getExtraOptions works for wildcards and explicit paths", (t) => {
|
|||||||
const p = ["foo", "bar"];
|
const p = ["foo", "bar"];
|
||||||
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
|
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
|
||||||
});
|
});
|
||||||
ava_1.default("getExtraOptions throws for bad content", (t) => {
|
(0, ava_1.default)("getExtraOptions throws for bad content", (t) => {
|
||||||
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
|
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
|
||||||
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
|
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
|
||||||
t.throws(() => codeql.getExtraOptions({ "*": [42], foo: { "*": 87, bar: [99] } }, ["foo", "bar"], []));
|
t.throws(() => codeql.getExtraOptions({ "*": [42], foo: { "*": 87, bar: [99] } }, ["foo", "bar"], []));
|
||||||
});
|
});
|
||||||
ava_1.default("getCodeQLActionRepository", (t) => {
|
(0, ava_1.default)("getCodeQLActionRepository", (t) => {
|
||||||
const logger = logging_1.getRunnerLogger(true);
|
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||||
util_1.initializeEnvironment(util_1.Mode.runner, "1.2.3");
|
(0, util_1.initializeEnvironment)(util_1.Mode.runner, "1.2.3");
|
||||||
const repoActions = codeql.getCodeQLActionRepository(logger);
|
const repoActions = codeql.getCodeQLActionRepository(logger);
|
||||||
t.deepEqual(repoActions, "github/codeql-action");
|
t.deepEqual(repoActions, "github/codeql-action");
|
||||||
util_1.initializeEnvironment(util_1.Mode.actions, "1.2.3");
|
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
|
||||||
// isRunningLocalAction() === true
|
// isRunningLocalAction() === true
|
||||||
delete process.env["GITHUB_ACTION_REPOSITORY"];
|
delete process.env["GITHUB_ACTION_REPOSITORY"];
|
||||||
process.env["RUNNER_TEMP"] = path.dirname(__dirname);
|
process.env["RUNNER_TEMP"] = path.dirname(__dirname);
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
4
lib/config-utils.js
generated
4
lib/config-utils.js
generated
@@ -351,7 +351,7 @@ async function getLanguagesInRepo(repository, apiDetails, logger) {
 // into an array gives us an array of languages ordered by popularity
 const languages = new Set();
 for (const lang of Object.keys(response.data)) {
-const parsedLang = languages_1.parseLanguage(lang);
+const parsedLang = (0, languages_1.parseLanguage)(lang);
 if (parsedLang !== undefined) {
 languages.add(parsedLang);
 }
@@ -391,7 +391,7 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
 const parsedLanguages = [];
 const unknownLanguages = [];
 for (const language of languages) {
-const parsedLanguage = languages_1.parseLanguage(language);
+const parsedLanguage = (0, languages_1.parseLanguage)(language);
 if (parsedLanguage === undefined) {
 unknownLanguages.push(language);
 }

File diff suppressed because one or more lines are too long

196 lib/config-utils.test.js generated
@@ -27,7 +27,7 @@ const path = __importStar(require("path"));
 const github = __importStar(require("@actions/github"));
 const ava_1 = __importDefault(require("ava"));
 const semver_1 = require("semver");
-const sinon_1 = __importDefault(require("sinon"));
+const sinon = __importStar(require("sinon"));
 const api = __importStar(require("./api-client"));
 const codeql_1 = require("./codeql");
 const configUtils = __importStar(require("./config-utils"));
@@ -35,7 +35,7 @@ const languages_1 = require("./languages");
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
 const sampleApiDetails = {
 auth: "token",
 externalRepoAuth: "token",
@@ -54,10 +54,10 @@ function mockGetContents(content) {
 const response = {
 data: content,
 };
-const spyGetContents = sinon_1.default
+const spyGetContents = sinon
 .stub(client.repos, "getContent")
 .resolves(response);
-sinon_1.default.stub(api, "getApiClient").value(() => client);
+sinon.stub(api, "getApiClient").value(() => client);
 return spyGetContents;
 }
 function mockListLanguages(languages) {
@@ -69,14 +69,14 @@ function mockListLanguages(languages) {
 for (const language of languages) {
 response.data[language] = 123;
 }
-sinon_1.default.stub(client.repos, "listLanguages").resolves(response);
+sinon.stub(client.repos, "listLanguages").resolves(response);
-sinon_1.default.stub(api, "getApiClient").value(() => client);
+sinon.stub(api, "getApiClient").value(() => client);
 }
-ava_1.default("load empty config", async (t) => {
+(0, ava_1.default)("load empty config", async (t) => {
 return await util.withTmpDir(async (tmpDir) => {
-const logger = logging_1.getRunnerLogger(true);
+const logger = (0, logging_1.getRunnerLogger)(true);
 const languages = "javascript,python";
-const codeQL = codeql_1.setCodeQL({
+const codeQL = (0, codeql_1.setCodeQL)({
 async resolveQueries() {
 return {
 byLanguage: {
@@ -92,10 +92,10 @@ ava_1.default("load empty config", async (t) => {
 t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
 });
 });
-ava_1.default("loading config saves config", async (t) => {
+(0, ava_1.default)("loading config saves config", async (t) => {
 return await util.withTmpDir(async (tmpDir) => {
-const logger = logging_1.getRunnerLogger(true);
+const logger = (0, logging_1.getRunnerLogger)(true);
-const codeQL = codeql_1.setCodeQL({
+const codeQL = (0, codeql_1.setCodeQL)({
 async resolveQueries() {
 return {
 byLanguage: {
@@ -119,10 +119,10 @@ ava_1.default("loading config saves config", async (t) => {
 t.deepEqual(config1, config2);
 });
 });
-ava_1.default("load input outside of workspace", async (t) => {
+(0, ava_1.default)("load input outside of workspace", async (t) => {
 return await util.withTmpDir(async (tmpDir) => {
 try {
-await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
 throw new Error("initConfig did not throw error");
 }
 catch (err) {
@@ -130,12 +130,12 @@ ava_1.default("load input outside of workspace", async (t) => {
 }
 });
 });
-ava_1.default("load non-local input with invalid repo syntax", async (t) => {
+(0, ava_1.default)("load non-local input with invalid repo syntax", async (t) => {
 return await util.withTmpDir(async (tmpDir) => {
 // no filename given, just a repo
 const configFile = "octo-org/codeql-config@main";
 try {
-await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
 throw new Error("initConfig did not throw error");
 }
 catch (err) {
@@ -143,13 +143,13 @@ ava_1.default("load non-local input with invalid repo syntax", async (t) => {
 }
 });
 });
-ava_1.default("load non-existent input", async (t) => {
+(0, ava_1.default)("load non-existent input", async (t) => {
 return await util.withTmpDir(async (tmpDir) => {
 const languages = "javascript";
 const configFile = "input";
 t.false(fs.existsSync(path.join(tmpDir, configFile)));
 try {
-await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
 throw new Error("initConfig did not throw error");
 }
 catch (err) {
@@ -157,9 +157,9 @@ ava_1.default("load non-existent input", async (t) => {
 }
 });
 });
ava_1.default("load non-empty input", async (t) => {
|
(0, ava_1.default)("load non-empty input", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async resolveQueries() {
|
async resolveQueries() {
|
||||||
return {
|
return {
|
||||||
byLanguage: {
|
byLanguage: {
|
||||||
@@ -217,12 +217,12 @@ ava_1.default("load non-empty input", async (t) => {
|
|||||||
};
|
};
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||||
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||||
// Should exactly equal the object we constructed earlier
|
// Should exactly equal the object we constructed earlier
|
||||||
t.deepEqual(actualConfig, expectedConfig);
|
t.deepEqual(actualConfig, expectedConfig);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("Default queries are used", async (t) => {
|
(0, ava_1.default)("Default queries are used", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
// Check that the default behaviour is to add the default queries.
|
// Check that the default behaviour is to add the default queries.
|
||||||
// In this case if a config file is specified but does not include
|
// In this case if a config file is specified but does not include
|
||||||
@@ -230,7 +230,7 @@ ava_1.default("Default queries are used", async (t) => {
|
|||||||
// We determine this by whether CodeQL.resolveQueries is called
|
// We determine this by whether CodeQL.resolveQueries is called
|
||||||
// with the correct arguments.
|
// with the correct arguments.
|
||||||
const resolveQueriesArgs = [];
|
const resolveQueriesArgs = [];
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
async resolveQueries(queries, extraSearchPath) {
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||||
return {
|
return {
|
||||||
@@ -253,7 +253,7 @@ ava_1.default("Default queries are used", async (t) => {
|
|||||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||||
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||||
// Check resolve queries was called correctly
|
// Check resolve queries was called correctly
|
||||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||||
t.deepEqual(resolveQueriesArgs[0].queries, [
|
t.deepEqual(resolveQueriesArgs[0].queries, [
|
||||||
@@ -280,7 +280,7 @@ function queriesToResolvedQueryForm(queries) {
|
|||||||
multipleDeclaredLanguages: {},
|
multipleDeclaredLanguages: {},
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
ava_1.default("Queries can be specified in config file", async (t) => {
|
(0, ava_1.default)("Queries can be specified in config file", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const inputFileContents = `
|
const inputFileContents = `
|
||||||
name: my config
|
name: my config
|
||||||
@@ -289,14 +289,14 @@ ava_1.default("Queries can be specified in config file", async (t) => {
|
|||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||||
const resolveQueriesArgs = [];
|
const resolveQueriesArgs = [];
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
async resolveQueries(queries, extraSearchPath) {
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||||
return queriesToResolvedQueryForm(queries);
|
return queriesToResolvedQueryForm(queries);
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||||
// Check resolveQueries was called correctly
|
// Check resolveQueries was called correctly
|
||||||
// It'll be called once for the default queries
|
// It'll be called once for the default queries
|
||||||
// and once for `./foo` from the config file.
|
// and once for `./foo` from the config file.
|
||||||
@@ -310,7 +310,7 @@ ava_1.default("Queries can be specified in config file", async (t) => {
|
|||||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/foo$/);
|
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/foo$/);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("Queries from config file can be overridden in workflow file", async (t) => {
|
(0, ava_1.default)("Queries from config file can be overridden in workflow file", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const inputFileContents = `
|
const inputFileContents = `
|
||||||
name: my config
|
name: my config
|
||||||
@@ -322,14 +322,14 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
|
|||||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||||
fs.mkdirSync(path.join(tmpDir, "override"));
|
fs.mkdirSync(path.join(tmpDir, "override"));
|
||||||
const resolveQueriesArgs = [];
|
const resolveQueriesArgs = [];
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
async resolveQueries(queries, extraSearchPath) {
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||||
return queriesToResolvedQueryForm(queries);
|
return queriesToResolvedQueryForm(queries);
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||||
// Check resolveQueries was called correctly
|
// Check resolveQueries was called correctly
|
||||||
// It'll be called once for the default queries and once for `./override`,
|
// It'll be called once for the default queries and once for `./override`,
|
||||||
// but won't be called for './foo' from the config file.
|
// but won't be called for './foo' from the config file.
|
||||||
@@ -343,7 +343,7 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
|
|||||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/override$/);
|
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/override$/);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
|
(0, ava_1.default)("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
process.env["RUNNER_TEMP"] = tmpDir;
|
process.env["RUNNER_TEMP"] = tmpDir;
|
||||||
process.env["GITHUB_WORKSPACE"] = tmpDir;
|
process.env["GITHUB_WORKSPACE"] = tmpDir;
|
||||||
@@ -354,14 +354,14 @@ ava_1.default("Queries in workflow file can be used in tandem with the 'disable
|
|||||||
const testQueries = "./workflow-query";
|
const testQueries = "./workflow-query";
|
||||||
fs.mkdirSync(path.join(tmpDir, "workflow-query"));
|
fs.mkdirSync(path.join(tmpDir, "workflow-query"));
|
||||||
const resolveQueriesArgs = [];
|
const resolveQueriesArgs = [];
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
async resolveQueries(queries, extraSearchPath) {
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||||
return queriesToResolvedQueryForm(queries);
|
return queriesToResolvedQueryForm(queries);
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||||
// Check resolveQueries was called correctly
|
// Check resolveQueries was called correctly
|
||||||
// It'll be called once for `./workflow-query`,
|
// It'll be called once for `./workflow-query`,
|
||||||
// but won't be called for the default one since that was disabled
|
// but won't be called for the default one since that was disabled
|
||||||
@@ -374,20 +374,20 @@ ava_1.default("Queries in workflow file can be used in tandem with the 'disable
|
|||||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/workflow-query$/);
|
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/workflow-query$/);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("Multiple queries can be specified in workflow file, no config file required", async (t) => {
|
(0, ava_1.default)("Multiple queries can be specified in workflow file, no config file required", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
fs.mkdirSync(path.join(tmpDir, "override1"));
|
fs.mkdirSync(path.join(tmpDir, "override1"));
|
||||||
fs.mkdirSync(path.join(tmpDir, "override2"));
|
fs.mkdirSync(path.join(tmpDir, "override2"));
|
||||||
const testQueries = "./override1,./override2";
|
const testQueries = "./override1,./override2";
|
||||||
const resolveQueriesArgs = [];
|
const resolveQueriesArgs = [];
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
async resolveQueries(queries, extraSearchPath) {
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||||
return queriesToResolvedQueryForm(queries);
|
return queriesToResolvedQueryForm(queries);
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||||
// Check resolveQueries was called correctly:
|
// Check resolveQueries was called correctly:
|
||||||
// It'll be called once for the default queries,
|
// It'll be called once for the default queries,
|
||||||
// and then once for each of the two queries from the workflow
|
// and then once for each of the two queries from the workflow
|
||||||
@@ -404,7 +404,7 @@ ava_1.default("Multiple queries can be specified in workflow file, no config fil
|
|||||||
t.regex(config.queries["javascript"].custom[1].queries[0], /.*\/override2$/);
|
t.regex(config.queries["javascript"].custom[1].queries[0], /.*\/override2$/);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
|
(0, ava_1.default)("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
process.env["RUNNER_TEMP"] = tmpDir;
|
process.env["RUNNER_TEMP"] = tmpDir;
|
||||||
process.env["GITHUB_WORKSPACE"] = tmpDir;
|
process.env["GITHUB_WORKSPACE"] = tmpDir;
|
||||||
@@ -419,14 +419,14 @@ ava_1.default("Queries in workflow file can be added to the set of queries witho
|
|||||||
fs.mkdirSync(path.join(tmpDir, "additional1"));
|
fs.mkdirSync(path.join(tmpDir, "additional1"));
|
||||||
fs.mkdirSync(path.join(tmpDir, "additional2"));
|
fs.mkdirSync(path.join(tmpDir, "additional2"));
|
||||||
const resolveQueriesArgs = [];
|
const resolveQueriesArgs = [];
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
async resolveQueries(queries, extraSearchPath) {
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||||
return queriesToResolvedQueryForm(queries);
|
return queriesToResolvedQueryForm(queries);
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||||
// Check resolveQueries was called correctly
|
// Check resolveQueries was called correctly
|
||||||
// It'll be called once for the default queries,
|
// It'll be called once for the default queries,
|
||||||
// once for each of additional1 and additional2,
|
// once for each of additional1 and additional2,
|
||||||
@@ -447,13 +447,13 @@ ava_1.default("Queries in workflow file can be added to the set of queries witho
|
|||||||
t.regex(config.queries["javascript"].custom[2].queries[0], /.*\/foo$/);
|
t.regex(config.queries["javascript"].custom[2].queries[0], /.*\/foo$/);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("Invalid queries in workflow file handled correctly", async (t) => {
|
(0, ava_1.default)("Invalid queries in workflow file handled correctly", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const queries = "foo/bar@v1@v3";
|
const queries = "foo/bar@v1@v3";
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
// This function just needs to be type-correct; it doesn't need to do anything,
|
// This function just needs to be type-correct; it doesn't need to do anything,
|
||||||
// since we're deliberately passing in invalid data
|
// since we're deliberately passing in invalid data
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async resolveQueries() {
|
async resolveQueries() {
|
||||||
return {
|
return {
|
||||||
byLanguage: {
|
byLanguage: {
|
||||||
@@ -465,7 +465,7 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||||
t.fail("initConfig did not throw error");
|
t.fail("initConfig did not throw error");
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
@@ -473,9 +473,9 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("API client used when reading remote config", async (t) => {
|
(0, ava_1.default)("API client used when reading remote config", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async resolveQueries() {
|
async resolveQueries() {
|
||||||
return {
|
return {
|
||||||
byLanguage: {
|
byLanguage: {
|
||||||
@@ -508,17 +508,17 @@ ava_1.default("API client used when reading remote config", async (t) => {
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript";
-await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.assert(spyGetContents.called);
});
});
-ava_1.default("Remote config handles the case where a directory is provided", async (t) => {
+(0, ava_1.default)("Remote config handles the case where a directory is provided", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const dummyResponse = []; // directories are returned as arrays
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
-await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -526,7 +526,7 @@ ava_1.default("Remote config handles the case where a directory is provided", as
}
});
});
-ava_1.default("Invalid format of remote config handled correctly", async (t) => {
+(0, ava_1.default)("Invalid format of remote config handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const dummyResponse = {
// note no "content" property here
@@ -534,7 +534,7 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
-await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -542,16 +542,16 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
}
});
});
-ava_1.default("No detected languages", async (t) => {
+(0, ava_1.default)("No detected languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
mockListLanguages([]);
-const codeQL = codeql_1.setCodeQL({
+const codeQL = (0, codeql_1.setCodeQL)({
async resolveLanguages() {
return {};
},
});
try {
-await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -559,11 +559,11 @@ ava_1.default("No detected languages", async (t) => {
}
});
});
-ava_1.default("Unknown languages", async (t) => {
+(0, ava_1.default)("Unknown languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const languages = "rubbish,english";
try {
-await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -571,9 +571,9 @@ ava_1.default("Unknown languages", async (t) => {
}
});
});
-ava_1.default("Config specifies packages", async (t) => {
+(0, ava_1.default)("Config specifies packages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
-const codeQL = codeql_1.setCodeQL({
+const codeQL = (0, codeql_1.setCodeQL)({
async resolveQueries() {
return {
byLanguage: {},
@@ -591,20 +591,20 @@ ava_1.default("Config specifies packages", async (t) => {
const configFile = path.join(tmpDir, "codeql-config.yaml");
fs.writeFileSync(configFile, inputFileContents);
const languages = "javascript";
-const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
packName: "a/b",
-version: semver_1.clean("1.2.3"),
+version: (0, semver_1.clean)("1.2.3"),
},
],
});
});
});
-ava_1.default("Config specifies packages for multiple languages", async (t) => {
+(0, ava_1.default)("Config specifies packages for multiple languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
-const codeQL = codeql_1.setCodeQL({
+const codeQL = (0, codeql_1.setCodeQL)({
async resolveQueries() {
return {
byLanguage: {
@@ -630,18 +630,18 @@ ava_1.default("Config specifies packages for multiple languages", async (t) => {
fs.writeFileSync(configFile, inputFileContents);
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript,python,cpp";
-const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
packName: "a/b",
-version: semver_1.clean("1.2.3"),
+version: (0, semver_1.clean)("1.2.3"),
},
],
[languages_1.Language.python]: [
{
packName: "c/d",
-version: semver_1.clean("1.2.3"),
+version: (0, semver_1.clean)("1.2.3"),
},
],
});
@@ -667,9 +667,9 @@ ava_1.default("Config specifies packages for multiple languages", async (t) => {
});
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
-ava_1.default(`load invalid input - ${testName}`, async (t) => {
+(0, ava_1.default)(`load invalid input - ${testName}`, async (t) => {
return await util.withTmpDir(async (tmpDir) => {
-const codeQL = codeql_1.setCodeQL({
+const codeQL = (0, codeql_1.setCodeQL)({
async resolveQueries() {
return {
byLanguage: {},
@@ -683,7 +683,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, "utf8");
try {
-await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -733,25 +733,25 @@ const validPaths = [
"/foo",
];
const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
-ava_1.default("path validations", (t) => {
+(0, ava_1.default)("path validations", (t) => {
// Dummy values to pass to validateAndSanitisePath
const propertyName = "paths";
const configFile = "./.github/codeql/config.yml";
for (const validPath of validPaths) {
-t.truthy(configUtils.validateAndSanitisePath(validPath, propertyName, configFile, logging_1.getRunnerLogger(true)));
+t.truthy(configUtils.validateAndSanitisePath(validPath, propertyName, configFile, (0, logging_1.getRunnerLogger)(true)));
}
for (const invalidPath of invalidPaths) {
-t.throws(() => configUtils.validateAndSanitisePath(invalidPath, propertyName, configFile, logging_1.getRunnerLogger(true)));
+t.throws(() => configUtils.validateAndSanitisePath(invalidPath, propertyName, configFile, (0, logging_1.getRunnerLogger)(true)));
}
});
-ava_1.default("path sanitisation", (t) => {
+(0, ava_1.default)("path sanitisation", (t) => {
// Dummy values to pass to validateAndSanitisePath
const propertyName = "paths";
const configFile = "./.github/codeql/config.yml";
// Valid paths are not modified
-t.deepEqual(configUtils.validateAndSanitisePath("foo/bar", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/bar");
+t.deepEqual(configUtils.validateAndSanitisePath("foo/bar", propertyName, configFile, (0, logging_1.getRunnerLogger)(true)), "foo/bar");
// Trailing stars are stripped
-t.deepEqual(configUtils.validateAndSanitisePath("foo/**", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/");
+t.deepEqual(configUtils.validateAndSanitisePath("foo/**", propertyName, configFile, (0, logging_1.getRunnerLogger)(true)), "foo/");
});
/**
* Test macro for ensuring the packs block is valid
@@ -778,40 +778,40 @@ function invalidPackNameMacro(t, name) {
parsePacksErrorMacro(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`));
}
invalidPackNameMacro.title = (_, arg) => `Invalid pack string: ${arg}`;
-ava_1.default("no packs", parsePacksMacro, {}, [], {});
+(0, ava_1.default)("no packs", parsePacksMacro, {}, [], {});
-ava_1.default("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
+(0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
-{ packName: "c/d", version: semver_1.clean("1.2.3") },
+{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
],
});
-ava_1.default("two packs with spaces", parsePacksMacro, [" a/b ", " c/d@1.2.3 "], [languages_1.Language.cpp], {
+(0, ava_1.default)("two packs with spaces", parsePacksMacro, [" a/b ", " c/d@1.2.3 "], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
-{ packName: "c/d", version: semver_1.clean("1.2.3") },
+{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
],
});
-ava_1.default("two packs with language", parsePacksMacro, {
+(0, ava_1.default)("two packs with language", parsePacksMacro, {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
}, [languages_1.Language.cpp, languages_1.Language.java, languages_1.Language.csharp], {
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
-{ packName: "c/d", version: semver_1.clean("1.2.3") },
+{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
],
[languages_1.Language.java]: [
{ packName: "d/e", version: undefined },
-{ packName: "f/g", version: semver_1.clean("1.2.3") },
+{ packName: "f/g", version: (0, semver_1.clean)("1.2.3") },
],
});
-ava_1.default("no language", parsePacksErrorMacro, ["a/b@1.2.3"], [languages_1.Language.java, languages_1.Language.python], /The configuration file "\/a\/b" is invalid: property "packs" must split packages by language/);
+(0, ava_1.default)("no language", parsePacksErrorMacro, ["a/b@1.2.3"], [languages_1.Language.java, languages_1.Language.python], /The configuration file "\/a\/b" is invalid: property "packs" must split packages by language/);
-ava_1.default("invalid language", parsePacksErrorMacro, { [languages_1.Language.java]: ["c/d"] }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" has "java", but it is not one of the languages to analyze/);
+(0, ava_1.default)("invalid language", parsePacksErrorMacro, { [languages_1.Language.java]: ["c/d"] }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" has "java", but it is not one of the languages to analyze/);
-ava_1.default("not an array", parsePacksErrorMacro, { [languages_1.Language.cpp]: "c/d" }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" must be an array of non-empty strings/);
+(0, ava_1.default)("not an array", parsePacksErrorMacro, { [languages_1.Language.cpp]: "c/d" }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" must be an array of non-empty strings/);
-ava_1.default(invalidPackNameMacro, "c"); // all packs require at least a scope and a name
+(0, ava_1.default)(invalidPackNameMacro, "c"); // all packs require at least a scope and a name
-ava_1.default(invalidPackNameMacro, "c-/d");
+(0, ava_1.default)(invalidPackNameMacro, "c-/d");
-ava_1.default(invalidPackNameMacro, "-c/d");
+(0, ava_1.default)(invalidPackNameMacro, "-c/d");
-ava_1.default(invalidPackNameMacro, "c/d_d");
+(0, ava_1.default)(invalidPackNameMacro, "c/d_d");
-ava_1.default(invalidPackNameMacro, "c/d@x");
+(0, ava_1.default)(invalidPackNameMacro, "c/d@x");
/**
* Test macro for testing the packs block and the packs input
*/
@@ -827,34 +827,34 @@ function parseInputAndConfigErrorMacro(t, packsFromConfig, packsFromInput, langu
});
}
parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and config Error: ${providedTitle}`;
-ava_1.default("input only", parseInputAndConfigMacro, {}, " c/d ", [languages_1.Language.cpp], {
+(0, ava_1.default)("input only", parseInputAndConfigMacro, {}, " c/d ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [{ packName: "c/d", version: undefined }],
});
-ava_1.default("input only with multiple", parseInputAndConfigMacro, {}, "a/b , c/d@1.2.3", [languages_1.Language.cpp], {
+(0, ava_1.default)("input only with multiple", parseInputAndConfigMacro, {}, "a/b , c/d@1.2.3", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: "1.2.3" },
],
});
-ava_1.default("input only with +", parseInputAndConfigMacro, {}, " + a/b , c/d@1.2.3 ", [languages_1.Language.cpp], {
+(0, ava_1.default)("input only with +", parseInputAndConfigMacro, {}, " + a/b , c/d@1.2.3 ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: "1.2.3" },
],
});
-ava_1.default("config only", parseInputAndConfigMacro, ["a/b", "c/d"], " ", [languages_1.Language.cpp], {
+(0, ava_1.default)("config only", parseInputAndConfigMacro, ["a/b", "c/d"], " ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: undefined },
],
});
-ava_1.default("input overrides", parseInputAndConfigMacro, ["a/b", "c/d"], " e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
+(0, ava_1.default)("input overrides", parseInputAndConfigMacro, ["a/b", "c/d"], " e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
{ packName: "e/f", version: undefined },
{ packName: "g/h", version: "1.2.3" },
],
});
-ava_1.default("input and config", parseInputAndConfigMacro, ["a/b", "c/d"], " +e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
+(0, ava_1.default)("input and config", parseInputAndConfigMacro, ["a/b", "c/d"], " +e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
{ packName: "e/f", version: undefined },
{ packName: "g/h", version: "1.2.3" },
@@ -862,10 +862,10 @@ ava_1.default("input and config", parseInputAndConfigMacro, ["a/b", "c/d"], " +e
{ packName: "c/d", version: undefined },
],
});
-ava_1.default("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], /No languages specified/);
+(0, ava_1.default)("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], /No languages specified/);
-ava_1.default("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
+(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
-ava_1.default("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
+(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
-ava_1.default("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
+(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
// errors
// input w invalid pack name
//# sourceMappingURL=config-utils.test.js.map
File diff suppressed because one or more lines are too long
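A note on the recurring change from `mod.fn(...)` to `(0, mod.fn)(...)` throughout these generated files: this looks like the indirect-call form that newer TypeScript versions emit when compiling ES module imports down to CommonJS, so that an imported function is not invoked as a method of the module namespace object. The snippet below is a minimal illustrative sketch of the semantic difference, not code from this repository, and the compiler-version explanation is an inference from the diff rather than something the comparison states.

"use strict";
// Illustrative sketch (not from this codebase): the `(0, fn)` comma expression
// yields the bare function, so the callee sees `this === undefined` under
// strict mode instead of the namespace object. This matches ES module call
// semantics for imported functions.
const ns = {
  whoIsThis() {
    return this;
  },
};
console.log(ns.whoIsThis() === ns);             // true: ordinary method call
console.log((0, ns.whoIsThis)() === undefined); // true: indirect call in strict mode

In other words, the behaviour of the compiled tests is unchanged; only the call shape emitted by the compiler differs between the two sides of this comparison.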
24
lib/count-loc.js
generated
@@ -1,9 +1,8 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.countLoc = exports.getIdPrefix = void 0;
+exports.countLoc = void 0;
const github_linguist_1 = require("github-linguist");
const languages_1 = require("./languages");
-const util_1 = require("./util");
// Map from linguist language names to language prefixes used in the action and codeql
const linguistToMetrics = {
c: languages_1.Language.cpp,
@@ -23,27 +22,6 @@ const nameToLinguist = Object.entries(linguistToMetrics).reduce((obj, [key, name
obj[name].push(key);
return obj;
}, {});
-function getIdPrefix(language) {
-switch (language) {
-case languages_1.Language.cpp:
-return "cpp";
-case languages_1.Language.csharp:
-return "cs";
-case languages_1.Language.go:
-return "go";
-case languages_1.Language.java:
-return "java";
-case languages_1.Language.javascript:
-return "js";
-case languages_1.Language.python:
-return "py";
-case languages_1.Language.ruby:
-return "rb";
-default:
-util_1.assertNever(language);
-}
-}
-exports.getIdPrefix = getIdPrefix;
/**
* Count the lines of code of the specified language using the include
* and exclude glob paths.
@@ -1 +1 @@
{"version":3,"file":"count-loc.js","sourceRoot":"","sources":["../src/count-loc.ts"],"names":[],"mappings":";;;AAAA,qDAAyC;AAEzC,2CAAuC;AAEvC,iCAAqC;AAKrC,sFAAsF;AACtF,MAAM,iBAAiB,GAA6B;IAClD,CAAC,EAAE,oBAAQ,CAAC,GAAG;IACf,KAAK,EAAE,oBAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,oBAAQ,CAAC,MAAM;IACrB,EAAE,EAAE,oBAAQ,CAAC,EAAE;IACf,IAAI,EAAE,oBAAQ,CAAC,IAAI;IACnB,UAAU,EAAE,oBAAQ,CAAC,UAAU;IAC/B,MAAM,EAAE,oBAAQ,CAAC,MAAM;IACvB,IAAI,EAAE,oBAAQ,CAAC,IAAI;IACnB,UAAU,EAAE,oBAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,MAAM,cAAc,GAAG,MAAM,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC,MAAM,CAC7D,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,IAAI,CAAC,EAAE,EAAE;IACnB,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACd,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;KAChB;IACD,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACpB,OAAO,GAAG,CAAC;AACb,CAAC,EACD,EAAgC,CACjC,CAAC;AAEF,SAAgB,WAAW,CAAC,QAAkB;IAC5C,QAAQ,QAAQ,EAAE;QAChB,KAAK,oBAAQ,CAAC,GAAG;YACf,OAAO,KAAK,CAAC;QACf,KAAK,oBAAQ,CAAC,MAAM;YAClB,OAAO,IAAI,CAAC;QACd,KAAK,oBAAQ,CAAC,EAAE;YACd,OAAO,IAAI,CAAC;QACd,KAAK,oBAAQ,CAAC,IAAI;YAChB,OAAO,MAAM,CAAC;QAChB,KAAK,oBAAQ,CAAC,UAAU;YACtB,OAAO,IAAI,CAAC;QACd,KAAK,oBAAQ,CAAC,MAAM;YAClB,OAAO,IAAI,CAAC;QACd,KAAK,oBAAQ,CAAC,IAAI;YAChB,OAAO,IAAI,CAAC;QAEd;YACE,kBAAW,CAAC,QAAQ,CAAC,CAAC;KACzB;AACH,CAAC;AApBD,kCAoBC;AAED;;;;;;;;;GASG;AACI,KAAK,UAAU,QAAQ,CAC5B,GAAW,EACX,OAAiB,EACjB,OAAiB,EACjB,WAAuB,EACvB,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAI,wBAAM,CAAC;QAC9B,GAAG;QACH,OAAO,EAAE,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACxE,OAAO;QACP,iBAAiB,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;KACvE,CAAC,CAAC,QAAQ,EAAE,CAAC;IAEd,uDAAuD;IACvD,uDAAuD;IACvD,2DAA2D;IAC3D,MAAM,UAAU,GAAG,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,MAAM,CACxD,CAAC,GAAG,EAAE,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE;QAC5B,MAAM,eAAe,GAAG,iBAAiB,CAAC,QAAQ,CAAC,CAAC;QACpD,IAAI,eAAe,IAAI,WAAW,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE;YAC5D,GAAG,CAAC,eAAe,CAAC,GAAG,IAAI,GAAG,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC;SAC3D;QACD,OAAO,GAAG,CAAC;IACb,CAAC,EACD,EAA8B,CAC/B,CAAC;IAEF,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,EAAE;QAClC,MAAM,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAC;QACrC,KAAK,MAAM,CAAC,QAAQ,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;YAC1D,MAAM,CAAC,KAAK,CAAC,KAAK,QAAQ,KAAK,KAAK,EAAE,CAAC,CAAC;SACzC;KACF;SAAM;QACL,MAAM,CAAC,IAAI,CACT,2EAA2E;YACzE,gEAAgE;YAChE,4EAA4E;YAC5E,4EAA4E;YAC5E,2EAA2E;YAC3E,6EAA6E;YAC7E,2CAA2C,CAC9C,CAAC;KACH;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AA9CD,4BA8CC"}
{"version":3,"file":"count-loc.js","sourceRoot":"","sources":["../src/count-loc.ts"],"names":[],"mappings":";;;AAAA,qDAAyC;AAEzC,2CAAuC;AAGvC,sFAAsF;AACtF,MAAM,iBAAiB,GAA6B;IAClD,CAAC,EAAE,oBAAQ,CAAC,GAAG;IACf,KAAK,EAAE,oBAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,oBAAQ,CAAC,MAAM;IACrB,EAAE,EAAE,oBAAQ,CAAC,EAAE;IACf,IAAI,EAAE,oBAAQ,CAAC,IAAI;IACnB,UAAU,EAAE,oBAAQ,CAAC,UAAU;IAC/B,MAAM,EAAE,oBAAQ,CAAC,MAAM;IACvB,IAAI,EAAE,oBAAQ,CAAC,IAAI;IACnB,UAAU,EAAE,oBAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,MAAM,cAAc,GAAG,MAAM,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC,MAAM,CAC7D,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,IAAI,CAAC,EAAE,EAAE;IACnB,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACd,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;KAChB;IACD,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACpB,OAAO,GAAG,CAAC;AACb,CAAC,EACD,EAAgC,CACjC,CAAC;AAEF;;;;;;;;;GASG;AACI,KAAK,UAAU,QAAQ,CAC5B,GAAW,EACX,OAAiB,EACjB,OAAiB,EACjB,WAAuB,EACvB,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAI,wBAAM,CAAC;QAC9B,GAAG;QACH,OAAO,EAAE,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACxE,OAAO;QACP,iBAAiB,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;KACvE,CAAC,CAAC,QAAQ,EAAE,CAAC;IAEd,uDAAuD;IACvD,uDAAuD;IACvD,2DAA2D;IAC3D,MAAM,UAAU,GAAG,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,MAAM,CACxD,CAAC,GAAG,EAAE,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE;QAC5B,MAAM,eAAe,GAAG,iBAAiB,CAAC,QAAQ,CAAC,CAAC;QACpD,IAAI,eAAe,IAAI,WAAW,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE;YAC5D,GAAG,CAAC,eAAe,CAAC,GAAG,IAAI,GAAG,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC;SAC3D;QACD,OAAO,GAAG,CAAC;IACb,CAAC,EACD,EAA8B,CAC/B,CAAC;IAEF,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,EAAE;QAClC,MAAM,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAC;QACrC,KAAK,MAAM,CAAC,QAAQ,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;YAC1D,MAAM,CAAC,KAAK,CAAC,KAAK,QAAQ,KAAK,KAAK,EAAE,CAAC,CAAC;SACzC;KACF;SAAM;QACL,MAAM,CAAC,IAAI,CACT,2EAA2E;YACzE,gEAAgE;YAChE,4EAA4E;YAC5E,4EAA4E;YAC5E,2EAA2E;YAC3E,6EAA6E;YAC7E,2CAA2C,CAC9C,CAAC;KACH;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AA9CD,4BA8CC"}
30
lib/count-loc.test.js
generated
@@ -28,49 +28,49 @@ const count_loc_1 = require("./count-loc");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
-ava_1.default("ensure lines of code works for cpp and js", async (t) => {
+(0, ava_1.default)("ensure lines of code works for cpp and js", async (t) => {
-const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.cpp, languages_1.Language.javascript], logging_1.getRunnerLogger(true));
+const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.cpp, languages_1.Language.javascript], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
cpp: 6,
javascript: 9,
});
});
-ava_1.default("ensure lines of code works for csharp", async (t) => {
+(0, ava_1.default)("ensure lines of code works for csharp", async (t) => {
-const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.csharp], logging_1.getRunnerLogger(true));
+const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.csharp], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
csharp: 10,
});
});
-ava_1.default("ensure lines of code can handle undefined language", async (t) => {
+(0, ava_1.default)("ensure lines of code can handle undefined language", async (t) => {
-const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.javascript, languages_1.Language.python, "hucairz"], logging_1.getRunnerLogger(true));
+const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.javascript, languages_1.Language.python, "hucairz"], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
javascript: 9,
python: 5,
});
});
-ava_1.default("ensure lines of code can handle empty languages", async (t) => {
+(0, ava_1.default)("ensure lines of code can handle empty languages", async (t) => {
-const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), [], [], [], logging_1.getRunnerLogger(true));
+const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), [], [], [], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {});
});
-ava_1.default("ensure lines of code can handle includes", async (t) => {
+(0, ava_1.default)("ensure lines of code can handle includes", async (t) => {
// note that "**" is always included. The includes are for extra
// directories outside the normal structure.
-const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), ["../../src/testdata"], [], [languages_1.Language.javascript], logging_1.getRunnerLogger(true));
+const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), ["../../src/testdata"], [], [languages_1.Language.javascript], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
javascript: 12,
});
});
-ava_1.default("ensure lines of code can handle empty includes", async (t) => {
+(0, ava_1.default)("ensure lines of code can handle empty includes", async (t) => {
// note that "**" is always included. The includes are for extra
// directories outside the normal structure.
-const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), ["idontexist"], [], [languages_1.Language.javascript], logging_1.getRunnerLogger(true));
+const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), ["idontexist"], [], [languages_1.Language.javascript], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
// should get no results
});
});
-ava_1.default("ensure lines of code can handle exclude", async (t) => {
+(0, ava_1.default)("ensure lines of code can handle exclude", async (t) => {
-const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), [], ["**/*.py"], [languages_1.Language.javascript, languages_1.Language.python], logging_1.getRunnerLogger(true));
+const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), [], ["**/*.py"], [languages_1.Language.javascript, languages_1.Language.python], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
javascript: 9,
});
@@ -1 +1 @@
{"version":3,"file":"count-loc.test.js","sourceRoot":"","sources":["../src/count-loc.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,2CAAuC;AACvC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,2CAA2C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC5D,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,GAAG,EAAE,oBAAQ,CAAC,UAAU,CAAC,EACnC,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,GAAG,EAAE,CAAC;QACN,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,uCAAuC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACxD,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,MAAM,CAAC,EACjB,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,MAAM,EAAE,EAAE;KACX,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oDAAoD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrE,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,EAAE,oBAAQ,CAAC,MAAM,EAAE,SAAqB,CAAC,EAC7D,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,CAAC;QACb,MAAM,EAAE,CAAC;KACV,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iDAAiD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClE,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,EAAE,EACF,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,0CAA0C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC3D,gEAAgE;IAChE,4CAA4C;IAC5C,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,CAAC,oBAAoB,CAAC,EACtB,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,CAAC,EACrB,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,EAAE;KACf,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gDAAgD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACjE,gEAAgE;IAChE,4CAA4C;IAC5C,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,CAAC,YAAY,CAAC,EACd,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,CAAC,EACrB,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;IACnB,wBAAwB;KACzB,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,CAAC,SAAS,CAAC,EACX,CAAC,oBAAQ,CAAC,UAAU,EAAE,oBAAQ,CAAC,MAAM,CAAC,EACtC,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"count-loc.test.js","sourceRoot":"","sources":["../src/count-loc.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,2CAAuC;AACvC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAE7C,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,2CAA2C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC5D,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,GAAG,EAAE,oBAAQ,CAAC,UAAU,CAAC,EACnC,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,GAAG,EAAE,CAAC;QACN,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,uCAAuC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACxD,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,MAAM,CAAC,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,MAAM,EAAE,EAAE;KACX,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oDAAoD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrE,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,EAAE,oBAAQ,CAAC,MAAM,EAAE,SAAqB,CAAC,EAC7D,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,CAAC;QACb,MAAM,EAAE,CAAC;KACV,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iDAAiD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClE,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,EAAE,EACF,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,0CAA0C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC3D,gEAAgE;IAChE,4CAA4C;IAC5C,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,CAAC,oBAAoB,CAAC,EACtB,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,CAAC,EACrB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,EAAE;KACf,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,gDAAgD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACjE,gEAAgE;IAChE,4CAA4C;IAC5C,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,CAAC,YAAY,CAAC,EACd,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,CAAC,EACrB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;IACnB,wBAAwB;KACzB,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,CAAC,SAAS,CAAC,EACX,CAAC,oBAAQ,CAAC,UAAU,EAAE,oBAAQ,CAAC,MAAM,CAAC,EACtC,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
4
lib/database-upload.js
generated
@@ -40,7 +40,7 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
logger.debug("Not analyzing default branch. Skipping upload.");
return;
}
-const client = api_client_1.getApiClient(apiDetails);
+const client = (0, api_client_1.getApiClient)(apiDetails);
try {
await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
owner: repositoryNwo.owner,
@@ -57,7 +57,7 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
}
return;
}
-const codeql = codeql_1.getCodeQL(config.codeQLCmd);
+const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
for (const language of config.languages) {
// Bundle the database up into a single zip file
const databasePath = util.getCodeQLDatabasePath(config, language);
@@ -1 +1 @@
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAExB,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,yBAAY,CAAC,UAAU,CAAC,CAAC;IACxC,IAAI;QACF,MAAM,MAAM,CAAC,OAAO,CAClB,wDAAwD,EACxD;YACE,KAAK,EAAE,aAAa,CAAC,KAAK;YAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;SACzB,CACF,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3C,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;SACH;aAAM;YACL,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,MAAM,CAAC,IAAI,CAAC,kDAAkD,CAAC,EAAE,CAAC,CAAC;SACpE;QACD,OAAO;KACR;IAED,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,gDAAgD;QAChD,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,kBAAkB,GAAG,GAAG,YAAY,MAAM,CAAC;QACjD,MAAM,MAAM,CAAC,cAAc,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC;QAE9D,6BAA6B;QAC7B,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,kBAAkB,CAAC,CAAC;QACpD,IAAI;YACF,MAAM,MAAM,CAAC,OAAO,CAClB,kEAAkE,EAClE;gBACE,KAAK,EAAE,aAAa,CAAC,KAAK;gBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;gBACxB,QAAQ;gBACR,IAAI,EAAE,OAAO;aACd,CACF,CAAC;YACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAtED,0CAsEC"}
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAExB,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,UAAU,CAAC,CAAC;IACxC,IAAI;QACF,MAAM,MAAM,CAAC,OAAO,CAClB,wDAAwD,EACxD;YACE,KAAK,EAAE,aAAa,CAAC,KAAK;YAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;SACzB,CACF,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3C,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;SACH;aAAM;YACL,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,MAAM,CAAC,IAAI,CAAC,kDAAkD,CAAC,EAAE,CAAC,CAAC;SACpE;QACD,OAAO;KACR;IAED,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,gDAAgD;QAChD,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,kBAAkB,GAAG,GAAG,YAAY,MAAM,CAAC;QACjD,MAAM,MAAM,CAAC,cAAc,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC;QAE9D,6BAA6B;QAC7B,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,kBAAkB,CAAC,CAAC;QACpD,IAAI;YACF,MAAM,MAAM,CAAC,OAAO,CAClB,kEAAkE,EAClE;gBACE,KAAK,EAAE,aAAa,CAAC,KAAK;gBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;gBACxB,QAAQ;gBACR,IAAI,EAAE,OAAO;aACd,CACF,CAAC;YACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAtED,0CAsEC"}
114
lib/database-upload.test.js
generated
@@ -25,7 +25,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const github = __importStar(require("@actions/github"));
|
const github = __importStar(require("@actions/github"));
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const sinon_1 = __importDefault(require("sinon"));
|
const sinon = __importStar(require("sinon"));
|
||||||
const actionsUtil = __importStar(require("./actions-util"));
|
const actionsUtil = __importStar(require("./actions-util"));
|
||||||
const apiClient = __importStar(require("./api-client"));
|
const apiClient = __importStar(require("./api-client"));
|
||||||
const codeql_1 = require("./codeql");
|
const codeql_1 = require("./codeql");
|
||||||
@@ -33,9 +33,9 @@ const database_upload_1 = require("./database-upload");
|
|||||||
const languages_1 = require("./languages");
|
const languages_1 = require("./languages");
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
ava_1.default.beforeEach(() => {
|
ava_1.default.beforeEach(() => {
|
||||||
util_1.initializeEnvironment(util_1.Mode.actions, "1.2.3");
|
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
|
||||||
});
|
});
|
||||||
const testRepoName = { owner: "github", repo: "example" };
|
const testRepoName = { owner: "github", repo: "example" };
|
||||||
const testApiDetails = {
|
const testApiDetails = {
|
||||||
@@ -83,7 +83,7 @@ function getRecordingLogger(messages) {
 function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
 // Passing an auth token is required, so we just use a dummy value
 const client = github.getOctokit("123");
-const requestSpy = sinon_1.default.stub(client, "request");
+const requestSpy = sinon.stub(client, "request");
 const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql/databases");
 if (optInStatusCode < 300) {
 optInSpy.resolves(undefined);
@@ -100,147 +100,147 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
 databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
 }
 }
-sinon_1.default.stub(apiClient, "getApiClient").value(() => client);
+sinon.stub(apiClient, "getApiClient").value(() => client);
 }
-ava_1.default("Abort database upload if 'upload-database' input set to false", async (t) => {
+(0, ava_1.default)("Abort database upload if 'upload-database' input set to false", async (t) => {
-await util_1.withTmpDir(async (tmpDir) => {
+await (0, util_1.withTmpDir)(async (tmpDir) => {
-testing_utils_1.setupActionsVars(tmpDir, tmpDir);
+(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-sinon_1.default
+sinon
 .stub(actionsUtil, "getRequiredInput")
 .withArgs("upload-database")
 .returns("false");
-sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 const loggedMessages = [];
-await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
+await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
 t.assert(loggedMessages.find((v) => v.type === "debug" &&
 v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined);
 });
 });
-ava_1.default("Abort database upload if running against GHES", async (t) => {
+(0, ava_1.default)("Abort database upload if running against GHES", async (t) => {
-await util_1.withTmpDir(async (tmpDir) => {
+await (0, util_1.withTmpDir)(async (tmpDir) => {
-testing_utils_1.setupActionsVars(tmpDir, tmpDir);
+(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-sinon_1.default
+sinon
 .stub(actionsUtil, "getRequiredInput")
 .withArgs("upload-database")
 .returns("true");
-sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 const config = getTestConfig(tmpDir);
 config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
 const loggedMessages = [];
-await database_upload_1.uploadDatabases(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
+await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
 t.assert(loggedMessages.find((v) => v.type === "debug" &&
 v.message === "Not running against github.com. Skipping upload.") !== undefined);
 });
 });
-ava_1.default("Abort database upload if running against GHAE", async (t) => {
+(0, ava_1.default)("Abort database upload if running against GHAE", async (t) => {
-await util_1.withTmpDir(async (tmpDir) => {
+await (0, util_1.withTmpDir)(async (tmpDir) => {
-testing_utils_1.setupActionsVars(tmpDir, tmpDir);
+(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-sinon_1.default
+sinon
 .stub(actionsUtil, "getRequiredInput")
 .withArgs("upload-database")
 .returns("true");
-sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 const config = getTestConfig(tmpDir);
 config.gitHubVersion = { type: util_1.GitHubVariant.GHAE };
 const loggedMessages = [];
-await database_upload_1.uploadDatabases(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
+await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
 t.assert(loggedMessages.find((v) => v.type === "debug" &&
 v.message === "Not running against github.com. Skipping upload.") !== undefined);
 });
 });
-ava_1.default("Abort database upload if not analyzing default branch", async (t) => {
+(0, ava_1.default)("Abort database upload if not analyzing default branch", async (t) => {
-await util_1.withTmpDir(async (tmpDir) => {
+await (0, util_1.withTmpDir)(async (tmpDir) => {
-testing_utils_1.setupActionsVars(tmpDir, tmpDir);
+(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-sinon_1.default
+sinon
 .stub(actionsUtil, "getRequiredInput")
 .withArgs("upload-database")
 .returns("true");
-sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
+sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
 const loggedMessages = [];
-await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
+await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
 t.assert(loggedMessages.find((v) => v.type === "debug" &&
 v.message === "Not analyzing default branch. Skipping upload.") !== undefined);
 });
 });
-ava_1.default("Abort database upload if opt-in request returns 404", async (t) => {
+(0, ava_1.default)("Abort database upload if opt-in request returns 404", async (t) => {
-await util_1.withTmpDir(async (tmpDir) => {
+await (0, util_1.withTmpDir)(async (tmpDir) => {
-testing_utils_1.setupActionsVars(tmpDir, tmpDir);
+(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-sinon_1.default
+sinon
 .stub(actionsUtil, "getRequiredInput")
 .withArgs("upload-database")
 .returns("true");
-sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 mockHttpRequests(404);
-codeql_1.setCodeQL({
+(0, codeql_1.setCodeQL)({
 async databaseBundle() {
 return;
 },
 });
 const loggedMessages = [];
-await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
+await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
 t.assert(loggedMessages.find((v) => v.type === "debug" &&
 v.message ===
 "Repository is not opted in to database uploads. Skipping upload.") !== undefined);
 });
 });
-ava_1.default("Abort database upload if opt-in request fails with something other than 404", async (t) => {
+(0, ava_1.default)("Abort database upload if opt-in request fails with something other than 404", async (t) => {
-await util_1.withTmpDir(async (tmpDir) => {
+await (0, util_1.withTmpDir)(async (tmpDir) => {
-testing_utils_1.setupActionsVars(tmpDir, tmpDir);
+(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-sinon_1.default
+sinon
 .stub(actionsUtil, "getRequiredInput")
 .withArgs("upload-database")
 .returns("true");
-sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 mockHttpRequests(500);
-codeql_1.setCodeQL({
+(0, codeql_1.setCodeQL)({
 async databaseBundle() {
 return;
 },
 });
 const loggedMessages = [];
-await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
+await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
 t.assert(loggedMessages.find((v) => v.type === "info" &&
 v.message ===
 "Skipping database upload due to unknown error: Error: some error message") !== undefined);
 });
 });
-ava_1.default("Don't crash if uploading a database fails", async (t) => {
+(0, ava_1.default)("Don't crash if uploading a database fails", async (t) => {
-await util_1.withTmpDir(async (tmpDir) => {
+await (0, util_1.withTmpDir)(async (tmpDir) => {
-testing_utils_1.setupActionsVars(tmpDir, tmpDir);
+(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-sinon_1.default
+sinon
 .stub(actionsUtil, "getRequiredInput")
 .withArgs("upload-database")
 .returns("true");
-sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 mockHttpRequests(204, 500);
-codeql_1.setCodeQL({
+(0, codeql_1.setCodeQL)({
 async databaseBundle(_, outputFilePath) {
 fs.writeFileSync(outputFilePath, "");
 },
 });
 const loggedMessages = [];
-await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
+await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
 t.assert(loggedMessages.find((v) => v.type === "warning" &&
 v.message ===
 "Failed to upload database for javascript: Error: some error message") !== undefined);
 });
 });
-ava_1.default("Successfully uploading a database", async (t) => {
+(0, ava_1.default)("Successfully uploading a database", async (t) => {
-await util_1.withTmpDir(async (tmpDir) => {
+await (0, util_1.withTmpDir)(async (tmpDir) => {
-testing_utils_1.setupActionsVars(tmpDir, tmpDir);
+(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-sinon_1.default
+sinon
 .stub(actionsUtil, "getRequiredInput")
 .withArgs("upload-database")
 .returns("true");
-sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 mockHttpRequests(204, 201);
-codeql_1.setCodeQL({
+(0, codeql_1.setCodeQL)({
 async databaseBundle(_, outputFilePath) {
 fs.writeFileSync(outputFilePath, "");
 },
 });
 const loggedMessages = [];
-await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
+await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
 t.assert(loggedMessages.find((v) => v.type === "debug" &&
 v.message === "Successfully uploaded database for javascript") !== undefined);
 });
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
 {
-"bundleVersion": "codeql-bundle-20210726"
+"bundleVersion": "codeql-bundle-20211013"
 }
lib/error-matcher.test.js (generated, 2 changed lines)
@@ -8,7 +8,7 @@ const error_matcher_1 = require("./error-matcher");
 /*
 NB We test the regexes for all the matchers against example log output snippets.
 */
-ava_1.default("noSourceCodeFound matches against example javascript output", async (t) => {
+(0, ava_1.default)("noSourceCodeFound matches against example javascript output", async (t) => {
 t.assert(testErrorMatcher("noSourceCodeFound", `
 2020-09-07T17:39:53.9050522Z [2020-09-07 17:39:53] [build] Done extracting /opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/data/externs/web/ie_vml.js (3 ms)
 2020-09-07T17:39:53.9051849Z [2020-09-07 17:39:53] [build-err] No JavaScript or TypeScript code found.
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,aAAI,CAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}
|
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,IAAA,aAAI,EAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}
|
||||||
lib/external-queries.test.js (generated, 10 changed lines)
@@ -31,8 +31,8 @@ const externalQueries = __importStar(require("./external-queries"));
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
-ava_1.default("checkoutExternalQueries", async (t) => {
+(0, ava_1.default)("checkoutExternalQueries", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
 // Create a test repo in a subdir of the temp dir.
 // It should have a default branch with two commits after the initial commit, where
@@ -93,20 +93,20 @@ ava_1.default("checkoutExternalQueries", async (t) => {
 const commit2Sha = await runGit(["rev-parse", "HEAD"]);
 // Checkout the first commit, which should contain 'a' and 'b'
 t.false(fs.existsSync(path.join(tmpDir, repoName)));
-await externalQueries.checkoutExternalRepository(repoName, commit1Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
+await externalQueries.checkoutExternalRepository(repoName, commit1Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, (0, logging_1.getRunnerLogger)(true));
 t.true(fs.existsSync(path.join(tmpDir, repoName)));
 t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
 t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
 t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
 // Checkout the second commit as well, which should only contain 'a'
 t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
-await externalQueries.checkoutExternalRepository(repoName, commit2Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
+await externalQueries.checkoutExternalRepository(repoName, commit2Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, (0, logging_1.getRunnerLogger)(true));
 t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
 t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
 t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
 });
 });
-ava_1.default("buildCheckoutURL", (t) => {
+(0, ava_1.default)("buildCheckoutURL", (t) => {
 t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
 url: "https://github.com",
 externalRepoAuth: undefined,
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,C
AC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,EAAE;IAC7B,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,4BAA4B,CAC7B,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,oCAAoC,CACrC,CAAC;IAEF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,+CAA+C,CAChD,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,uDAAuD,CACxD,CAAC;AACJ,CAAC,CAAC,CAAC"}
|
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAA
e,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,EAAE;IAC7B,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,4BAA4B,CAC7B,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,oCAAoC,CACrC,CAAC;IAEF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,+CAA+C,CAChD,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,uDAAuD,CACxD,CAAC;AACJ,CAAC,CAAC,CAAC"}
|
||||||
lib/fingerprints.test.js (generated, 16 changed lines)
@@ -29,7 +29,7 @@ const fingerprints = __importStar(require("./fingerprints"));
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
 async function testHash(t, input, expectedHashes) {
 await util.withTmpDir(async (tmpDir) => {
 const tmpFile = path.resolve(tmpDir, "testfile");
@@ -44,7 +44,7 @@ async function testHash(t, input, expectedHashes) {
 t.is(index, input.split(/\r\n|\r|\n/).length);
 });
 }
-ava_1.default("hash", async (t) => {
+(0, ava_1.default)("hash", async (t) => {
 // Try empty file
 await testHash(t, "", ["c129715d7a2bc9a3:1"]);
 // Try various combinations of newline characters
@@ -128,9 +128,9 @@ function testResolveUriToFile(uri, index, artifactsURIs) {
 const artifacts = artifactsURIs.map((artifactURI) => ({
 location: { uri: artifactURI },
 }));
-return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), logging_1.getRunnerLogger(true));
+return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), (0, logging_1.getRunnerLogger)(true));
 }
-ava_1.default("resolveUriToFile", (t) => {
+(0, ava_1.default)("resolveUriToFile", (t) => {
 // The resolveUriToFile method checks that the file exists and is in the right directory
 // so we need to give it real files to look at. We will use this file as an example.
 // For this to work we require the current working directory to be a parent, but this
@@ -167,7 +167,7 @@ ava_1.default("resolveUriToFile", (t) => {
 t.is(testResolveUriToFile(dirpath, undefined, []), undefined);
 t.is(testResolveUriToFile(`file://${dirpath}`, undefined, []), undefined);
 });
-ava_1.default("addFingerprints", async (t) => {
+(0, ava_1.default)("addFingerprints", async (t) => {
 // Run an end-to-end test on a test file
 let input = fs
 .readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
@@ -180,9 +180,9 @@ ava_1.default("addFingerprints", async (t) => {
 expected = JSON.stringify(JSON.parse(expected));
 // The URIs in the SARIF files resolve to files in the testdata directory
 const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
-t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, logging_1.getRunnerLogger(true)), expected);
+t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);
 });
-ava_1.default("missingRegions", async (t) => {
+(0, ava_1.default)("missingRegions", async (t) => {
 // Run an end-to-end test on a test file
 let input = fs
 .readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
@@ -195,6 +195,6 @@ ava_1.default("missingRegions", async (t) => {
 expected = JSON.stringify(JSON.parse(expected));
 // The URIs in the SARIF files resolve to files in the testdata directory
 const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
-t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, logging_1.getRunnerLogger(true)), expected);
+t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);
 });
 //# sourceMappingURL=fingerprints.test.js.map
File diff suppressed because one or more lines are too long
lib/init-action.js (generated, 59 changed lines)
@@ -22,6 +22,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const path = __importStar(require("path"));
 const core = __importStar(require("@actions/core"));
 const actions_util_1 = require("./actions-util");
+const codeql_1 = require("./codeql");
 const init_1 = require("./init");
 const languages_1 = require("./languages");
 const logging_1 = require("./logging");
@@ -31,16 +32,16 @@ const util_1 = require("./util");
 const pkg = require("../package.json");
 async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
 var _a;
-const statusReportBase = await actions_util_1.createStatusReportBase("init", "success", startedAt);
+const statusReportBase = await (0, actions_util_1.createStatusReportBase)("init", "success", startedAt);
 const languages = config.languages.join(",");
-const workflowLanguages = actions_util_1.getOptionalInput("languages");
+const workflowLanguages = (0, actions_util_1.getOptionalInput)("languages");
 const paths = (config.originalUserInput.paths || []).join(",");
 const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(",");
 const disableDefaultQueries = config.originalUserInput["disable-default-queries"]
 ? languages
 : "";
 const queries = [];
-let queriesInput = (_a = actions_util_1.getOptionalInput("queries")) === null || _a === void 0 ? void 0 : _a.trim();
+let queriesInput = (_a = (0, actions_util_1.getOptionalInput)("queries")) === null || _a === void 0 ? void 0 : _a.trim();
 if (queriesInput === undefined || queriesInput.startsWith("+")) {
 queries.push(...(config.originalUserInput.queries || []).map((q) => q.uses));
 }
@@ -58,48 +59,51 @@ async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
 paths_ignore: pathsIgnore,
 disable_default_queries: disableDefaultQueries,
 queries: queries.join(","),
-tools_input: actions_util_1.getOptionalInput("tools") || "",
+tools_input: (0, actions_util_1.getOptionalInput)("tools") || "",
 tools_resolved_version: toolsVersion,
 };
-await actions_util_1.sendStatusReport(statusReport);
+await (0, actions_util_1.sendStatusReport)(statusReport);
 }
 async function run() {
 const startedAt = new Date();
-const logger = logging_1.getActionsLogger();
+const logger = (0, logging_1.getActionsLogger)();
-util_1.initializeEnvironment(util_1.Mode.actions, pkg.version);
+(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
 let config;
 let codeql;
 let toolsVersion;
 const apiDetails = {
-auth: actions_util_1.getRequiredInput("token"),
+auth: (0, actions_util_1.getRequiredInput)("token"),
-externalRepoAuth: actions_util_1.getOptionalInput("external-repository-token"),
+externalRepoAuth: (0, actions_util_1.getOptionalInput)("external-repository-token"),
-url: util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
+url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
 };
-const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
+const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
-util_1.checkGitHubVersionInRange(gitHubVersion, logger, util_1.Mode.actions);
+(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
 try {
-const workflowErrors = await actions_util_1.validateWorkflow();
+const workflowErrors = await (0, actions_util_1.validateWorkflow)();
-if (!(await actions_util_1.sendStatusReport(await actions_util_1.createStatusReportBase("init", "starting", startedAt, workflowErrors)))) {
+if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
 return;
 }
-const initCodeQLResult = await init_1.initCodeQL(actions_util_1.getOptionalInput("tools"), apiDetails, actions_util_1.getTemporaryDirectory(), actions_util_1.getToolCacheDirectory(), gitHubVersion.type, logger);
+const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), (0, actions_util_1.getToolCacheDirectory)(), gitHubVersion.type, logger);
 codeql = initCodeQLResult.codeql;
 toolsVersion = initCodeQLResult.toolsVersion;
-config = await init_1.initConfig(actions_util_1.getOptionalInput("languages"), actions_util_1.getOptionalInput("queries"), actions_util_1.getOptionalInput("packs"), actions_util_1.getOptionalInput("config-file"), actions_util_1.getOptionalInput("db-location"), repository_1.parseRepositoryNwo(util_1.getRequiredEnvParam("GITHUB_REPOSITORY")), actions_util_1.getTemporaryDirectory(), util_1.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, util_1.getRequiredEnvParam("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
+await (0, util_1.enrichEnvironment)(util_1.Mode.actions, codeql);
+config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
 if (config.languages.includes(languages_1.Language.python) &&
-actions_util_1.getRequiredInput("setup-python-dependencies") === "true") {
+(0, actions_util_1.getRequiredInput)("setup-python-dependencies") === "true") {
 try {
-await init_1.installPythonDeps(codeql, logger);
+await (0, init_1.installPythonDeps)(codeql, logger);
 }
 catch (err) {
-logger.warning(`${err.message} You can call this action with 'setup-python-dependencies: false' to disable this process`);
+const message = err instanceof Error ? err.message : String(err);
+logger.warning(`${message} You can call this action with 'setup-python-dependencies: false' to disable this process`);
 }
 }
 }
 catch (e) {
-core.setFailed(e.message);
+const message = e instanceof Error ? e.message : String(e);
+core.setFailed(message);
 console.log(e);
-await actions_util_1.sendStatusReport(await actions_util_1.createStatusReportBase("init", "aborted", startedAt, e.message));
+await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "aborted", startedAt, message));
 return;
 }
 try {
@@ -112,22 +116,23 @@ async function run() {
 // Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
 const codeqlRam = process.env["CODEQL_RAM"] || "6500";
 core.exportVariable("CODEQL_RAM", codeqlRam);
-const sourceRoot = path.resolve(util_1.getRequiredEnvParam("GITHUB_WORKSPACE"), actions_util_1.getOptionalInput("source-root") || "");
+const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
-const tracerConfig = await init_1.runInit(codeql, config, sourceRoot);
+const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", undefined);
 if (tracerConfig !== undefined) {
 for (const [key, value] of Object.entries(tracerConfig.env)) {
 core.exportVariable(key, value);
 }
-if (process.platform === "win32") {
+if (process.platform === "win32" &&
-await init_1.injectWindowsTracer("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
+!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
+await (0, init_1.injectWindowsTracer)("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
 }
 }
 core.setOutput("codeql-path", config.codeQLCmd);
 }
 catch (error) {
-core.setFailed(error.message);
+core.setFailed(String(error));
 console.log(error);
-await actions_util_1.sendStatusReport(await actions_util_1.createStatusReportBase("init", "failure", startedAt, error.message, error.stack));
+await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "failure", startedAt, String(error), error instanceof Error ? error.stack : undefined));
 return;
 }
 await sendSuccessStatusReport(startedAt, config, toolsVersion);
File diff suppressed because one or more lines are too long
lib/init.js (generated, 20 changed lines)
@@ -29,9 +29,10 @@ const codeql_1 = require("./codeql");
 const configUtils = __importStar(require("./config-utils"));
 const tracer_config_1 = require("./tracer-config");
 const util = __importStar(require("./util"));
+const util_1 = require("./util");
 async function initCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger) {
 logger.startGroup("Setup CodeQL tools");
-const { codeql, toolsVersion } = await codeql_1.setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger);
+const { codeql, toolsVersion } = await (0, codeql_1.setupCodeQL)(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, true);
 await codeql.printVersion();
 logger.endGroup();
 return { codeql, toolsVersion };
@@ -45,14 +46,19 @@ async function initConfig(languagesInput, queriesInput, packsInput, configFile,
 return config;
 }
 exports.initConfig = initConfig;
-async function runInit(codeql, config, sourceRoot) {
+async function runInit(codeql, config, sourceRoot, processName, processLevel) {
 fs.mkdirSync(config.dbLocation, { recursive: true });
-// TODO: replace this code once CodeQL supports multi-language tracing
+if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
-for (const language of config.languages) {
+// Init a database cluster
-// Init language database
+await codeql.databaseInitCluster(config.dbLocation, config.languages, sourceRoot, processName, processLevel);
-await codeql.databaseInit(util.getCodeQLDatabasePath(config, language), language, sourceRoot);
 }
-return await tracer_config_1.getCombinedTracerConfig(config, codeql);
+else {
+for (const language of config.languages) {
+// Init language database
+await codeql.databaseInit(util.getCodeQLDatabasePath(config, language), language, sourceRoot);
+}
+}
+return await (0, tracer_config_1.getCombinedTracerConfig)(config, codeql);
 }
 exports.runInit = runInit;
 // Runs a powershell script to inject the tracer into a parent process
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,oBAAW,CAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,CACP,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AApBD,gCAoBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAlCD,gCAkCC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB;IAElB,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,sEAAsE;IACtE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;KACH;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAlBD,0BAkBC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG
,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
|
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAlCD,gCAkCC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,0BAA0B;QAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;KACH;SAAM;QACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;SACH;KACF;IAED,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA9BD,0BA8BC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CA
AC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
58 lib/languages.test.js generated
@@ -6,39 +6,39 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const ava_1 = __importDefault(require("ava"));
 const languages_1 = require("./languages");
 const testing_utils_1 = require("./testing-utils");
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
-ava_1.default("parseLanguage", async (t) => {
+(0, ava_1.default)("parseLanguage", async (t) => {
 // Exact matches
-t.deepEqual(languages_1.parseLanguage("csharp"), languages_1.Language.csharp);
+t.deepEqual((0, languages_1.parseLanguage)("csharp"), languages_1.Language.csharp);
-t.deepEqual(languages_1.parseLanguage("cpp"), languages_1.Language.cpp);
+t.deepEqual((0, languages_1.parseLanguage)("cpp"), languages_1.Language.cpp);
-t.deepEqual(languages_1.parseLanguage("go"), languages_1.Language.go);
+t.deepEqual((0, languages_1.parseLanguage)("go"), languages_1.Language.go);
-t.deepEqual(languages_1.parseLanguage("java"), languages_1.Language.java);
+t.deepEqual((0, languages_1.parseLanguage)("java"), languages_1.Language.java);
-t.deepEqual(languages_1.parseLanguage("javascript"), languages_1.Language.javascript);
+t.deepEqual((0, languages_1.parseLanguage)("javascript"), languages_1.Language.javascript);
-t.deepEqual(languages_1.parseLanguage("python"), languages_1.Language.python);
+t.deepEqual((0, languages_1.parseLanguage)("python"), languages_1.Language.python);
 // Aliases
-t.deepEqual(languages_1.parseLanguage("c"), languages_1.Language.cpp);
+t.deepEqual((0, languages_1.parseLanguage)("c"), languages_1.Language.cpp);
-t.deepEqual(languages_1.parseLanguage("c++"), languages_1.Language.cpp);
+t.deepEqual((0, languages_1.parseLanguage)("c++"), languages_1.Language.cpp);
-t.deepEqual(languages_1.parseLanguage("c#"), languages_1.Language.csharp);
+t.deepEqual((0, languages_1.parseLanguage)("c#"), languages_1.Language.csharp);
-t.deepEqual(languages_1.parseLanguage("typescript"), languages_1.Language.javascript);
+t.deepEqual((0, languages_1.parseLanguage)("typescript"), languages_1.Language.javascript);
 // Not matches
-t.deepEqual(languages_1.parseLanguage("foo"), undefined);
+t.deepEqual((0, languages_1.parseLanguage)("foo"), undefined);
-t.deepEqual(languages_1.parseLanguage(" "), undefined);
+t.deepEqual((0, languages_1.parseLanguage)(" "), undefined);
-t.deepEqual(languages_1.parseLanguage(""), undefined);
+t.deepEqual((0, languages_1.parseLanguage)(""), undefined);
 });
-ava_1.default("isTracedLanguage", async (t) => {
+(0, ava_1.default)("isTracedLanguage", async (t) => {
-t.true(languages_1.isTracedLanguage(languages_1.Language.cpp));
+t.true((0, languages_1.isTracedLanguage)(languages_1.Language.cpp));
-t.true(languages_1.isTracedLanguage(languages_1.Language.java));
+t.true((0, languages_1.isTracedLanguage)(languages_1.Language.java));
-t.true(languages_1.isTracedLanguage(languages_1.Language.csharp));
+t.true((0, languages_1.isTracedLanguage)(languages_1.Language.csharp));
-t.false(languages_1.isTracedLanguage(languages_1.Language.go));
+t.false((0, languages_1.isTracedLanguage)(languages_1.Language.go));
-t.false(languages_1.isTracedLanguage(languages_1.Language.javascript));
+t.false((0, languages_1.isTracedLanguage)(languages_1.Language.javascript));
-t.false(languages_1.isTracedLanguage(languages_1.Language.python));
+t.false((0, languages_1.isTracedLanguage)(languages_1.Language.python));
 });
-ava_1.default("isScannedLanguage", async (t) => {
+(0, ava_1.default)("isScannedLanguage", async (t) => {
-t.false(languages_1.isScannedLanguage(languages_1.Language.cpp));
+t.false((0, languages_1.isScannedLanguage)(languages_1.Language.cpp));
-t.false(languages_1.isScannedLanguage(languages_1.Language.java));
+t.false((0, languages_1.isScannedLanguage)(languages_1.Language.java));
-t.false(languages_1.isScannedLanguage(languages_1.Language.csharp));
+t.false((0, languages_1.isScannedLanguage)(languages_1.Language.csharp));
-t.true(languages_1.isScannedLanguage(languages_1.Language.go));
+t.true((0, languages_1.isScannedLanguage)(languages_1.Language.go));
-t.true(languages_1.isScannedLanguage(languages_1.Language.javascript));
+t.true((0, languages_1.isScannedLanguage)(languages_1.Language.javascript));
-t.true(languages_1.isScannedLanguage(languages_1.Language.python));
+t.true((0, languages_1.isScannedLanguage)(languages_1.Language.python));
 });
 //# sourceMappingURL=languages.test.js.map
lib/languages.test.js.map generated
Single-line source map regenerated to match the new emit (long generated "mappings" data omitted).
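The only substantive change in the regenerated test file above is that direct calls such as `testing_utils_1.setupTests(...)` become indirect calls `(0, testing_utils_1.setupTests)(...)`, the CommonJS form newer TypeScript compilers emit for calls to imported functions. A minimal hand-written sketch (not taken from this repository) of what the comma-expression wrapper changes:

```ts
// Hypothetical illustration of why the regenerated output wraps calls as
// `(0, ns.fn)(...)`: the comma expression yields the same function but strips
// the property receiver, so the callee no longer sees `ns` as `this`,
// matching how an imported ES-module function is called.
export {}; // make this file a module

const ns = {
  label: "namespace object",
  describe(this: { label: string } | undefined): string {
    return this?.label ?? "no receiver";
  },
};

console.log(ns.describe());      // "namespace object" – receiver is `ns`
console.log((0, ns.describe)()); // "no receiver"      – receiver stripped
```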
75 lib/runner.js generated
@@ -37,7 +37,7 @@ const util_1 = require("./util");
 const pkg = require("../package.json");
 const program = new commander_1.Command();
 program.version(pkg.version).hook("preAction", () => {
-util_1.initializeEnvironment(util_1.Mode.runner, pkg.version);
+(0, util_1.initializeEnvironment)(util_1.Mode.runner, pkg.version);
 });
 function getTempDir(userInput) {
 const tempDir = path.join(userInput || process.cwd(), "codeql-runner");
@@ -117,39 +117,41 @@ program
 .option("--trace-process-name <string>", "(Advanced, windows-only) Inject a windows tracer of this process into a process with the given process name.")
 .option("--trace-process-level <number>", "(Advanced, windows-only) Inject a windows tracer of this process into a parent process <number> levels up.")
 .action(async (cmd) => {
-const logger = logging_1.getRunnerLogger(cmd.debug);
+const logger = (0, logging_1.getRunnerLogger)(cmd.debug);
 try {
 const tempDir = getTempDir(cmd.tempDir);
 const toolsDir = getToolsDir(cmd.toolsDir);
 const checkoutPath = cmd.checkoutPath || process.cwd();
 // Wipe the temp dir
 logger.info(`Cleaning temp directory ${tempDir}`);
-fs.rmdirSync(tempDir, { recursive: true });
+fs.rmSync(tempDir, { recursive: true, force: true });
 fs.mkdirSync(tempDir, { recursive: true });
-const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
+const auth = await (0, util_1.getGitHubAuth)(logger, cmd.githubAuth, cmd.githubAuthStdin);
 const apiDetails = {
 auth,
 externalRepoAuth: auth,
-url: util_1.parseGitHubUrl(cmd.githubUrl),
+url: (0, util_1.parseGitHubUrl)(cmd.githubUrl),
 };
-const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
+const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
-util_1.checkGitHubVersionInRange(gitHubVersion, logger, util_1.Mode.runner);
+(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.runner);
 let codeql;
 if (cmd.codeqlPath !== undefined) {
-codeql = codeql_1.getCodeQL(cmd.codeqlPath);
+codeql = await (0, codeql_1.getCodeQL)(cmd.codeqlPath);
 }
 else {
-codeql = (await init_1.initCodeQL(undefined, apiDetails, tempDir, toolsDir, gitHubVersion.type, logger)).codeql;
+codeql = (await (0, init_1.initCodeQL)(undefined, apiDetails, tempDir, toolsDir, gitHubVersion.type, logger)).codeql;
 }
+await (0, util_1.enrichEnvironment)(util_1.Mode.runner, codeql);
 const workspacePath = checkoutPath;
-const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, logger);
+const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, logger);
 const sourceRoot = checkoutPath;
-const tracerConfig = await init_1.runInit(codeql, config, sourceRoot);
+const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel());
 if (tracerConfig === undefined) {
 return;
 }
-if (process.platform === "win32") {
+if (process.platform === "win32" &&
-await init_1.injectWindowsTracer(parseTraceProcessName(), parseTraceProcessLevel(), config, codeql, tracerConfig);
+!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
+await (0, init_1.injectWindowsTracer)(parseTraceProcessName(), parseTraceProcessLevel(), config, codeql, tracerConfig);
 }
 // Always output a json file of the env that can be consumed programmatically
 const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
@@ -185,7 +187,7 @@ program
 }
 catch (e) {
 logger.error("Init failed");
-logger.error(e);
+logger.error(e instanceof Error ? e : new Error(String(e)));
 process.exitCode = 1;
 }
 });
@@ -196,32 +198,33 @@ program
 .option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
 .option("--debug", "Print more verbose output", false)
 .action(async (cmd) => {
-const logger = logging_1.getRunnerLogger(cmd.debug);
+const logger = (0, logging_1.getRunnerLogger)(cmd.debug);
 try {
-const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
+const config = await (0, config_utils_1.getConfig)(getTempDir(cmd.tempDir), logger);
 if (config === undefined) {
 throw new Error("Config file could not be found at expected location. " +
 "Was the 'init' command run with the same '--temp-dir' argument as this command.");
 }
+await (0, util_1.enrichEnvironment)(util_1.Mode.runner, await (0, codeql_1.getCodeQL)(config.codeQLCmd));
 importTracerEnvironment(config);
 let language = undefined;
 if (cmd.language !== undefined) {
-language = languages_1.parseLanguage(cmd.language);
+language = (0, languages_1.parseLanguage)(cmd.language);
 if (language === undefined || !config.languages.includes(language)) {
 throw new Error(`"${cmd.language}" is not a recognised language. ` +
 `Known languages in this project are ${config.languages.join(", ")}.`);
 }
 }
 else {
-language = autobuild_1.determineAutobuildLanguage(config, logger);
+language = (0, autobuild_1.determineAutobuildLanguage)(config, logger);
 }
 if (language !== undefined) {
-await autobuild_1.runAutobuild(language, config, logger);
+await (0, autobuild_1.runAutobuild)(language, config, logger);
 }
 }
 catch (e) {
 logger.error("Autobuild failed");
-logger.error(e);
+logger.error(e instanceof Error ? e : new Error(String(e)));
 process.exitCode = 1;
 }
 });
@@ -245,32 +248,34 @@ program
 .option("--category <category>", "String used by Code Scanning for matching the analyses.")
 .option("--debug", "Print more verbose output", false)
 .action(async (cmd) => {
-const logger = logging_1.getRunnerLogger(cmd.debug);
+const logger = (0, logging_1.getRunnerLogger)(cmd.debug);
 try {
-const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
+const config = await (0, config_utils_1.getConfig)(getTempDir(cmd.tempDir), logger);
 if (config === undefined) {
 throw new Error("Config file could not be found at expected location. " +
 "Was the 'init' command run with the same '--temp-dir' argument as this command.");
 }
-const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
+await (0, util_1.enrichEnvironment)(util_1.Mode.runner, await (0, codeql_1.getCodeQL)(config.codeQLCmd));
+const auth = await (0, util_1.getGitHubAuth)(logger, cmd.githubAuth, cmd.githubAuthStdin);
 const apiDetails = {
 auth,
-url: util_1.parseGitHubUrl(cmd.githubUrl),
+url: (0, util_1.parseGitHubUrl)(cmd.githubUrl),
 };
 const outputDir = cmd.outputDir || path.join(config.tempDir, "codeql-sarif");
-const threads = util_1.getThreadsFlag(cmd.threads, logger);
+const threads = (0, util_1.getThreadsFlag)(cmd.threads, logger);
-await analyze_1.runFinalize(outputDir, threads, config, logger);
+const memory = (0, util_1.getMemoryFlag)(cmd.ram);
-await analyze_1.runQueries(outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), threads, cmd.category, config, logger);
+await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
+await (0, analyze_1.runQueries)(outputDir, memory, (0, util_1.getAddSnippetsFlag)(cmd.addSnippets), threads, cmd.category, config, logger);
 if (!cmd.upload) {
 logger.info("Not uploading results");
 return;
 }
 const sourceRoot = cmd.checkoutPath || process.cwd();
-await upload_lib.uploadFromRunner(outputDir, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.category, sourceRoot, config.gitHubVersion, apiDetails, logger);
+await upload_lib.uploadFromRunner(outputDir, (0, repository_1.parseRepositoryNwo)(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.category, sourceRoot, config.gitHubVersion, apiDetails, logger);
 }
 catch (e) {
 logger.error("Analyze failed");
-logger.error(e);
+logger.error(e instanceof Error ? e : new Error(String(e)));
 process.exitCode = 1;
 }
 });
@@ -288,20 +293,20 @@ program
 .option("--category <category>", "String used by Code Scanning for matching the analyses.")
 .option("--debug", "Print more verbose output", false)
 .action(async (cmd) => {
-const logger = logging_1.getRunnerLogger(cmd.debug);
+const logger = (0, logging_1.getRunnerLogger)(cmd.debug);
-const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
+const auth = await (0, util_1.getGitHubAuth)(logger, cmd.githubAuth, cmd.githubAuthStdin);
 const apiDetails = {
 auth,
-url: util_1.parseGitHubUrl(cmd.githubUrl),
+url: (0, util_1.parseGitHubUrl)(cmd.githubUrl),
 };
 try {
-const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
+const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
 const sourceRoot = cmd.checkoutPath || process.cwd();
-await upload_lib.uploadFromRunner(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.category, sourceRoot, gitHubVersion, apiDetails, logger);
+await upload_lib.uploadFromRunner(cmd.sarifFile, (0, repository_1.parseRepositoryNwo)(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.category, sourceRoot, gitHubVersion, apiDetails, logger);
 }
 catch (e) {
 logger.error("Upload failed");
-logger.error(e);
+logger.error(e instanceof Error ? e : new Error(String(e)));
 process.exitCode = 1;
 }
 });
File diff suppressed because one or more lines are too long
7 lib/testing-utils.js generated
@@ -18,12 +18,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
 __setModuleDefault(result, mod);
 return result;
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.setupActionsVars = exports.setupTests = void 0;
-const sinon_1 = __importDefault(require("sinon"));
+const sinon = __importStar(require("sinon"));
 const CodeQL = __importStar(require("./codeql"));
 function wrapOutput(context) {
 // Function signature taken from Socket.write.
@@ -79,7 +76,7 @@ function setupTests(test) {
 process.stdout.write(t.context.testOutput);
 }
 // Undo any modifications made by sinon
-sinon_1.default.restore();
+sinon.restore();
 // Undo any modifications to the env
 process.env = t.context.env;
 });
lib/testing-utils.js.map generated
Single-line source map regenerated to match the new emit (long generated "mappings" data omitted).
14 lib/toolcache.js generated
@@ -50,7 +50,7 @@ const util_1 = require("./util");
 * @returns path to the destination directory
 */
 async function extractTar(file, tempDir, logger) {
-if (util_1.isActions()) {
+if ((0, util_1.isActions)()) {
 return await actionsToolcache.extractTar(file);
 }
 else {
@@ -110,7 +110,7 @@ exports.extractTar = extractTar;
 * @param logger logger to use
 */
 async function cacheDir(sourceDir, tool, version, toolCacheDir, logger) {
-if (util_1.isActions()) {
+if ((0, util_1.isActions)()) {
 return await actionsToolcache.cacheDir(sourceDir, tool, version);
 }
 else {
@@ -148,7 +148,7 @@ exports.cacheDir = cacheDir;
 * @param logger logger to use
 */
 function find(toolName, versionSpec, toolCacheDir, logger) {
-if (util_1.isActions()) {
+if ((0, util_1.isActions)()) {
 return actionsToolcache.find(toolName, versionSpec);
 }
 else {
@@ -194,7 +194,7 @@ exports.find = find;
 * @param logger logger to use
 */
 function findAllVersions(toolName, toolCacheDir, logger) {
-if (util_1.isActions()) {
+if ((0, util_1.isActions)()) {
 return actionsToolcache.findAllVersions(toolName);
 }
 else {
@@ -219,7 +219,7 @@ function findAllVersions(toolName, toolCacheDir, logger) {
 }
 exports.findAllVersions = findAllVersions;
 async function downloadTool(url, tempDir, headers) {
-const dest = path.join(tempDir, uuid_1.v4());
+const dest = path.join(tempDir, (0, uuid_1.v4)());
 const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
 return await actionsToolcache.downloadTool(url, dest, undefined, finalHeaders);
 }
@@ -236,8 +236,8 @@ function createToolPath(tool, version, arch, toolCacheDir, logger) {
 const folderPath = path.join(toolCacheDir, tool, semver.clean(version) || version, arch || "");
 logger.debug(`destination ${folderPath}`);
 const markerPath = `${folderPath}.complete`;
-fs.rmdirSync(folderPath, { recursive: true });
+fs.rmSync(folderPath, { recursive: true, force: true });
-fs.rmdirSync(markerPath, { recursive: true });
+fs.rmSync(markerPath, { recursive: true, force: true });
 fs.mkdirSync(folderPath, { recursive: true });
 return folderPath;
 }
File diff suppressed because one or more lines are too long
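Both the runner and the toolcache diffs above swap `fs.rmdirSync(dir, { recursive: true })` for `fs.rmSync(dir, { recursive: true, force: true })`. As a hedged aside, `fs.rmSync` has been available since Node.js 14.14 and recursive `fs.rmdirSync` is deprecated; with `force: true` the call also tolerates a missing directory. A small stand-alone sketch of the pattern, with a made-up directory name:

```ts
// Minimal sketch of the cleanup pattern seen in the regenerated code.
// The directory below is an arbitrary example, not a path this project defines.
import * as fs from "fs";
import * as os from "os";
import * as path from "path";

const scratchDir = path.join(os.tmpdir(), "example-scratch-dir");

// Previously: fs.rmdirSync(scratchDir, { recursive: true }); // deprecated form
fs.rmSync(scratchDir, { recursive: true, force: true }); // no error if absent
fs.mkdirSync(scratchDir, { recursive: true });           // recreate it empty

console.log(`recreated ${scratchDir}`);
```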
2 lib/toolrunner-error-catcher.js generated
@@ -71,7 +71,7 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
 }).exec();
 }
 catch (e) {
-returnState = e;
+returnState = e instanceof Error ? e : new Error(String(e));
 }
 // if there is a zero return code then we do not apply the matchers
 if (returnState === 0)
lib/toolrunner-error-catcher.js.map generated
Single-line source map regenerated to match the new emit (long generated "mappings" data omitted).
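The change here, `returnState = e` becoming `returnState = e instanceof Error ? e : new Error(String(e))`, follows the same pattern as the catch blocks in lib/runner.js above: a caught value is not guaranteed to be an `Error`, so it is narrowed before being stored or logged. A hedged TypeScript sketch of the idiom; the logger shape is assumed for illustration only:

```ts
// Sketch of the catch-narrowing idiom. With `useUnknownInCatchVariables`
// (bundled into `strict` since TypeScript 4.4) a caught value is `unknown`,
// so it must be converted before it can be used where an Error is expected.
interface MiniLogger {
  error(message: string | Error): void; // assumed shape, for illustration only
}

const logger: MiniLogger = { error: (m) => console.error(m) };

function wrapError(e: unknown): Error {
  return e instanceof Error ? e : new Error(String(e));
}

try {
  throw "a plain string, not an Error";
} catch (e) {
  logger.error(wrapError(e)); // always an Error instance from here on
}
```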
36 lib/toolrunner-error-catcher.test.js generated
@@ -26,16 +26,16 @@ const exec = __importStar(require("@actions/exec"));
 const ava_1 = __importDefault(require("ava"));
 const testing_utils_1 = require("./testing-utils");
 const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
-ava_1.default("matchers are never applied if non-error exit", async (t) => {
+(0, ava_1.default)("matchers are never applied if non-error exit", async (t) => {
 const testArgs = buildDummyArgs("foo bar\\nblort qux", "foo bar\\nblort qux", "", 0);
 const matchers = [
 { exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" },
 ];
 t.deepEqual(await exec.exec("node", testArgs), 0);
-t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), 0);
+t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers), 0);
 });
-ava_1.default("regex matchers are applied to stdout for non-zero exit code", async (t) => {
+(0, ava_1.default)("regex matchers are applied to stdout for non-zero exit code", async (t) => {
 const testArgs = buildDummyArgs("foo bar\\nblort qux", "", "", 1);
 const matchers = [
 { exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
@@ -44,12 +44,12 @@ ava_1.default("regex matchers are applied to stdout for non-zero exit code", asy
 instanceOf: Error,
 message: /failed with exit code 1/,
 });
-await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
+await t.throwsAsync((0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers), {
 instanceOf: Error,
 message: "🦄",
 });
 });
-ava_1.default("regex matchers are applied to stderr for non-zero exit code", async (t) => {
+(0, ava_1.default)("regex matchers are applied to stderr for non-zero exit code", async (t) => {
 const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
 const matchers = [
 { exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
@@ -58,12 +58,12 @@ ava_1.default("regex matchers are applied to stderr for non-zero exit code", asy
 instanceOf: Error,
 message: /failed with exit code 1/,
 });
-await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
+await t.throwsAsync((0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers), {
 instanceOf: Error,
 message: "🦄",
 });
 });
-ava_1.default("matcher returns correct error message when multiple matchers defined", async (t) => {
+(0, ava_1.default)("matcher returns correct error message when multiple matchers defined", async (t) => {
 const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
 const matchers = [
 { exitCode: 456, outputRegex: new RegExp("lorem ipsum"), message: "😩" },
@@ -74,12 +74,12 @@ ava_1.default("matcher returns correct error message when multiple matchers defi
 instanceOf: Error,
 message: /failed with exit code 1/,
 });
-await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
+await t.throwsAsync((0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers), {
 instanceOf: Error,
 message: "🦄",
 });
 });
-ava_1.default("matcher returns first match to regex when multiple matches", async (t) => {
+(0, ava_1.default)("matcher returns first match to regex when multiple matches", async (t) => {
 const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
 const matchers = [
 { exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
@@ -90,12 +90,12 @@ ava_1.default("matcher returns first match to regex when multiple matches", asyn
 instanceOf: Error,
 message: /failed with exit code 1/,
 });
-await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
+await t.throwsAsync((0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers), {
 instanceOf: Error,
 message: "🦄",
 });
 });
-ava_1.default("exit code matchers are applied", async (t) => {
+(0, ava_1.default)("exit code matchers are applied", async (t) => {
 const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 123);
 const matchers = [
 {
@@ -108,19 +108,19 @@ ava_1.default("exit code matchers are applied", async (t) => {
 instanceOf: Error,
 message: /failed with exit code 123/,
 });
-await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
+await t.throwsAsync((0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers), {
 instanceOf: Error,
 message: "🦄",
 });
 });
-ava_1.default("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
+(0, ava_1.default)("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
 const testArgs = buildDummyArgs("standard output", "error output", "", 199);
-await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], { ignoreReturnCode: false }), { instanceOf: Error });
+await t.throwsAsync((0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], { ignoreReturnCode: false }), { instanceOf: Error });
-t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], {
+t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
 ignoreReturnCode: true,
 }), 199);
 });
-ava_1.default("execErrorCatcher preserves behavior of provided listeners", async (t) => {
+(0, ava_1.default)("execErrorCatcher preserves behavior of provided listeners", async (t) => {
 const stdoutExpected = "standard output";
 const stderrExpected = "error output";
 let stdoutActual = "";
@@ -134,7 +134,7 @@ ava_1.default("execErrorCatcher preserves behavior of provided listeners", async
 },
 };
 const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
-t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], {
+t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
 listeners,
 }), 0);
 t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
File diff suppressed because one or more lines are too long
28 lib/tracer-config.js generated
@@ -19,11 +19,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getCombinedTracerConfig = exports.concatTracerConfigs = exports.getTracerConfigForLanguage = void 0;
+exports.getCombinedTracerConfig = exports.concatTracerConfigs = exports.getTracerConfigForLanguage = exports.getTracerConfigForCluster = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
+const codeql_1 = require("./codeql");
 const languages_1 = require("./languages");
 const util = __importStar(require("./util"));
+const util_1 = require("./util");
 const CRITICAL_TRACER_VARS = new Set([
 "SEMMLE_PRELOAD_libtrace",
 "SEMMLE_RUNNER",
@@ -31,6 +33,14 @@ const CRITICAL_TRACER_VARS = new Set([
 "SEMMLE_DEPTRACE_SOCKET",
 "SEMMLE_JAVA_TOOL_OPTIONS",
 ]);
+async function getTracerConfigForCluster(config) {
+const tracingEnvVariables = JSON.parse(fs.readFileSync(path.resolve(config.dbLocation, "temp/tracingEnvironment/start-tracing.json"), "utf8"));
+return {
+spec: tracingEnvVariables["ODASA_TRACER_CONFIGURATION"],
+env: tracingEnvVariables,
+};
+}
+exports.getTracerConfigForCluster = getTracerConfigForCluster;
 async function getTracerConfigForLanguage(codeql, config, language) {
 const env = await codeql.getTracerEnv(util.getCodeQLDatabasePath(config, language));
 const spec = env["ODASA_TRACER_CONFIGURATION"];
@@ -161,12 +171,18 @@ async function getCombinedTracerConfig(config, codeql) {
 if (tracedLanguages.length === 0) {
 return undefined;
 }
-// Get all the tracer configs and combine them together
+let mainTracerConfig;
-const tracedLanguageConfigs = {};
+if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
-for (const language of tracedLanguages) {
+mainTracerConfig = await getTracerConfigForCluster(config);
-tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
+}
+else {
+// Get all the tracer configs and combine them together
+const tracedLanguageConfigs = {};
+for (const language of tracedLanguages) {
+tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
+}
+mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
 }
-const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
 // Add a couple more variables
 mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
 const codeQLDir = path.dirname(codeql.getPath());
File diff suppressed because one or more lines are too long
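The `getTracerConfigForCluster` function added in the lib/tracer-config.js diff above reads the tracer environment for a whole database cluster from a generated `start-tracing.json` file, and `getCombinedTracerConfig` prefers it when the CodeQL CLI is new enough. A simplified sketch of that read, with the config type reduced to the single field the snippet needs (the real type in this codebase has more fields):

```ts
// Simplified sketch mirroring the added getTracerConfigForCluster: parse
// start-tracing.json from the database cluster's temp directory and expose
// its variables as a { spec, env } tracer config.
import * as fs from "fs";
import * as path from "path";

interface TracerConfig {
  spec: string;
  env: { [key: string]: string };
}

// Reduced stand-in for the real Config type; only dbLocation is used here.
interface MinimalConfig {
  dbLocation: string;
}

export function getTracerConfigForCluster(config: MinimalConfig): TracerConfig {
  const tracingEnvVariables = JSON.parse(
    fs.readFileSync(
      path.resolve(config.dbLocation, "temp/tracingEnvironment/start-tracing.json"),
      "utf8"
    )
  );
  return {
    spec: tracingEnvVariables["ODASA_TRACER_CONFIGURATION"],
    env: tracingEnvVariables,
  };
}
```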
48 lib/tracer-config.test.js generated
@@ -30,7 +30,7 @@ const languages_1 = require("./languages");
 const testing_utils_1 = require("./testing-utils");
 const tracer_config_1 = require("./tracer-config");
 const util = __importStar(require("./util"));
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
 function getTestConfig(tmpDir) {
 return {
 languages: [languages_1.Language.java],
@@ -47,10 +47,10 @@ function getTestConfig(tmpDir) {
 };
 }
 // A very minimal setup
-ava_1.default("getTracerConfigForLanguage - minimal setup", async (t) => {
+(0, ava_1.default)("getTracerConfigForLanguage - minimal setup", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
 const config = getTestConfig(tmpDir);
-const codeQL = codeql_1.setCodeQL({
+const codeQL = (0, codeql_1.setCodeQL)({
 async getTracerEnv() {
 return {
 ODASA_TRACER_CONFIGURATION: "abc",
@@ -58,12 +58,12 @@ ava_1.default("getTracerConfigForLanguage - minimal setup", async (t) => {
 };
 },
 });
-const result = await tracer_config_1.getTracerConfigForLanguage(codeQL, config, languages_1.Language.javascript);
+const result = await (0, tracer_config_1.getTracerConfigForLanguage)(codeQL, config, languages_1.Language.javascript);
 t.deepEqual(result, { spec: "abc", env: { foo: "bar" } });
 });
 });
 // Existing vars should not be overwritten, unless they are critical or prefixed with CODEQL_
-ava_1.default("getTracerConfigForLanguage - existing / critical vars", async (t) => {
+(0, ava_1.default)("getTracerConfigForLanguage - existing / critical vars", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
 const config = getTestConfig(tmpDir);
 // Set up some variables in the environment
@@ -75,7 +75,7 @@ ava_1.default("getTracerConfigForLanguage - existing / critical vars", async (t)
 process.env["SEMMLE_JAVA_TOOL_OPTIONS"] = "abc";
 process.env["CODEQL_VAR"] = "abc";
 // Now CodeQL returns all these variables, and one more, with different values
-const codeQL = codeql_1.setCodeQL({
+const codeQL = (0, codeql_1.setCodeQL)({
 async getTracerEnv() {
 return {
 ODASA_TRACER_CONFIGURATION: "abc",
@@ -90,7 +90,7 @@ ava_1.default("getTracerConfigForLanguage - existing / critical vars", async (t)
 };
 },
 });
-const result = await tracer_config_1.getTracerConfigForLanguage(codeQL, config, languages_1.Language.javascript);
+const result = await (0, tracer_config_1.getTracerConfigForLanguage)(codeQL, config, languages_1.Language.javascript);
 t.deepEqual(result, {
 spec: "abc",
 env: {
@@ -107,7 +107,7 @@ ava_1.default("getTracerConfigForLanguage - existing / critical vars", async (t)
 });
 });
 });
-ava_1.default("concatTracerConfigs - minimal configs correctly combined", async (t) => {
+(0, ava_1.default)("concatTracerConfigs - minimal configs correctly combined", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
 const config = getTestConfig(tmpDir);
 const spec1 = path.join(tmpDir, "spec1");
@@ -127,7 +127,7 @@ ava_1.default("concatTracerConfigs - minimal configs correctly combined", async
 c: "c",
 },
 };
-const result = tracer_config_1.concatTracerConfigs({ javascript: tc1, python: tc2 }, config);
+const result = (0, tracer_config_1.concatTracerConfigs)({ javascript: tc1, python: tc2 }, config);
 t.deepEqual(result, {
 spec: path.join(tmpDir, "compound-spec"),
 env: {
@@ -140,13 +140,13 @@ ava_1.default("concatTracerConfigs - minimal configs correctly combined", async
 t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nabc\ndef\nghi`);
 });
 });
-ava_1.default("concatTracerConfigs - conflicting env vars", async (t) => {
+(0, ava_1.default)("concatTracerConfigs - conflicting env vars", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
 const config = getTestConfig(tmpDir);
 const spec = path.join(tmpDir, "spec");
 fs.writeFileSync(spec, "foo.log\n0");
 // Ok if env vars have the same name and the same value
-t.deepEqual(tracer_config_1.concatTracerConfigs({
+t.deepEqual((0, tracer_config_1.concatTracerConfigs)({
 javascript: { spec, env: { a: "a", b: "b" } },
 python: { spec, env: { b: "b", c: "c" } },
 }, config).env, {
@@ -155,14 +155,14 @@ ava_1.default("concatTracerConfigs - conflicting env vars", async (t) => {
 c: "c",
 });
 // Throws if env vars have same name but different values
-const e = t.throws(() => tracer_config_1.concatTracerConfigs({
+const e = t.throws(() => (0, tracer_config_1.concatTracerConfigs)({
 javascript: { spec, env: { a: "a", b: "b" } },
 python: { spec, env: { b: "c" } },
 }, config));
 t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
 });
 });
-ava_1.default("concatTracerConfigs - cpp spec lines come last if present", async (t) => {
+(0, ava_1.default)("concatTracerConfigs - cpp spec lines come last if present", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
 const config = getTestConfig(tmpDir);
 const spec1 = path.join(tmpDir, "spec1");
@@ -182,7 +182,7 @@ ava_1.default("concatTracerConfigs - cpp spec lines come last if present", async
 c: "c",
 },
 };
-const result = tracer_config_1.concatTracerConfigs({ cpp: tc1, python: tc2 }, config);
+const result = (0, tracer_config_1.concatTracerConfigs)({ cpp: tc1, python: tc2 }, config);
|
||||||
t.deepEqual(result, {
|
t.deepEqual(result, {
|
||||||
spec: path.join(tmpDir, "compound-spec"),
|
spec: path.join(tmpDir, "compound-spec"),
|
||||||
env: {
|
env: {
|
||||||
@@ -195,12 +195,12 @@ ava_1.default("concatTracerConfigs - cpp spec lines come last if present", async
|
|||||||
t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nghi\nabc\ndef`);
|
t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nghi\nabc\ndef`);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("concatTracerConfigs - SEMMLE_COPY_EXECUTABLES_ROOT is updated to point to compound spec", async (t) => {
|
(0, ava_1.default)("concatTracerConfigs - SEMMLE_COPY_EXECUTABLES_ROOT is updated to point to compound spec", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
const config = getTestConfig(tmpDir);
|
const config = getTestConfig(tmpDir);
|
||||||
const spec = path.join(tmpDir, "spec");
|
const spec = path.join(tmpDir, "spec");
|
||||||
fs.writeFileSync(spec, "foo.log\n0");
|
fs.writeFileSync(spec, "foo.log\n0");
|
||||||
const result = tracer_config_1.concatTracerConfigs({
|
const result = (0, tracer_config_1.concatTracerConfigs)({
|
||||||
javascript: { spec, env: { a: "a", b: "b" } },
|
javascript: { spec, env: { a: "a", b: "b" } },
|
||||||
python: { spec, env: { SEMMLE_COPY_EXECUTABLES_ROOT: "foo" } },
|
python: { spec, env: { SEMMLE_COPY_EXECUTABLES_ROOT: "foo" } },
|
||||||
}, config);
|
}, config);
|
||||||
@@ -211,7 +211,7 @@ ava_1.default("concatTracerConfigs - SEMMLE_COPY_EXECUTABLES_ROOT is updated to
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("concatTracerConfigs - compound environment file is created correctly", async (t) => {
|
(0, ava_1.default)("concatTracerConfigs - compound environment file is created correctly", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
const config = getTestConfig(tmpDir);
|
const config = getTestConfig(tmpDir);
|
||||||
const spec1 = path.join(tmpDir, "spec1");
|
const spec1 = path.join(tmpDir, "spec1");
|
||||||
@@ -230,7 +230,7 @@ ava_1.default("concatTracerConfigs - compound environment file is created correc
|
|||||||
foo: "bar_baz",
|
foo: "bar_baz",
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
const result = tracer_config_1.concatTracerConfigs({ javascript: tc1, python: tc2 }, config, true);
|
const result = (0, tracer_config_1.concatTracerConfigs)({ javascript: tc1, python: tc2 }, config, true);
|
||||||
// Check binary contents for the Unix file
|
// Check binary contents for the Unix file
|
||||||
const envPath = `${result.spec}.environment`;
|
const envPath = `${result.spec}.environment`;
|
||||||
t.true(fs.existsSync(envPath));
|
t.true(fs.existsSync(envPath));
|
||||||
@@ -252,12 +252,12 @@ ava_1.default("concatTracerConfigs - compound environment file is created correc
|
|||||||
t.deepEqual(bufferWindows.toString("utf16le", 36, 38), "\0"); // trailing null character
|
t.deepEqual(bufferWindows.toString("utf16le", 36, 38), "\0"); // trailing null character
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("getCombinedTracerConfig - return undefined when no languages are traced languages", async (t) => {
|
(0, ava_1.default)("getCombinedTracerConfig - return undefined when no languages are traced languages", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
const config = getTestConfig(tmpDir);
|
const config = getTestConfig(tmpDir);
|
||||||
// No traced languages
|
// No traced languages
|
||||||
config.languages = [languages_1.Language.javascript, languages_1.Language.python];
|
config.languages = [languages_1.Language.javascript, languages_1.Language.python];
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async getTracerEnv() {
|
async getTracerEnv() {
|
||||||
return {
|
return {
|
||||||
ODASA_TRACER_CONFIGURATION: "abc",
|
ODASA_TRACER_CONFIGURATION: "abc",
|
||||||
@@ -266,10 +266,10 @@ ava_1.default("getCombinedTracerConfig - return undefined when no languages are
|
|||||||
};
|
};
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
t.deepEqual(await tracer_config_1.getCombinedTracerConfig(config, codeQL), undefined);
|
t.deepEqual(await (0, tracer_config_1.getCombinedTracerConfig)(config, codeQL), undefined);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
|
(0, ava_1.default)("getCombinedTracerConfig - valid spec file", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
const config = getTestConfig(tmpDir);
|
const config = getTestConfig(tmpDir);
|
||||||
const spec = path.join(tmpDir, "spec");
|
const spec = path.join(tmpDir, "spec");
|
||||||
@@ -280,7 +280,7 @@ ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
|
|||||||
: process.platform === "darwin"
|
: process.platform === "darwin"
|
||||||
? "osx64"
|
? "osx64"
|
||||||
: "linux64";
|
: "linux64";
|
||||||
const codeQL = codeql_1.setCodeQL({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
async getTracerEnv() {
|
async getTracerEnv() {
|
||||||
return {
|
return {
|
||||||
ODASA_TRACER_CONFIGURATION: spec,
|
ODASA_TRACER_CONFIGURATION: spec,
|
||||||
@@ -290,7 +290,7 @@ ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
|
|||||||
};
|
};
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
const result = await tracer_config_1.getCombinedTracerConfig(config, codeQL);
|
const result = await (0, tracer_config_1.getCombinedTracerConfig)(config, codeQL);
|
||||||
t.notDeepEqual(result, undefined);
|
t.notDeepEqual(result, undefined);
|
||||||
const expectedEnv = {
|
const expectedEnv = {
|
||||||
foo: "bar",
|
foo: "bar",
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
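Almost all of the churn in these regenerated files is mechanical: newer TypeScript compilers emit a call to an imported function as (0, mod.fn)(...) instead of mod.fn(...), so the function is invoked as a plain indirect call rather than as a method of the module namespace object. A minimal, standalone Node.js sketch of the difference (the object and function names here are illustrative, not taken from the repository):

    // indirect-call-demo.js: why compiled output wraps calls in (0, ...)
    const mod = {
      whoAmI() {
        // With mod.whoAmI() the receiver is mod; with (0, mod.whoAmI)() the function
        // is called without a receiver, so `this` is undefined in strict mode
        // (or the global object in sloppy mode), never the namespace object.
        return this === mod ? "called as a method of mod" : "called with no receiver";
      },
    };

    console.log(mod.whoAmI());      // "called as a method of mod"
    console.log((0, mod.whoAmI)()); // "called with no receiver"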
lib/upload-lib.js (generated, 6 changes)
@@ -129,7 +129,7 @@ exports.findSarifFilesInDir = findSarifFilesInDir;
 // depending on what the path happens to refer to.
 // Returns true iff the upload occurred and succeeded
 async function uploadFromActions(sarifPath, gitHubVersion, apiDetails, logger) {
-return await uploadFiles(getSarifFilePaths(sarifPath), repository_1.parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, logger);
+return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, logger);
 }
 exports.uploadFromActions = uploadFromActions;
 // Uploads a single sarif file or a directory of sarif files
@@ -163,7 +163,7 @@ function countResultsInSarif(sarif) {
 parsedSarif = JSON.parse(sarif);
 }
 catch (e) {
-throw new Error(`Invalid SARIF. JSON syntax error: ${e.message}`);
+throw new Error(`Invalid SARIF. JSON syntax error: ${e instanceof Error ? e.message : String(e)}`);
 }
 if (!Array.isArray(parsedSarif.runs)) {
 throw new Error("Invalid SARIF. Missing 'runs' array.");
@@ -259,7 +259,7 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
 sarifPayload = await fingerprints.addFingerprints(sarifPayload, sourceRoot, logger);
 sarifPayload = populateRunAutomationDetails(sarifPayload, category, analysisKey, environment);
 const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
-const checkoutURI = file_url_1.default(sourceRoot);
+const checkoutURI = (0, file_url_1.default)(sourceRoot);
 const toolNames = util.getToolNames(sarifPayload);
 const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion);
 // Log some useful debug info about the info
File diff suppressed because one or more lines are too long
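The countResultsInSarif change above stops assuming that a caught value is an Error before reading its message. A small self-contained sketch of the same defensive parse-and-validate pattern (the function name and output here are illustrative, not the action's API):

    // sarif-check-demo.js: parse a SARIF string defensively (illustrative sketch)
    function countRuns(sarifText) {
      let parsed;
      try {
        parsed = JSON.parse(sarifText);
      } catch (e) {
        // The caught value may not be an Error, so narrow before reading .message.
        const message = e instanceof Error ? e.message : String(e);
        throw new Error(`Invalid SARIF. JSON syntax error: ${message}`);
      }
      if (!Array.isArray(parsed.runs)) {
        throw new Error("Invalid SARIF. Missing 'runs' array.");
      }
      return parsed.runs.length;
    }

    console.log(countRuns('{"runs": [{}, {}]}')); // 2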
lib/upload-lib.test.js (generated, 20 changes)
@@ -29,19 +29,19 @@ const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const uploadLib = __importStar(require("./upload-lib"));
 const util_1 = require("./util");
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
 ava_1.default.beforeEach(() => {
-util_1.initializeEnvironment(util_1.Mode.actions, "1.2.3");
+(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
 });
-ava_1.default("validateSarifFileSchema - valid", (t) => {
+(0, ava_1.default)("validateSarifFileSchema - valid", (t) => {
 const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
-t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getRunnerLogger(true)));
+t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile, (0, logging_1.getRunnerLogger)(true)));
 });
-ava_1.default("validateSarifFileSchema - invalid", (t) => {
+(0, ava_1.default)("validateSarifFileSchema - invalid", (t) => {
 const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`;
-t.throws(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getRunnerLogger(true)));
+t.throws(() => uploadLib.validateSarifFileSchema(inputFile, (0, logging_1.getRunnerLogger)(true)));
 });
-ava_1.default("validate correct payload used per version", async (t) => {
+(0, ava_1.default)("validate correct payload used per version", async (t) => {
 const newVersions = [
 { type: util_1.GitHubVariant.DOTCOM },
 { type: util_1.GitHubVariant.GHES, version: "3.1.0" },
@@ -72,8 +72,8 @@ ava_1.default("validate correct payload used per version", async (t) => {
 t.falsy(payload.base_sha);
 }
 });
-ava_1.default("finding SARIF files", async (t) => {
-await util_1.withTmpDir(async (tmpDir) => {
+(0, ava_1.default)("finding SARIF files", async (t) => {
+await (0, util_1.withTmpDir)(async (tmpDir) => {
 // include a couple of sarif files
 fs.writeFileSync(path.join(tmpDir, "a.sarif"), "");
 fs.writeFileSync(path.join(tmpDir, "b.sarif"), "");
@@ -97,7 +97,7 @@ ava_1.default("finding SARIF files", async (t) => {
 ]);
 });
 });
-ava_1.default("populateRunAutomationDetails", (t) => {
+(0, ava_1.default)("populateRunAutomationDetails", (t) => {
 let sarif = '{"runs": [{}]}';
 const analysisKey = ".github/workflows/codeql-analysis.yml:analyze";
 let expectedSarif = '{"runs":[{"automationDetails":{"id":"language:javascript/os:linux/"}}]}';
@@ -1 +1 @@
(The single-line source map for lib/upload-lib.test.js was regenerated to match the new call sites; its long machine-generated mappings are omitted here.)
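The "finding SARIF files" test above writes a couple of .sarif files (alongside other entries) into a temporary directory tree and checks that they are all discovered. As a rough illustration of what such a recursive search involves (a sketch only, not the action's findSarifFilesInDir implementation, and ignoring corner cases such as symlinks):

    // find-sarif-demo.js: recursively collect .sarif files under a directory (sketch)
    const fs = require("fs");
    const path = require("path");

    function findSarifFiles(dir) {
      const results = [];
      for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
        const fullPath = path.join(dir, entry.name);
        if (entry.isDirectory()) {
          results.push(...findSarifFiles(fullPath)); // recurse into subdirectories
        } else if (entry.name.endsWith(".sarif")) {
          results.push(fullPath);
        }
      }
      return results;
    }

    console.log(findSarifFiles(process.cwd()));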
lib/upload-sarif-action.js (generated, 14 changes)
@@ -35,7 +35,7 @@ async function sendSuccessStatusReport(startedAt, uploadStats) {
 await actionsUtil.sendStatusReport(statusReport);
 }
 async function run() {
-util_1.initializeEnvironment(util_1.Mode.actions, pkg.version);
+(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
 const startedAt = new Date();
 if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "starting", startedAt)))) {
 return;
@@ -43,16 +43,18 @@ async function run() {
 try {
 const apiDetails = {
 auth: actionsUtil.getRequiredInput("token"),
-url: util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
+url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
 };
-const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
-const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, logging_1.getActionsLogger());
+const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
+const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
 await sendSuccessStatusReport(startedAt, uploadStats);
 }
 catch (error) {
-core.setFailed(error.message);
+const message = error instanceof Error ? error.message : String(error);
+const stack = error instanceof Error ? error.stack : String(error);
+core.setFailed(message);
 console.log(error);
-await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "failure", startedAt, error.message, error.stack));
+await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "failure", startedAt, message, stack));
 return;
 }
 }
@@ -1 +1 @@
(The single-line source map for lib/upload-sarif-action.js was regenerated to match the new call sites and catch block; its long machine-generated mappings are omitted here.)
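The rewritten catch block above narrows the caught value before using it, which is consistent with TypeScript treating catch variables as unknown rather than any. A generic, self-contained sketch of the same narrowing pattern (not tied to the action's status-report helpers):

    // error-narrowing-demo.js: safely extract message and stack from an unknown caught value
    function describeError(error) {
      const message = error instanceof Error ? error.message : String(error);
      const stack = error instanceof Error ? error.stack : String(error);
      return { message, stack };
    }

    try {
      throw "not an Error object"; // anything can be thrown in JavaScript
    } catch (error) {
      const { message, stack } = describeError(error);
      console.log(message); // "not an Error object"
      console.log(stack);   // the same string, since a non-Error has no stack
    }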
lib/util.js (generated, 61 changes)
@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.GITHUB_DOTCOM_URL = void 0;
+exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.GITHUB_DOTCOM_URL = void 0;
 const fs = __importStar(require("fs"));
 const os = __importStar(require("os"));
 const path = __importStar(require("path"));
@@ -27,6 +27,7 @@ const core = __importStar(require("@actions/core"));
 const semver = __importStar(require("semver"));
 const api_client_1 = require("./api-client");
 const apiCompatibility = __importStar(require("./api-compatibility.json"));
+const codeql_1 = require("./codeql");
 /**
 * The URL for github.com.
 */
@@ -44,7 +45,8 @@ function getExtraOptionsEnvParam() {
 return JSON.parse(raw);
 }
 catch (e) {
-throw new Error(`${varName} environment variable is set, but does not contain valid JSON: ${e.message}`);
+const message = e instanceof Error ? e.message : String(e);
+throw new Error(`${varName} environment variable is set, but does not contain valid JSON: ${message}`);
 }
 }
 exports.getExtraOptionsEnvParam = getExtraOptionsEnvParam;
@@ -75,7 +77,7 @@ async function withTmpDir(body) {
 const symlinkSubdir = path.join(tmpDir, "symlink");
 fs.symlinkSync(realSubdir, symlinkSubdir, "dir");
 const result = await body(symlinkSubdir);
-fs.rmdirSync(tmpDir, { recursive: true });
+fs.rmSync(tmpDir, { recursive: true, force: true });
 return result;
 }
 exports.withTmpDir = withTmpDir;
@@ -129,7 +131,7 @@ function getAddSnippetsFlag(userInput) {
 exports.getAddSnippetsFlag = getAddSnippetsFlag;
 /**
 * Get the codeql `--threads` value specified for the `threads` input.
-* If not value was specified, all available threads will be used.
+* If no value was specified, all available threads will be used.
 *
 * The value will be capped to the number of available CPUs.
 *
@@ -222,7 +224,7 @@ async function getGitHubVersion(apiDetails) {
 }
 // Doesn't strictly have to be the meta endpoint as we're only
 // using the response headers which are available on every request.
-const apiClient = api_client_1.getApiClient(apiDetails);
+const apiClient = (0, api_client_1.getApiClient)(apiDetails);
 const response = await apiClient.meta.get();
 // This happens on dotcom, although we expect to have already returned in that
 // case. This can also serve as a fallback in cases we haven't foreseen.
@@ -378,23 +380,40 @@ var EnvVar;
 */
 EnvVar["FEATURE_SANDWICH"] = "CODEQL_ACTION_FEATURE_SANDWICH";
 })(EnvVar || (EnvVar = {}));
+const exportVar = (mode, name, value) => {
+if (mode === Mode.actions) {
+core.exportVariable(name, value);
+}
+else {
+process.env[name] = value;
+}
+};
+/**
+ * Set some initial environment variables that we can set even without
+ * knowing what version of CodeQL we're running.
+ */
 function initializeEnvironment(mode, version) {
-const exportVar = (name, value) => {
-if (mode === Mode.actions) {
-core.exportVariable(name, value);
-}
-else {
-process.env[name] = value;
-}
-};
-exportVar(EnvVar.RUN_MODE, mode);
-exportVar(EnvVar.VERSION, version);
-exportVar(EnvVar.FEATURE_SARIF_COMBINE, "true");
-exportVar(EnvVar.FEATURE_WILL_UPLOAD, "true");
-exportVar(EnvVar.FEATURE_MULTI_LANGUAGE, "true");
-exportVar(EnvVar.FEATURE_SANDWICH, "true");
+exportVar(mode, EnvVar.RUN_MODE, mode);
+exportVar(mode, EnvVar.VERSION, version);
+exportVar(mode, EnvVar.FEATURE_SARIF_COMBINE, "true");
+exportVar(mode, EnvVar.FEATURE_WILL_UPLOAD, "true");
 }
 exports.initializeEnvironment = initializeEnvironment;
+/**
+ * Enrich the environment variables with further flags that we cannot
+ * know the value of until we know what version of CodeQL we're running.
+ */
+async function enrichEnvironment(mode, codeql) {
+if (await codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
+exportVar(mode, EnvVar.FEATURE_MULTI_LANGUAGE, "false");
+exportVar(mode, EnvVar.FEATURE_SANDWICH, "false");
+}
+else {
+exportVar(mode, EnvVar.FEATURE_MULTI_LANGUAGE, "true");
+exportVar(mode, EnvVar.FEATURE_SANDWICH, "true");
+}
+}
+exports.enrichEnvironment = enrichEnvironment;
 function getMode() {
 // Make sure we fail fast if the env var is missing. This should
 // only happen if there is a bug in our code and we neglected
@@ -432,4 +451,8 @@ function isHTTPError(arg) {
 return (arg === null || arg === void 0 ? void 0 : arg.status) !== undefined && Number.isInteger(arg.status);
 }
 exports.isHTTPError = isHTTPError;
+async function codeQlVersionAbove(codeql, requiredVersion) {
+return semver.gte(await codeql.getVersion(), requiredVersion);
+}
+exports.codeQlVersionAbove = codeQlVersionAbove;
 //# sourceMappingURL=util.js.map
File diff suppressed because one or more lines are too long
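Two of the util.js changes above are behavioural rather than mechanical: withTmpDir now cleans up with fs.rmSync (the recursive form of fs.rmdirSync is deprecated in current Node.js releases), and environment setup is split into initializeEnvironment plus a new enrichEnvironment, which uses the new codeQlVersionAbove helper to set the multi-language and sandwich feature flags once the CodeQL CLI version is known. A rough sketch of that version gate, with stand-in values (the real threshold is CODEQL_VERSION_NEW_TRACING from ./codeql, not the constant shown here):

    // version-gate-demo.js: gate behaviour on a minimum CLI version (sketch with stand-ins)
    const semver = require("semver");

    // Stand-in for the real CodeQL wrapper, which exposes an async getVersion().
    const fakeCodeQL = { async getVersion() { return "2.7.0"; } };

    // Stand-in threshold; the action reads this from ./codeql as CODEQL_VERSION_NEW_TRACING.
    const MIN_VERSION_FOR_NEW_TRACING = "2.7.0";

    async function codeQlVersionAbove(codeql, requiredVersion) {
      return semver.gte(await codeql.getVersion(), requiredVersion);
    }

    async function main() {
      if (await codeQlVersionAbove(fakeCodeQL, MIN_VERSION_FOR_NEW_TRACING)) {
        console.log("new tracing available: legacy multi-language/sandwich flags set to false");
      } else {
        console.log("older CLI: legacy feature flags remain true");
      }
    }

    main();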
lib/util.test.js (generated, 38 changes)
@@ -27,18 +27,18 @@ const os = __importStar(require("os"));
 const stream = __importStar(require("stream"));
 const github = __importStar(require("@actions/github"));
 const ava_1 = __importDefault(require("ava"));
-const sinon_1 = __importDefault(require("sinon"));
+const sinon = __importStar(require("sinon"));
 const api = __importStar(require("./api-client"));
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
-testing_utils_1.setupTests(ava_1.default);
+(0, testing_utils_1.setupTests)(ava_1.default);
-ava_1.default("getToolNames", (t) => {
+(0, ava_1.default)("getToolNames", (t) => {
 const input = fs.readFileSync(`${__dirname}/../src/testdata/tool-names.sarif`, "utf8");
 const toolNames = util.getToolNames(input);
 t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
 });
-ava_1.default("getMemoryFlag() should return the correct --ram flag", (t) => {
+(0, ava_1.default)("getMemoryFlag() should return the correct --ram flag", (t) => {
 const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
 const expectedThreshold = process.platform === "win32" ? 1536 : 1024;
 const tests = [
@@ -51,12 +51,12 @@ ava_1.default("getMemoryFlag() should return the correct --ram flag", (t) => {
 t.deepEqual(flag, expectedFlag);
 }
 });
-ava_1.default("getMemoryFlag() throws if the ram input is < 0 or NaN", (t) => {
+(0, ava_1.default)("getMemoryFlag() throws if the ram input is < 0 or NaN", (t) => {
 for (const input of ["-1", "hello!"]) {
 t.throws(() => util.getMemoryFlag(input));
 }
 });
-ava_1.default("getAddSnippetsFlag() should return the correct flag", (t) => {
+(0, ava_1.default)("getAddSnippetsFlag() should return the correct flag", (t) => {
 t.deepEqual(util.getAddSnippetsFlag(true), "--sarif-add-snippets");
 t.deepEqual(util.getAddSnippetsFlag("true"), "--sarif-add-snippets");
 t.deepEqual(util.getAddSnippetsFlag(false), "--no-sarif-add-snippets");
@@ -64,7 +64,7 @@ ava_1.default("getAddSnippetsFlag() should return the correct flag", (t) => {
 t.deepEqual(util.getAddSnippetsFlag("false"), "--no-sarif-add-snippets");
 t.deepEqual(util.getAddSnippetsFlag("foo bar"), "--no-sarif-add-snippets");
 });
-ava_1.default("getThreadsFlag() should return the correct --threads flag", (t) => {
+(0, ava_1.default)("getThreadsFlag() should return the correct --threads flag", (t) => {
 const numCpus = os.cpus().length;
 const tests = [
 ["0", "--threads=0"],
@@ -75,34 +75,34 @@ ava_1.default("getThreadsFlag() should return the correct --threads flag", (t) =
 [`${-numCpus - 1}`, `--threads=${-numCpus}`],
 ];
 for (const [input, expectedFlag] of tests) {
-const flag = util.getThreadsFlag(input, logging_1.getRunnerLogger(true));
+const flag = util.getThreadsFlag(input, (0, logging_1.getRunnerLogger)(true));
 t.deepEqual(flag, expectedFlag);
 }
 });
-ava_1.default("getThreadsFlag() throws if the threads input is not an integer", (t) => {
-t.throws(() => util.getThreadsFlag("hello!", logging_1.getRunnerLogger(true)));
+(0, ava_1.default)("getThreadsFlag() throws if the threads input is not an integer", (t) => {
+t.throws(() => util.getThreadsFlag("hello!", (0, logging_1.getRunnerLogger)(true)));
 });
-ava_1.default("getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)", (t) => {
+(0, ava_1.default)("getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)", (t) => {
 const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
 const options = { foo: 42 };
 process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options);
 t.deepEqual(util.getExtraOptionsEnvParam(), options);
 process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
 });
-ava_1.default("getExtraOptionsEnvParam() succeeds on valid options", (t) => {
+(0, ava_1.default)("getExtraOptionsEnvParam() succeeds on valid options", (t) => {
 const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
 const options = { database: { init: ["--debug"] } };
 process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options);
 t.deepEqual(util.getExtraOptionsEnvParam(), options);
 process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
 });
-ava_1.default("getExtraOptionsEnvParam() fails on invalid JSON", (t) => {
+(0, ava_1.default)("getExtraOptionsEnvParam() fails on invalid JSON", (t) => {
 const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
 process.env.CODEQL_ACTION_EXTRA_OPTIONS = "{{invalid-json}}";
 t.throws(util.getExtraOptionsEnvParam);
 process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
 });
-ava_1.default("parseGitHubUrl", (t) => {
+(0, ava_1.default)("parseGitHubUrl", (t) => {
 t.deepEqual(util.parseGitHubUrl("github.com"), "https://github.com");
 t.deepEqual(util.parseGitHubUrl("https://github.com"), "https://github.com");
 t.deepEqual(util.parseGitHubUrl("https://api.github.com"), "https://github.com");
@@ -126,7 +126,7 @@ ava_1.default("parseGitHubUrl", (t) => {
 message: '"http:///::::433" is not a valid URL',
 });
 });
-ava_1.default("allowed API versions", async (t) => {
+(0, ava_1.default)("allowed API versions", async (t) => {
 t.is(util.apiVersionInRange("1.33.0", "1.33", "2.0"), undefined);
 t.is(util.apiVersionInRange("1.33.1", "1.33", "2.0"), undefined);
 t.is(util.apiVersionInRange("1.34.0", "1.33", "2.0"), undefined);
@@ -143,13 +143,13 @@ function mockGetMetaVersionHeader(versionHeader) {
 "x-github-enterprise-version": versionHeader,
 },
 };
-const spyGetContents = sinon_1.default
+const spyGetContents = sinon
 .stub(client.meta, "get")
 .resolves(response);
-sinon_1.default.stub(api, "getApiClient").value(() => client);
+sinon.stub(api, "getApiClient").value(() => client);
 return spyGetContents;
 }
-ava_1.default("getGitHubVersion", async (t) => {
+(0, ava_1.default)("getGitHubVersion", async (t) => {
 const v = await util.getGitHubVersion({
 auth: "",
 url: "https://github.com",
@@ -174,7 +174,7 @@ ava_1.default("getGitHubVersion", async (t) => {
 });
 t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
 });
-ava_1.default("getGitHubAuth", async (t) => {
+(0, ava_1.default)("getGitHubAuth", async (t) => {
 const msgs = [];
 const mockLogger = {
 warning: (msg) => msgs.push(msg),
File diff suppressed because one or more lines are too long
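Besides the usual call-site rewrites, util.test.js switches sinon from a default import (sinon_1.default) to a namespace import (const sinon = __importStar(require("sinon"))), presumably to match how the sinon package exposes its API. A minimal, self-contained stubbing example in the same style (the stubbed client object here is made up for illustration):

    // sinon-stub-demo.js: stub a method via the sinon namespace import (illustrative)
    const sinon = require("sinon");

    const apiClient = {
      meta: {
        async get() { return { headers: {} }; },
      },
    };

    // Replace meta.get with a stub that resolves to a canned response,
    // similar to how the test fakes the GitHub API client's version header.
    const stub = sinon.stub(apiClient.meta, "get").resolves({
      headers: { "x-github-enterprise-version": "3.1.0" },
    });

    apiClient.meta.get().then((response) => {
      console.log(response.headers["x-github-enterprise-version"]); // "3.1.0"
      console.log(stub.calledOnce); // true
      stub.restore(); // put the original method back
    });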
Some files were not shown because too many files have changed in this diff.