Mirror of https://github.com/github/codeql-action.git, synced 2025-12-26 01:00:20 +08:00
Compare commits
214 Commits
231aa2c8a8, 1e065048b0, beac1051f4, 61d6a49634, be59c0665c, a5cefa8e7f, a34ca99b46, 48fa82899a,
597c204127, e0fd640b0c, d731c012c4, 6dfc772b5f, 60e58b4a21, 9b1206e898, 40cfcb0a3f, e199504667,
e6e3bf4923, 5b9b480165, b625b628b7, 28c93a0113, 4b40a3a20e, 32be38eeff, 5ed1e985c2, 4bd9723e2b,
2f6d17472d, 2649b6603a, b8107301d2, ebf1b8f8fc, 932b6a98b8, 259993b92a, ef51ec1e54, 2073a69919,
10f2d3c15a, 6800116267, 00c2a84eba, 7ddd7cadfd, 063077ecbf, 54bf6c6142, abbff2838b, 515828d974,
caa49ae471, 70fdddff11, 6ba0a36550, 4a918790cd, 42d6d35dd1, e009918fbc, 70a288daae, bdc7c5d203,
272d916f23, f12f76f047, 28a9b2d6d7, 9f8ddbdfd7, 9203e314a3, 80b12d6f73, 620a267204, bac4fe1a38,
166d98c19e, a9337bc304, 4023575d64, cf1437a514, f9c9a2567c, b9c859bfa1, b4187d626b, bfbb7ab03c,
4e5a06f009, e8f7169839, 6ce923c375, b2b478264a, 5eba74a3c9, ff3337ee1b, 484236cda4, f837e8e761,
ef21864950, 4789c1331c, 59ebabde5d, 3224214d91, e09fbf5b4a, e9ff99b027, 8b9e982393, 8d1e008ecb,
579411fb6c, e4818d46c4, 4778dfbd93, 4b3fd91988, 0a3f985290, 04f1897968, a9872fdfbd, 85fac8bbb4,
536940c435, a0924e149e, 0d52bbad34, 6ac6037211, 959cbb7472, 10ca836463, d58039a1e3, 37a4496237,
b7028afcb4, f629dada4c, ccee4c68ff, 899bf9c076, dd7c3ef80e, b7b875efff, 53ab991fbe, 54d25f56dd,
d827cf3d65, 9438015b82, 5aced81848, 118e294bb9, dc9c1c1a51, a409f43c7a, e67ad6aaed, 6856c22f5f,
896079047b, 18b0d6e135, e58b8d6a61, d0b64028fe, 01330498de, 64815a1382, dc0a4f451f, d5c50b68cb,
a669cc5936, 6fec2ab57a, aab7a26877, 2b971a70bb, 4e81e2933a, bf944d782b, 566a5e6727, 10c89976dc,
8121f62c54, 104319fe98, aba18b82f7, 4a5ad5af18, 19f867a052, 5e452f0d9d, 8bebf77dbd, fb74504ab5,
c51babb6c6, 79166d0788, 44ef9d902a, 384a214d60, 697ed97fa5, 2207a72006, 4623c8edb6, 9085295c40,
3b0a2f607d, 5566638d56, 27c1438455, 1e8d3b8fca, 7fc528c3c6, 2cbc140ac5, 1653364141, 61cc378b7f,
7aa5026a55, c80f00a5c9, 62b14cbbad, 794a4b543a, ee6ba9c213, 81f99a8582, 4b18b7bc24, 4acf201e5b,
1e5919b22d, 375dacad24, e0ff272230, aa0e650c6a, 98b2ddc7f9, 0d9b15ca93, 54d8b0da6b, b1c26c4c53,
21044b004d, 5e70e07df1, 0828b04e3a, 04dc6825d1, 0dc6f25b82, e455e6c8e5, bea0fc1967, b2a92eb56d,
075b74d36e, 9af9a11da8, a631f4b016, 77cda4d75d, 3d90c4f911, 1384ce4ab3, 6c5cad784c, 58b2ab08a8,
e0dec83cfc, 00a3c456fb, e628ee0ae1, 605b23d10b, 160613c380, caf1c5057b, c62445de22, 7fc3c603aa,
24fd4c0f4e, d0517be03a, 37b4358e44, 122b180b66, 8337c2be0f, 5296a763b1, 3afc2b194c, 3cf2a1ba2e,
4d4e25083a, 9de6c31571, 44ae944a29, e2338066a1, 8f05fcd048, 9dac9f748a, c6e756bb39, 8d8b898c0f,
9f2aa7ec75, bff0be7364, daf4614f68, e2d523ca5e, 996d04b1e5, 79f8286c68
.github/dependabot.yml (vendored, 21 lines changed)
@@ -1,20 +1,17 @@
version: 2
updates:
- package-ecosystem: "npm"
- package-ecosystem: npm
directory: "/"
schedule:
interval: "weekly"
day: "thursday" # Gives us a working day to merge this before our typical release
interval: weekly
labels:
- "Update dependencies"
- Update dependencies
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
- package-ecosystem: "npm"
directory: "/runner"
update-types:
- version-update:semver-minor
- version-update:semver-patch
- package-ecosystem: github-actions
directory: "/"
schedule:
interval: "weekly"
day: "thursday" # Gives us a working day to merge this before our typical release
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
interval: weekly

.github/setup-swift/action.yml (new file, vendored, 32 lines)
@@ -0,0 +1,32 @@
name: "Set up Swift"
description: Performs necessary steps to set up appropriate Swift version.
inputs:
codeql-path:
required: true
runs:
using: "composite"
steps:
- name: Get Swift version
id: get_swift_version
# We don't support Swift on Windows or prior versions of CLI.
if: "(runner.os != 'Windows') && (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version == 'nightly-latest')"
shell: bash
env:
CODEQL_PATH: ${{inputs.codeql-path}}
run: |
if [ $RUNNER_OS = "macOS" ]; then
PLATFORM="osx64"
else # We do not run this step on Windows.
PLATFORM="linux64"
fi
SWIFT_EXTRACTOR_DIR="$("$CODEQL_PATH" resolve languages --format json | jq -r '.swift[0]')"
VERSION="$("$SWIFT_EXTRACTOR_DIR/tools/$PLATFORM/extractor" --version | awk '/version/ { print $3 }')"
# Specify 5.7.0, otherwise setup Action will default to latest minor version.
if [ $VERSION = "5.7" ]; then
VERSION="5.7.0"
fi
echo "version=$VERSION" | tee -a $GITHUB_OUTPUT
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
if: "(runner.os != 'Windows') && (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version == 'nightly-latest')"
with:
swift-version: "${{steps.get_swift_version.outputs.version}}"

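The shell step above resolves the Swift extractor directory from `codeql resolve languages --format json` and reads the extractor's version before handing it to the setup action. A rough TypeScript sketch of the same lookup is below; the function names are illustrative, and it assumes (as the jq filter `.swift[0]` does) that the CLI emits a JSON object whose `swift` entry is an array of extractor paths.

```typescript
import { execFileSync } from "child_process";

// Resolve the Swift extractor directory from the CodeQL CLI, mirroring
// `codeql resolve languages --format json | jq -r '.swift[0]'` above.
function resolveSwiftExtractorDir(codeqlPath: string): string | undefined {
  const stdout = execFileSync(
    codeqlPath,
    ["resolve", "languages", "--format", "json"],
    { encoding: "utf-8" }
  );
  const languages: Record<string, string[]> = JSON.parse(stdout);
  return languages.swift?.[0];
}

// Normalize "5.7" to "5.7.0" so the setup action does not pick a newer minor
// version, as the comment in the workflow step explains.
function normalizeSwiftVersion(version: string): string {
  return version === "5.7" ? "5.7.0" : version;
}
```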
.github/workflows/__analyze-ref-input.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: "PR Check - Analyze: 'ref' and 'sha' from inputs"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__autobuild-action.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - autobuild-action
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__export-file-baseline-information.yml (generated, vendored, 13 lines changed)
@@ -7,6 +7,7 @@ name: PR Check - Export file baseline information
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:
@@ -42,18 +43,16 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
# Windows doesn't support Swift, and only macOS latest and nightly-latest support Swift 5.7.1.
if: runner.os == 'Linux' || (runner.os == 'macOS' && matrix.version == 'cached')
with:
swift-version: '5.7'
- uses: ./../action/init
id: init
with:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
CODEQL_FILE_BASELINE_INFORMATION: true
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true
- uses: ./../action/.github/setup-swift
with:
codeql-path: ${{steps.init.outputs.codeql-path}}
- name: Build code
shell: bash
run: ./build.sh
@@ -62,7 +61,6 @@ jobs:
output: ${{ runner.temp }}/results
env:
CODEQL_FILE_BASELINE_INFORMATION: true
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true
- name: Upload SARIF
uses: actions/upload-artifact@v3
with:
@@ -87,4 +85,5 @@ jobs:
fi
done
env:
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true # Remove when Swift is GA.
CODEQL_ACTION_TEST_MODE: true

.github/workflows/__extractor-ram-threads.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - Extractor ram and threads options test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__go-custom-queries.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: 'PR Check - Go: Custom queries'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__go-tracing-autobuilder.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: 'PR Check - Go: tracing with autobuilder step'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__go-tracing-custom-build-steps.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: 'PR Check - Go: tracing with custom build steps'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__go-tracing-legacy-workflow.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: 'PR Check - Go: tracing with legacy workflow'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__init-with-registries.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: 'PR Check - Packaging: Download using registries'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__javascript-source-root.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - Custom source root
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__ml-powered-queries.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - ML-powered queries
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__multi-language-autodetect.yml (generated, vendored, 16 lines changed)
@@ -7,6 +7,7 @@ name: PR Check - Multi-language repository
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:
@@ -65,17 +66,16 @@ jobs:
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
# Only macOS latest and nightly-latest support Swift 5.7.1
if: runner.os == 'Linux' || matrix.version == 'cached'
with:
swift-version: '5.7'

- uses: ./../action/init
id: init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}

- uses: ./../action/.github/setup-swift
with:
codeql-path: ${{steps.init.outputs.codeql-path}}

- name: Build code
shell: bash
run: ./build.sh
@@ -129,8 +129,8 @@ jobs:
fi

- name: Check language autodetect for Swift
if: "!startsWith(matrix.os, 'windows') && (matrix.version == 'cached' || matrix.version\
\ == 'latest' || matrix.version == 'nightly-latest')"
if: (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version
== 'nightly-latest')
shell: bash
run: |
SWIFT_DB=${{ fromJson(steps.analysis.outputs.db-locations).swift }}

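The autodetect check above reads `steps.analysis.outputs.db-locations` with `fromJson` and picks out its `swift` entry. A hedged TypeScript sketch of the same check is below; it assumes, as that expression does, that the output is a JSON object mapping language names to database directories.

```typescript
import * as fs from "fs";

// `dbLocationsJson` is assumed to look like
// {"javascript": "/tmp/db/javascript", "swift": "/tmp/db/swift", ...}.
function checkSwiftDatabaseExists(dbLocationsJson: string): void {
  const dbLocations: Record<string, string> = JSON.parse(dbLocationsJson);
  const swiftDb = dbLocations["swift"];
  if (!swiftDb || !fs.existsSync(swiftDb)) {
    throw new Error(`Did not find a Swift database at ${swiftDb ?? "<missing>"}`);
  }
}
```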
.github/workflows/__packaging-codescanning-config-inputs-js.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: 'PR Check - Packaging: Config and input passed to the CLI'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__packaging-config-inputs-js.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: 'PR Check - Packaging: Config and input'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__packaging-config-js.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: 'PR Check - Packaging: Config file'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__packaging-inputs-js.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: 'PR Check - Packaging: Action input'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__remote-config.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - Remote config file
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__rubocop-multi-language.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - RuboCop multi-language
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__ruby.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - Ruby analysis
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__split-workflow.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - Split workflow
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__submit-sarif-failure.yml (generated, vendored, new file, 73 lines)
@@ -0,0 +1,73 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Submit SARIF after failure
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
submit-sarif-failure:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
name: Submit SARIF after failure
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/checkout@v3
- uses: ./init
with:
languages: javascript
- name: Fail
# We want this job to pass if the Action correctly uploads the SARIF file for
# the failed run.
# Setting this step to continue on error means that it is marked as completing
# successfully, so will not fail the job.
continue-on-error: true
run: exit 1
- uses: ./analyze
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
# above, we manually disable it with an `if` condition.
if: false
with:
category: /test-codeql-version:${{ matrix.version }}
env:
# Internal-only environment variable used to indicate that the post-init Action
# should expect to upload a SARIF file for the failed run.
CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF: true
# Make sure the uploading SARIF files feature is enabled.
CODEQL_ACTION_UPLOAD_FAILED_SARIF: true
# Upload the failed SARIF file as an integration test of the API endpoint.
CODEQL_ACTION_TEST_MODE: false
# Mark telemetry for this workflow so it can be treated separately.
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks

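The env block at the end of this job drives the upload-on-failure behaviour purely through environment variables. The sketch below only illustrates how such flags might be read on the TypeScript side; `shouldUploadFailedSarif` is a hypothetical helper, not the Action's actual implementation.

```typescript
// Hypothetical helper: decide whether the post-init step should try to upload
// a SARIF file for a failed run, based on the flags set in the workflow above.
function shouldUploadFailedSarif(env: NodeJS.ProcessEnv): boolean {
  // The feature must be switched on ...
  const featureEnabled = env.CODEQL_ACTION_UPLOAD_FAILED_SARIF === "true";
  // ... and test mode must be off; the job above deliberately sets
  // CODEQL_ACTION_TEST_MODE to false to exercise the real API endpoint.
  const testMode = env.CODEQL_ACTION_TEST_MODE === "true";
  return featureEnabled && !testMode;
}

// With the env block from the workflow above, this prints true.
console.log(
  shouldUploadFailedSarif({
    CODEQL_ACTION_UPLOAD_FAILED_SARIF: "true",
    CODEQL_ACTION_TEST_MODE: "false",
  })
);
```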
.github/workflows/__swift-autobuild.yml (generated, vendored, 15 lines changed)
@@ -7,6 +7,7 @@ name: PR Check - Swift analysis using autobuild
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:
@@ -42,15 +43,17 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
# Only macOS latest and nightly-latest support Swift 5.7.1
if: runner.os == 'Linux' || matrix.version == 'cached'
with:
swift-version: '5.7'
- uses: ./../action/init
id: init
with:
languages: swift
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/.github/setup-swift
with:
codeql-path: ${{steps.init.outputs.codeql-path}}
- name: Check working directory
shell: bash
run: pwd
- uses: ./../action/autobuild
- uses: ./../action/analyze
id: analysis
@@ -63,5 +66,5 @@ jobs:
exit 1
fi
env:
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true'
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true' # Remove when Swift is GA.
CODEQL_ACTION_TEST_MODE: true

.github/workflows/__swift-custom-build.yml (generated, vendored, 15 lines changed)
@@ -7,6 +7,7 @@ name: PR Check - Swift analysis using a custom build command
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:
@@ -48,15 +49,17 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
# Only macOS latest and nightly-latest support Swift 5.7.1
if: runner.os == 'Linux' || matrix.version == 'cached'
with:
swift-version: '5.7'
- uses: ./../action/init
id: init
with:
languages: swift
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/.github/setup-swift
with:
codeql-path: ${{steps.init.outputs.codeql-path}}
- name: Check working directory
shell: bash
run: pwd
- name: Build code
shell: bash
run: ./build.sh
@@ -71,6 +74,6 @@ jobs:
exit 1
fi
env:
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true'
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true' # Remove when Swift is GA.
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
CODEQL_ACTION_TEST_MODE: true

.github/workflows/__test-autobuild-working-dir.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - Autobuild working directory
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__test-local-codeql.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - Local CodeQL bundle
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__test-proxy.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - Proxy test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__unset-environment.yml (generated, vendored, 6 lines changed)
@@ -7,6 +7,7 @@ name: PR Check - Test unsetting environment variables
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:
@@ -59,7 +60,10 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
# Disable Kotlin analysis while it's incompatible with Kotlin 1.8, until we find a
# workaround for our PR checks.
run: env -i CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN=true PATH="$PATH" HOME="$HOME"
./build.sh
- uses: ./../action/analyze
id: analysis
- shell: bash

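The notable point in this change is that the build still runs under `env -i`, so only `PATH`, `HOME`, and the Kotlin-disabling variable survive into the build process. A small TypeScript sketch of launching a command with an explicitly minimal environment, using Node's child_process API (the script path mirrors the workflow step above):

```typescript
import { execFileSync } from "child_process";

// Run ./build.sh with a scrubbed environment, keeping only the variables the
// workflow step above passes through `env -i`.
function runBuildWithMinimalEnv(): void {
  execFileSync("./build.sh", [], {
    stdio: "inherit",
    env: {
      PATH: process.env.PATH,
      HOME: process.env.HOME,
      CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: "true",
    },
  });
}
```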
.github/workflows/__upload-ref-sha-input.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: "PR Check - Upload-sarif: 'ref' and 'sha' from inputs"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/__with-checkout-path.yml (generated, vendored, 1 line changed)
@@ -7,6 +7,7 @@ name: PR Check - Use a custom `checkout_path`
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:

.github/workflows/codeql.yml (vendored, 14 lines changed)
@@ -8,6 +8,9 @@ on:
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
schedule:
# Weekly on Sunday.
- cron: '30 1 * * 0'

env:
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks
@@ -54,7 +57,7 @@ jobs:
# be the same as `tools: null`. This allows us to make the job for each of the bundles a
# required status check.
#
# If we're running on push, then we can skip running with `tools: latest` when it would be
# If we're running on push or schedule, then we can skip running with `tools: latest` when it would be
# the same as running with `tools: null`.
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
VERSIONS_JSON='[null]'
@@ -78,8 +81,10 @@ jobs:
security-events: write

steps:
- uses: actions/checkout@v3
- uses: ./init
- name: Checkout
uses: actions/checkout@v3
- name: Initialize CodeQL
uses: ./init
id: init
with:
languages: javascript
@@ -88,4 +93,5 @@ jobs:
# confirm steps.init.outputs.codeql-path points to the codeql binary
- name: Print CodeQL Version
run: ${{steps.init.outputs.codeql-path}} version --format=json
- uses: ./analyze
- name: Perform CodeQL Analysis
uses: ./analyze

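The shell fragment above only shows the branch that collapses the version matrix to `[null]`; the surrounding comments say the full logic keeps `latest` (so each bundle's job can be a required status check) on pull requests, or whenever the default bundle lags the latest one. A hedged TypeScript sketch of that decision, under those assumptions:

```typescript
// Sketch of the version-matrix decision described in the comments above.
// Assumption: when `tools: latest` would not duplicate the default bundle,
// both "latest" and null (the default) are included in the matrix.
function computeVersionsMatrix(
  eventName: string,
  defaultVersion: string,
  latestVersion: string
): Array<string | null> {
  const latestIsDefault = defaultVersion === latestVersion;
  if (eventName !== "pull_request" && latestIsDefault) {
    // On push/schedule, skip `tools: latest` when it equals `tools: null`.
    return [null];
  }
  return ["latest", null];
}

console.log(computeVersionsMatrix("push", "2.12.0", "2.12.0")); // [ null ]
console.log(computeVersionsMatrix("pull_request", "2.12.0", "2.12.1")); // [ 'latest', null ]
```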
@@ -2,6 +2,9 @@
# when the analyze step fails.
name: PR Check - Debug artifacts after failure
env:
# Disable Kotlin analysis while it's incompatible with Kotlin 1.8, until we find a
# workaround for our PR checks.
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: true
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
on:
push:

.github/workflows/debug-artifacts.yml (vendored, 3 lines changed)
@@ -1,6 +1,9 @@
# Checks logs, SARIF, and database bundle debug artifacts exist.
name: PR Check - Debug artifact upload
env:
# Disable Kotlin analysis while it's incompatible with Kotlin 1.8, until we find a
# workaround for our PR checks.
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: true
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
on:
push:

.github/workflows/pr-checks.yml (vendored, 2 lines changed)
@@ -88,7 +88,7 @@ jobs:
fi

- name: Set up Python
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: 3.8

.github/workflows/python-deps.yml (vendored, 23 lines changed)
@@ -28,17 +28,7 @@ jobs:
matrix:
os: [ubuntu-20.04, ubuntu-22.04, macos-latest]
python_deps_type: [pipenv, poetry, requirements, setup_py]
python_version: [2, 3]
exclude:
# Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
- python_version: 2
python_deps_type: poetry
# Python2 and pipenv are not supported since pipenv v2021.11.5
- python_version: 2
python_deps_type: pipenv
# Python2 is not available on ubuntu-22.04 by default -- see https://github.com/github/codeql-action/pull/1257
- python_version: 2
os: ubuntu-22.04
python_version: [3]

env:
@@ -138,14 +128,7 @@ jobs:
fail-fast: false
matrix:
python_deps_type: [pipenv, poetry, requirements, setup_py]
python_version: [2, 3]
exclude:
# Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
- python_version: 2
python_deps_type: poetry
# Python2 and pipenv are not supported since pipenv v2021.11.5
- python_version: 2
python_deps_type: pipenv
python_version: [3]

env:
CODEQL_ACTION_TEST_MODE: true
@@ -156,7 +139,7 @@ jobs:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3

- uses: actions/setup-python@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python_version }}

@@ -10,7 +10,7 @@ fi

if [ "$#" -eq 1 ]; then
# If we were passed an argument, use that as the SHA
GITHUB_SHA="$0"
GITHUB_SHA="$1"
elif [ "$#" -gt 1 ]; then
echo "Usage: $0 [SHA]"
echo "Update the required checks based on the SHA, or main."
@@ -23,7 +23,7 @@ fi
echo "Getting checks for $GITHUB_SHA"

# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") or contains("update") or contains("test-setup-python-scripts") | not)] | unique | sort')"
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or . == "check-expected-release-files" or contains("Update") or contains("update") or contains("test-setup-python-scripts") | not)] | unique | sort')"

echo "$CHECKS" | jq

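Besides the `$0` to `$1` fix (the script previously used its own name rather than its first argument as the SHA), the jq filter now also drops the `check-expected-release-files` job. The same filtering expressed as a TypeScript sketch, with a plain list of names standing in for the check-runs API response:

```typescript
// Mirror of the jq filter above: drop URLs, CodeQL/LGTM checks, update jobs,
// test-setup-python-scripts and check-expected-release-files, then de-duplicate
// and sort whatever remains.
function requiredChecks(checkNames: string[]): string[] {
  const excluded = (name: string) =>
    name.includes("https://") ||
    name === "CodeQL" ||
    name === "LGTM.com" ||
    name === "check-expected-release-files" ||
    name.includes("Update") ||
    name.includes("update") ||
    name.includes("test-setup-python-scripts");
  return [...new Set(checkNames.filter((name) => !excluded(name)))].sort();
}

console.log(
  requiredChecks(["CodeQL", "check-expected-release-files", "Ruby analysis", "Ruby analysis"])
);
// [ 'Ruby analysis' ]
```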
.github/workflows/update-release-branch.yml (vendored, 2 lines changed)
@@ -29,7 +29,7 @@ jobs:
fetch-depth: 0

- name: Set up Python
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: 3.8

@@ -13,7 +13,7 @@ jobs:

steps:
- name: Setup Python
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: "3.7"
- name: Checkout CodeQL Action
@@ -35,7 +35,7 @@ jobs:
env:
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
- name: Commit Changes
uses: peter-evans/create-pull-request@c7f493a8000b8aeb17a1332e326ba76b57cb83eb # v3.4.1
uses: peter-evans/create-pull-request@2b011faafdcbc9ceb11414d64d0573f37c774b04 # v4.2.3
with:
commit-message: Update supported GitHub Enterprise Server versions.
title: Update supported GitHub Enterprise Server versions.

CHANGELOG.md (30 lines changed)
@@ -1,5 +1,31 @@
# CodeQL Action Changelog

## 1.1.39 - 18 Jan 2023

- CodeQL Action v1 is now deprecated, and is no longer updated or supported. For better performance, improved security, and new features, upgrade to v2. For more information, see [this changelog post](https://github.blog/changelog/2023-01-18-code-scanning-codeql-action-v1-is-now-deprecated/). [#1467](https://github.com/github/codeql-action/pull/1466)
- Python automatic dependency installation will no longer fail for projects using Poetry that specify `virtualenvs.options.no-pip = true` in their `poetry.toml`. [#1431](https://github.com/github/codeql-action/pull/1431)
- Avoid printing a stack trace and error message when the action fails to find the SHA at the
current directory. This will happen in several non-error states and so we now avoid cluttering the
log with this message. [#1485](https://github.com/github/codeql-action/pull/1485)

## 1.1.38 - 12 Jan 2023

- Update default CodeQL bundle version to 2.12.0. [#1466](https://github.com/github/codeql-action/pull/1466)

## 1.1.37 - 14 Dec 2022

- Update default CodeQL bundle version to 2.11.6. [#1433](https://github.com/github/codeql-action/pull/1433)

## 1.1.36 - 08 Dec 2022

- Update default CodeQL bundle version to 2.11.5. [#1412](https://github.com/github/codeql-action/pull/1412)
- Add a step that tries to upload a SARIF file for the workflow run when that workflow run fails. This will help better surface failed code scanning workflow runs. [#1393](https://github.com/github/codeql-action/pull/1393)
- Python automatic dependency installation will no longer consider dependency code installed in venv as user-written, for projects using Poetry that specify `virtualenvs.in-project = true` in their `poetry.toml`. [#1419](https://github.com/github/codeql-action/pull/1419)

## 1.1.35 - 01 Dec 2022

No user facing changes.

## 1.1.34 - 25 Nov 2022

- Update default CodeQL bundle version to 2.11.4. [#1391](https://github.com/github/codeql-action/pull/1391)
@@ -43,7 +69,7 @@
## 1.1.25 - 21 Sep 2022

- We will soon be rolling out a feature of the CodeQL Action that stores some information used to make future runs faster in the GitHub Actions cache. Initially, this will only be enabled on JavaScript repositories, but we plan to add more languages to this soon. The new feature can be disabled by passing the `trap-caching: false` option to your workflow's `init` step, for example if you are already using the GitHub Actions cache for a different purpose and are near the storage limit for it.
- Add support for Python automatic dependency installation with Poetry 1.2 [#1258](https://github.com/github/codeql-action/pull/1258).
- Add support for Python automatic dependency installation with Poetry 1.2 [#1258](https://github.com/github/codeql-action/pull/1258)

## 1.1.24 - 16 Sep 2022

@@ -294,7 +320,7 @@ No user facing changes.
## 1.0.4 - 28 Jun 2021

- Fix `RUNNER_TEMP environment variable must be set` when using runner. [#594](https://github.com/github/codeql-action/pull/594)
- Fix couting of lines of code for C# projects. [#586](https://github.com/github/codeql-action/pull/586)
- Fix counting of lines of code for C# projects. [#586](https://github.com/github/codeql-action/pull/586)

## 1.0.3 - 23 Jun 2021

@@ -12,6 +12,7 @@ inputs:
upload:
description: Upload the SARIF file to Code Scanning
required: false
# If changing this, make sure to update workflow.ts accordingly.
default: "true"
cleanup-level:
description: "Level of cleanup to perform on CodeQL databases at the end of the analyze step. This should either be 'none' to skip cleanup, or be a valid argument for the --mode flag of the CodeQL CLI command 'codeql database cleanup' as documented at https://codeql.github.com/docs/codeql-cli/manual/database-cleanup"
@@ -44,6 +45,7 @@ inputs:
checkout_path:
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
required: false
# If changing this, make sure to update workflow.ts accordingly.
default: ${{ github.workspace }}
ref:
description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."

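Both new comments point at `workflow.ts`, which needs to know these defaults when it checks user workflows. As a minimal, illustrative sketch only (not the Action's actual implementation), reading such an input with `@actions/core` and falling back to the documented default when the input is absent might look like:

```typescript
import * as core from "@actions/core";

// The `upload` input defaults to "true" in action.yml above; any code that
// mirrors the default must treat a missing input the same way.
function getUploadInput(): boolean {
  const raw = core.getInput("upload") || "true";
  return raw === "true";
}
```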
lib/actions-util.js (generated, 238 lines changed)
@@ -19,17 +19,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util_1 = require("./util");
|
||||
const workflow_1 = require("./workflow");
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
/**
|
||||
@@ -88,8 +88,9 @@ const getCommitOid = async function (checkoutPath, ref = "HEAD") {
|
||||
return commitOid.trim();
|
||||
}
|
||||
catch (e) {
|
||||
core.info(`Failed to call git to get current commit. Continuing with data from environment or input: ${e}`);
|
||||
core.info(e.stack || "NO STACK");
|
||||
core.info("Could not determine current commit SHA using git. Continuing with data from user input or environment.");
|
||||
core.debug(`Reason: ${e.message}`);
|
||||
core.debug(e.stack || "NO STACK");
|
||||
return (0, exports.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
}
|
||||
};
|
||||
@@ -145,225 +146,6 @@ const determineMergeBaseCommitOid = async function () {
|
||||
}
|
||||
};
|
||||
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
|
||||
function isObject(o) {
|
||||
return o !== null && typeof o === "object";
|
||||
}
|
||||
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
||||
function escapeRegExp(string) {
|
||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
||||
}
|
||||
function patternToRegExp(value) {
|
||||
return new RegExp(`^${value
|
||||
.toString()
|
||||
.split(GLOB_PATTERN)
|
||||
.reduce(function (arr, cur) {
|
||||
if (cur === "**") {
|
||||
arr.push(".*?");
|
||||
}
|
||||
else if (cur === "*") {
|
||||
arr.push("[^/]*?");
|
||||
}
|
||||
else if (cur) {
|
||||
arr.push(escapeRegExp(cur));
|
||||
}
|
||||
return arr;
|
||||
}, [])
|
||||
.join("")}$`);
|
||||
}
|
||||
// this function should return true if patternA is a superset of patternB
|
||||
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
||||
function patternIsSuperset(patternA, patternB) {
|
||||
return patternToRegExp(patternA).test(patternB);
|
||||
}
|
||||
exports.patternIsSuperset = patternIsSuperset;
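The comment above gives the intended semantics of `patternIsSuperset`: a branch glob is a superset of another if every branch matching the second also matches the first. A few illustrative calls (hypothetical usage, assuming the module path of the generated file; the expected results follow from the regular expression built by `patternToRegExp`):

```typescript
import { patternIsSuperset } from "./actions-util";

console.log(patternIsSuperset("*", "main-*"));        // true: "*" covers any single branch segment
console.log(patternIsSuperset("main-*", "*"));        // false: the reverse does not hold
console.log(patternIsSuperset("**", "releases/v2"));  // true: "**" also crosses "/" boundaries
```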
|
||||
function branchesToArray(branches) {
|
||||
if (typeof branches === "string") {
|
||||
return [branches];
|
||||
}
|
||||
if (Array.isArray(branches)) {
|
||||
if (branches.length === 0) {
|
||||
return "**";
|
||||
}
|
||||
return branches;
|
||||
}
|
||||
return "**";
|
||||
}
|
||||
function toCodedErrors(errors) {
|
||||
return Object.entries(errors).reduce((acc, [key, value]) => {
|
||||
acc[key] = { message: value, code: key };
|
||||
return acc;
|
||||
}, {});
|
||||
}
|
||||
// code to send back via status report
|
||||
// message to add as a warning annotation to the run
|
||||
exports.WorkflowErrors = toCodedErrors({
|
||||
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
||||
});
|
||||
function getWorkflowErrors(doc) {
|
||||
var _a, _b, _c, _d, _e;
|
||||
const errors = [];
|
||||
const jobName = process.env.GITHUB_JOB;
|
||||
if (jobName) {
|
||||
const job = (_a = doc === null || doc === void 0 ? void 0 : doc.jobs) === null || _a === void 0 ? void 0 : _a[jobName];
|
||||
const steps = job === null || job === void 0 ? void 0 : job.steps;
|
||||
if (Array.isArray(steps)) {
|
||||
for (const step of steps) {
|
||||
// this was advice that we used to give in the README
|
||||
// we actually want to run the analysis on the merge commit
|
||||
// to produce results that are more inline with expectations
|
||||
// (i.e: this is what will happen if you merge this PR)
|
||||
// and avoid some race conditions
|
||||
if ((step === null || step === void 0 ? void 0 : step.run) === "git checkout HEAD^2") {
|
||||
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let missingPush = false;
|
||||
if (doc.on === undefined) {
|
||||
// this is not a valid config
|
||||
}
|
||||
else if (typeof doc.on === "string") {
|
||||
if (doc.on === "pull_request") {
|
||||
missingPush = true;
|
||||
}
|
||||
}
|
||||
else if (Array.isArray(doc.on)) {
|
||||
const hasPush = doc.on.includes("push");
|
||||
const hasPullRequest = doc.on.includes("pull_request");
|
||||
if (hasPullRequest && !hasPush) {
|
||||
missingPush = true;
|
||||
}
|
||||
}
|
||||
else if (isObject(doc.on)) {
|
||||
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
||||
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
|
||||
if (!hasPush && hasPullRequest) {
|
||||
missingPush = true;
|
||||
}
|
||||
if (hasPush && hasPullRequest) {
|
||||
const paths = (_b = doc.on.push) === null || _b === void 0 ? void 0 : _b.paths;
|
||||
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
||||
// if they didn't change any files
|
||||
// currently we cannot go back through the history and find the most recent baseline
|
||||
if (Array.isArray(paths) && paths.length > 0) {
|
||||
errors.push(exports.WorkflowErrors.PathsSpecified);
|
||||
}
|
||||
const pathsIgnore = (_c = doc.on.push) === null || _c === void 0 ? void 0 : _c["paths-ignore"];
|
||||
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
||||
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
||||
}
|
||||
}
|
||||
// if doc.on.pull_request is null that means 'all branches'
|
||||
// if doc.on.pull_request is undefined that means 'off'
|
||||
// we only want to check for mismatched branches if pull_request is on.
|
||||
if (doc.on.pull_request !== undefined) {
|
||||
const push = branchesToArray((_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.branches);
|
||||
if (push !== "**") {
|
||||
const pull_request = branchesToArray((_e = doc.on.pull_request) === null || _e === void 0 ? void 0 : _e.branches);
|
||||
if (pull_request !== "**") {
|
||||
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
||||
if (difference.length > 0) {
|
||||
// there are branches in pull_request that may not have a baseline
|
||||
// because we are not building them on push
|
||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||
}
|
||||
}
|
||||
else if (push.length > 0) {
|
||||
// push is set up to run on a subset of branches
|
||||
// and you could open a PR against a branch with no baseline
|
||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (missingPush) {
|
||||
errors.push(exports.WorkflowErrors.MissingPushHook);
|
||||
}
|
||||
return errors;
|
||||
}
|
||||
exports.getWorkflowErrors = getWorkflowErrors;
|
||||
async function validateWorkflow() {
|
||||
let workflow;
|
||||
try {
|
||||
workflow = await getWorkflow();
|
||||
}
|
||||
catch (e) {
|
||||
return `error: getWorkflow() failed: ${String(e)}`;
|
||||
}
|
||||
let workflowErrors;
|
||||
try {
|
||||
workflowErrors = getWorkflowErrors(workflow);
|
||||
}
|
||||
catch (e) {
|
||||
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
||||
}
|
||||
if (workflowErrors.length > 0) {
|
||||
let message;
|
||||
try {
|
||||
message = formatWorkflowErrors(workflowErrors);
|
||||
}
|
||||
catch (e) {
|
||||
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
||||
}
|
||||
core.warning(message);
|
||||
}
|
||||
return formatWorkflowCause(workflowErrors);
|
||||
}
|
||||
exports.validateWorkflow = validateWorkflow;
|
||||
function formatWorkflowErrors(errors) {
|
||||
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
||||
const errorsList = errors.map((e) => e.message).join(" ");
|
||||
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
||||
}
|
||||
exports.formatWorkflowErrors = formatWorkflowErrors;
|
||||
function formatWorkflowCause(errors) {
|
||||
if (errors.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
return errors.map((e) => e.code).join(",");
|
||||
}
|
||||
exports.formatWorkflowCause = formatWorkflowCause;
|
||||
async function getWorkflow() {
|
||||
const relativePath = await getWorkflowPath();
|
||||
const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
|
||||
return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
|
||||
}
|
||||
exports.getWorkflow = getWorkflow;
|
||||
/**
|
||||
* Get the path of the currently executing workflow.
|
||||
*/
|
||||
async function getWorkflowPath() {
|
||||
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
|
||||
const owner = repo_nwo[0];
|
||||
const repo = repo_nwo[1];
|
||||
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
||||
const apiClient = api.getApiClient();
|
||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
|
||||
owner,
|
||||
repo,
|
||||
run_id,
|
||||
});
|
||||
const workflowUrl = runsResponse.data.workflow_url;
|
||||
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
||||
return workflowResponse.data.path;
|
||||
}
|
||||
/**
|
||||
* Get the workflow run ID.
|
||||
*/
|
||||
function getWorkflowRunID() {
|
||||
const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
|
||||
if (Number.isNaN(workflowRunID)) {
|
||||
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
||||
}
|
||||
return workflowRunID;
|
||||
}
|
||||
exports.getWorkflowRunID = getWorkflowRunID;
|
||||
/**
|
||||
* Get the analysis key parameter for the current job.
|
||||
*
|
||||
@@ -377,7 +159,7 @@ async function getAnalysisKey() {
|
||||
if (analysisKey !== undefined) {
|
||||
return analysisKey;
|
||||
}
|
||||
const workflowPath = await getWorkflowPath();
|
||||
const workflowPath = await (0, workflow_1.getWorkflowPath)();
|
||||
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||
analysisKey = `${workflowPath}:${jobName}`;
|
||||
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
||||
@@ -392,10 +174,10 @@ async function getAutomationID() {
|
||||
exports.getAutomationID = getAutomationID;
|
||||
function computeAutomationID(analysis_key, environment) {
|
||||
let automationID = `${analysis_key}/`;
|
||||
// the id has to be deterministic so we sort the fields
|
||||
if (environment !== undefined && environment !== "null") {
|
||||
const environmentObject = JSON.parse(environment);
|
||||
for (const entry of Object.entries(environmentObject).sort()) {
|
||||
const matrix = (0, util_1.parseMatrixInput)(environment);
|
||||
if (matrix !== undefined) {
|
||||
// the id has to be deterministic so we sort the fields
|
||||
for (const entry of Object.entries(matrix).sort()) {
|
||||
if (typeof entry[1] === "string") {
|
||||
automationID += `${entry[0]}:${entry[1]}/`;
|
||||
}
|
||||
|
||||
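`computeAutomationID` now derives the matrix part of the ID via `parseMatrixInput` instead of re-parsing the raw environment string, but the shape of the result is unchanged: the analysis key followed by the sorted matrix entries, each appended as a `key:value/` segment. An illustrative call (the values are hypothetical):

```typescript
import { computeAutomationID } from "./actions-util";

// Sorted string-valued matrix entries are appended after the analysis key.
const id = computeAutomationID(
  ".github/workflows/codeql-analysis.yml:analyze",
  JSON.stringify({ os: "ubuntu-latest", language: "javascript" })
);
console.log(id);
// ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:ubuntu-latest/"
```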
File diff suppressed because one or more lines are too long
lib/actions-util.test.js (generated, 355 lines changed)
@@ -25,14 +25,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsutil = __importStar(require("./actions-util"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
function errorCodes(actual, expected) {
|
||||
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||
}
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
|
||||
process.env["GITHUB_REF"] = "";
|
||||
@@ -98,6 +94,30 @@ function errorCodes(actual, expected) {
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/HEAD";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["CODE_SCANNING_REF"] = expectedRef;
|
||||
process.env["GITHUB_REF"] = "";
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
});
|
||||
});
|
||||
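The two new tests above pin down the precedence between the two ref sources: `GITHUB_REF` wins when it is set, and `CODE_SCANNING_REF` is only a fallback when it is empty. A minimal sketch of that precedence rule, not the Action's actual `getRef` implementation (which also validates the `ref`/`sha` inputs and the merge commit):

```typescript
// Fallback order implied by the tests above: an explicit GITHUB_REF beats
// CODE_SCANNING_REF; the latter is only consulted when GITHUB_REF is empty.
function refFromEnvironment(env: NodeJS.ProcessEnv): string | undefined {
  if (env.GITHUB_REF) {
    return env.GITHUB_REF;
  }
  return env.CODE_SCANNING_REF || undefined;
}

console.log(refFromEnvironment({ GITHUB_REF: "", CODE_SCANNING_REF: "refs/pull/1/HEAD" }));
// "refs/pull/1/HEAD"
```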
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
@@ -143,333 +163,6 @@ function errorCodes(actual, expected) {
|
||||
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
|
||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on is empty", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is valid", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request"],
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request", "schedule"],
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"], paths: ["test/*"] },
|
||||
pull_request: { branches: ["main"] },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is a correct object", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"] },
|
||||
pull_request: { branches: ["feature"] },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main", "feature"] },
|
||||
pull_request: { branches: ["main"] },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"] },
|
||||
pull_request: { branches: ["main", "feature"] },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: 1,
|
||||
pull_request: 1,
|
||||
},
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: 1,
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: [1],
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { 1: 1 },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: 1 },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: [1] },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: { steps: 1 } },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: [undefined] },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: {
|
||||
branches: 1,
|
||||
},
|
||||
pull_request: {
|
||||
branches: 1,
|
||||
},
|
||||
},
|
||||
}), []));
|
||||
});
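The hunk header above references an `errorCodes(actual, expected)` helper that these assertions spread into `t.deepEqual`. A minimal sketch of what such a helper could look like, assuming each workflow error object exposes a `code` field (that field name is an assumption, not something shown in this diff):

```typescript
// Hypothetical sketch of the errorCodes helper used by these tests.
// The `code` property is assumed; only the pairing behaviour is implied by the calls above.
interface WorkflowError {
  code: string;
  message: string;
}

function errorCodes(
  actual: WorkflowError[],
  expected: WorkflowError[]
): [string[], string[]] {
  // Return both arrays of codes so `t.deepEqual(...errorCodes(a, b))` compares
  // stable identifiers rather than full error objects.
  return [actual.map((e) => e.code), expected.map((e) => e.code)];
}
```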
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
pull_request:
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["feature/*"] },
|
||||
pull_request: { branches: "feature/moose" },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["feature/moose"] },
|
||||
pull_request: { branches: "feature/*" },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||
process.env.GITHUB_JOB = "test";
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request"],
|
||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
||||
});
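Together with the job-scoped tests later in this file, this case suggests the `CheckoutWrongHead` check only inspects the job named by `GITHUB_JOB` and flags a `run` step that checks out `HEAD^2`. A rough, hedged sketch of that shape (names and structure here are illustrative, not the action's exact implementation):

```typescript
// Illustrative only: scan the current job's run steps for a HEAD^2 checkout.
function checksOutWrongHead(workflow: any): boolean {
  const jobName = process.env.GITHUB_JOB;
  const steps = workflow?.jobs?.[jobName ?? ""]?.steps;
  if (!Array.isArray(steps)) {
    return false;
  }
  // Only `run` commands count; a step with some other key (e.g. `notrun`) is ignored,
  // matching the malformed-workflow cases above.
  return steps.some(
    (step) => typeof step?.run === "string" && step.run.includes("git checkout HEAD^2")
  );
}
```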
(0, ava_1.default)("formatWorkflowErrors() when there is one error", (t) => {
|
||||
const message = actionsutil.formatWorkflowErrors([
|
||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
||||
]);
|
||||
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
||||
});
|
||||
(0, ava_1.default)("formatWorkflowErrors() when there are multiple errors", (t) => {
|
||||
const message = actionsutil.formatWorkflowErrors([
|
||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
||||
actionsutil.WorkflowErrors.PathsSpecified,
|
||||
]);
|
||||
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
||||
});
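These two assertions pin down the singular/plural prefix of the message. A minimal formatter that satisfies them might look like the sketch below; how the individual error messages are appended after the prefix is an assumption, since the diff only checks the prefix:

```typescript
// Sketch: produce the "1 issue was detected..." / "N issues were detected..." prefix.
function formatWorkflowErrors(errors: Array<{ message: string }>): string {
  const issuesWere = errors.length === 1 ? "issue was" : "issues were";
  const details = errors.map((e) => e.message).join(" ");
  return `${errors.length} ${issuesWere} detected with this workflow: ${details}`;
}
```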
(0, ava_1.default)("formatWorkflowCause() with no errors", (t) => {
|
||||
const message = actionsutil.formatWorkflowCause([]);
|
||||
t.deepEqual(message, undefined);
|
||||
});
|
||||
(0, ava_1.default)("formatWorkflowCause()", (t) => {
|
||||
const message = actionsutil.formatWorkflowCause([
|
||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
||||
actionsutil.WorkflowErrors.PathsSpecified,
|
||||
]);
|
||||
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
||||
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
|
||||
});
|
||||
(0, ava_1.default)("patternIsSuperset()", (t) => {
|
||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
||||
t.true(actionsutil.patternIsSuperset("*", "*"));
|
||||
t.true(actionsutil.patternIsSuperset("*", "main-*"));
|
||||
t.false(actionsutil.patternIsSuperset("main-*", "*"));
|
||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
||||
t.true(actionsutil.patternIsSuperset("main", "main"));
|
||||
t.false(actionsutil.patternIsSuperset("*", "feature/*"));
|
||||
t.true(actionsutil.patternIsSuperset("**", "feature/*"));
|
||||
t.false(actionsutil.patternIsSuperset("feature-*", "**"));
|
||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
|
||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
|
||||
t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
|
||||
t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
|
||||
t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
|
||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
|
||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
|
||||
t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
|
||||
t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
|
||||
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
|
||||
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
|
||||
});
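Read together with the `MismatchedBranches` cases earlier in the file, these assertions imply a rule of roughly this shape: every `pull_request` branch pattern must be covered by at least one `push` branch pattern, where "covered" is the glob-superset relation exercised above. The sketch below is an illustration of that relationship, not the action's actual implementation:

```typescript
// Illustrative: pull_request branches should be a subset of push branches,
// where coverage is decided by the glob superset check exercised above.
declare function patternIsSuperset(superset: string, subset: string): boolean;

function branchesAreMismatched(
  pushBranches: string[],
  prBranches: string[]
): boolean {
  return !prBranches.every((pr) =>
    pushBranches.some((push) => patternIsSuperset(push, pr))
  );
}
```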
(0, ava_1.default)("getWorkflowErrors() when branches contain dots", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
on:
|
||||
push:
|
||||
branches: [4.1, master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [4.1, master]
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master, ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [master]
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||
process.env.GITHUB_JOB = "test";
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [master]
|
||||
jobs:
|
||||
test:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test2:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test3:
|
||||
steps: []
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||
process.env.GITHUB_JOB = "test3";
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [master]
|
||||
jobs:
|
||||
test:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test2:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test3:
|
||||
steps: []
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on is missing", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() with a different on setup", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on: "workflow_dispatch"
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on: [workflow_dispatch]
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
workflow_dispatch: {}
|
||||
`)), []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on: ["push"]
|
||||
`)), []));
|
||||
});
|
||||
(0, ava_1.default)("initializeEnvironment", (t) => {
|
||||
(0, util_1.initializeEnvironment)("1.2.3");
|
||||
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
||||
|
||||
File diff suppressed because one or more lines are too long
1 lib/analyze-action-env.test.js generated
@@ -45,6 +45,7 @@ const util = __importStar(require("./util"));
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};

@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEnE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
1 lib/analyze-action-input.test.js generated
@@ -61,6 +61,7 @@ const util = __importStar(require("./util"));
optionalInputStub.withArgs("cleanup-level").returns("none");
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";

@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
12 lib/analyze-action.js generated
@@ -39,6 +39,7 @@ const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const shared_environment_1 = require("./shared-environment");
const trap_caching_1 = require("./trap-caching");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
@@ -103,6 +104,8 @@ function doesGoExtractionOutputExist(config) {
* - We detect whether an autobuild step is present by checking the
* `util.DID_AUTOBUILD_GO_ENV_VAR_NAME` environment variable, which is set
* when the autobuilder is invoked.
* - We detect whether the Go database has already been finalized in case it
* has been manually set in a prior Action step.
* - We approximate whether manual build steps are present by looking at
* whether any extraction output already exists for Go.
*/
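Because the hunks below are fragmented, it may help to read the three bullet points above as a chain of early-exit checks before the legacy Go autobuild runs. The sketch below consolidates that logic using the names visible in this diff (`dbIsFinalized`, `doesGoExtractionOutputExist`); the exact environment-variable comparison is an assumption, not the action's exact code:

```typescript
// Illustrative consolidation of the checks described in the comment above.
declare const DID_AUTOBUILD_GO_ENV_VAR_NAME: string;
declare function dbIsFinalized(config: any, language: string, logger: any): boolean;
declare function doesGoExtractionOutputExist(config: any): boolean;

function shouldRunLegacyGoAutobuild(config: any, logger: any): boolean {
  // 1. The autobuilder already ran (the env var is set when it is invoked).
  if (process.env[DID_AUTOBUILD_GO_ENV_VAR_NAME] === "true") {
    return false;
  }
  // 2. A finalized Go database already exists from an earlier Action step.
  if (dbIsFinalized(config, "go", logger)) {
    return false;
  }
  // 3. Manual build steps already produced Go extraction output.
  if (doesGoExtractionOutputExist(config)) {
    return false;
  }
  return true;
}
```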
@@ -114,6 +117,10 @@ async function runAutobuildIfLegacyGoWorkflow(config, logger) {
logger.debug("Won't run Go autobuild since it has already been run.");
return;
}
if ((0, analyze_1.dbIsFinalized)(config, languages_1.Language.go, logger)) {
logger.debug("Won't run Go autobuild since there is already a finalized database for Go.");
return;
}
// This captures whether a user has added manual build steps for Go
if (doesGoExtractionOutputExist(config)) {
logger.debug("Won't run Go autobuild since at least one file of Go code has already been extracted.");
@@ -170,7 +177,7 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadResult = await upload_lib.uploadFromActions(outputDir, logger);
uploadResult = await upload_lib.uploadFromActions(outputDir, actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), logger);
core.setOutput("sarif-id", uploadResult.sarifID);
}
else {
@@ -195,6 +202,7 @@ async function run() {
if (actionsUtil.getOptionalInput("expect-error") === "true") {
core.setFailed(`expect-error input was set to true but no error was thrown.`);
}
core.exportVariable(shared_environment_1.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY, "true");
}
catch (origError) {
const error = origError instanceof Error ? origError : new Error(String(origError));
@@ -202,7 +210,6 @@ async function run() {
hasBadExpectErrorInput()) {
core.setFailed(error.message);
}
console.log(error);
if (error instanceof analyze_1.CodeQLAnalysisError) {
const stats = { ...error.queriesStatusReport };
await sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
@@ -232,7 +239,6 @@ async function runWrapper() {
}
catch (error) {
core.setFailed(`analyze action failed: ${error}`);
console.log(error);
}
await (0, util_1.checkForTimeout)();
}
File diff suppressed because one or more lines are too long
23 lib/analyze.js generated
@@ -50,6 +50,7 @@ async function setupPythonExtractor(logger) {
// If CODEQL_PYTHON is not set, no dependencies were installed, so we don't need to do anything
return;
}
const scriptsFolder = path.resolve(__dirname, "../python-setup");
let output = "";
const options = {
listeners: {
@@ -58,10 +59,7 @@ async function setupPythonExtractor(logger) {
},
},
};
await new toolrunner.ToolRunner(codeqlPython, [
"-c",
"import os; import pip; print(os.path.dirname(os.path.dirname(pip.__file__)))",
], options).exec();
await new toolrunner.ToolRunner(codeqlPython, [path.join(scriptsFolder, "find_site_packages.py")], options).exec();
logger.info(`Setting LGTM_INDEX_IMPORT_PATH=${output}`);
process.env["LGTM_INDEX_IMPORT_PATH"] = output;
output = "";
@@ -129,12 +127,6 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
const queries = config.queries[language];
const queryFilters = validateQueryFilters(config.originalUserInput["query-filters"]);
const packsWithVersion = config.packs[language] || [];
const hasBuiltinQueries = (queries === null || queries === void 0 ? void 0 : queries.builtin.length) > 0;
const hasCustomQueries = (queries === null || queries === void 0 ? void 0 : queries.custom.length) > 0;
const hasPackWithCustomQueries = packsWithVersion.length > 0;
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
throw new Error(`Unable to analyze ${language} as no queries were selected for this language`);
}
try {
if (await util.useCodeScanningConfigInCli(codeql, featureEnablement)) {
// If we are using the code scanning config in the CLI,
@@ -159,6 +151,15 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
logger.info(analysisSummary);
}
else {
// config was generated by the action, so must be interpreted by the action.
const hasBuiltinQueries = (queries === null || queries === void 0 ? void 0 : queries.builtin.length) > 0;
const hasCustomQueries = (queries === null || queries === void 0 ? void 0 : queries.custom.length) > 0;
const hasPackWithCustomQueries = packsWithVersion.length > 0;
if (!hasBuiltinQueries &&
!hasCustomQueries &&
!hasPackWithCustomQueries) {
throw new Error(`Unable to analyze ${language} as no queries were selected for this language`);
}
logger.startGroup(`Running queries for ${language}`);
const querySuitePaths = [];
if (queries["builtin"].length > 0) {
@@ -207,7 +208,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
return statusReport;
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId, featureEnablement);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId);
}
async function runPrintLinesOfCode(language) {
const databasePath = util.getCodeQLDatabasePath(config, language);
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
{ "maximumVersion": "3.8", "minimumVersion": "3.3" }
{ "maximumVersion": "3.8", "minimumVersion": "3.4" }
244 lib/codeql.js generated
@@ -22,21 +22,19 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CODEQL_VERSION_CONFIG_FILES = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = exports.CommandInvocationError = void 0;
|
||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = exports.CommandInvocationError = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const query_string_1 = __importDefault(require("query-string"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const uuid_1 = require("uuid");
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const api = __importStar(require("./api-client"));
|
||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||
const error_matcher_1 = require("./error-matcher");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||
const trap_caching_1 = require("./trap-caching");
|
||||
@@ -74,9 +72,9 @@ const CODEQL_MINIMUM_VERSION = "2.6.3";
|
||||
*/
|
||||
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
|
||||
const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
|
||||
exports.CODEQL_VERSION_CONFIG_FILES = "2.10.1";
|
||||
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
|
||||
exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
|
||||
const CODEQL_VERSION_FILE_BASELINE_INFORMATION = "2.11.3";
|
||||
/**
|
||||
* This variable controls using the new style of tracing from the CodeQL
|
||||
* CLI. In particular, with versions above this we will use both indirect
|
||||
@@ -197,95 +195,125 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
|
||||
}
|
||||
return `https://github.com/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
|
||||
}
|
||||
async function getCodeQLSource(toolsInput, bypassToolcache, apiDetails, variant, logger) {
|
||||
var _a;
|
||||
if (toolsInput && toolsInput !== "latest" && !toolsInput.startsWith("http")) {
|
||||
return {
|
||||
codeqlTarPath: toolsInput,
|
||||
sourceType: "local",
|
||||
toolsVersion: "local",
|
||||
};
|
||||
}
|
||||
const forceLatestReason =
|
||||
// We use the special value of 'latest' to prioritize the version in the
|
||||
// defaults over any pinned cached version.
|
||||
toolsInput === "latest"
|
||||
? '"tools: latest" was requested'
|
||||
: // If the user hasn't requested a particular CodeQL version, then bypass
|
||||
// the toolcache when the appropriate feature is enabled. This
|
||||
// allows us to quickly rollback a broken bundle that has made its way
|
||||
// into the toolcache.
|
||||
toolsInput === undefined && bypassToolcache
|
||||
? "a specific version of CodeQL was not requested and the bypass toolcache feature is enabled"
|
||||
: undefined;
|
||||
const forceLatest = forceLatestReason !== undefined;
|
||||
if (forceLatest) {
|
||||
logger.debug(`Forcing the latest version of the CodeQL tools since ${forceLatestReason}.`);
|
||||
}
|
||||
const codeqlURL = forceLatest ? undefined : toolsInput;
|
||||
const requestedSemVer = convertToSemVer(getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`), logger);
|
||||
// If we find the specified version, we always use that.
|
||||
const codeqlFolder = toolcache.find("CodeQL", requestedSemVer);
|
||||
if (codeqlFolder) {
|
||||
return {
|
||||
codeqlFolder,
|
||||
sourceType: "toolcache",
|
||||
toolsVersion: requestedSemVer,
|
||||
};
|
||||
}
|
||||
// If we don't find the requested version, in some cases we may allow a
|
||||
// different version to save download time if the version hasn't been
|
||||
// specified explicitly (in which case we always honor it).
|
||||
if (!codeqlURL && !forceLatest) {
|
||||
const codeqlVersions = toolcache.findAllVersions("CodeQL");
|
||||
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
|
||||
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]);
|
||||
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
|
||||
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
|
||||
return {
|
||||
codeqlFolder: tmpCodeqlFolder,
|
||||
sourceType: "toolcache",
|
||||
toolsVersion: codeqlVersions[0],
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
codeqlURL: codeqlURL ||
|
||||
(await getCodeQLBundleDownloadURL(apiDetails, variant, logger)),
|
||||
semanticVersion: requestedSemVer,
|
||||
sourceType: "download",
|
||||
toolsVersion: ((_a = semver.prerelease(requestedSemVer)) === null || _a === void 0 ? void 0 : _a.join(".")) || requestedSemVer,
|
||||
};
|
||||
}
|
||||
async function downloadCodeQL(codeqlURL, semanticVersion, apiDetails, tempDir, logger) {
|
||||
const parsedCodeQLURL = new URL(codeqlURL);
|
||||
const searchParams = new URLSearchParams(parsedCodeQLURL.search);
|
||||
const headers = {
|
||||
accept: "application/octet-stream",
|
||||
};
|
||||
// We only want to provide an authorization header if we are downloading
|
||||
// from the same GitHub instance the Action is running on.
|
||||
// This avoids leaking Enterprise tokens to dotcom.
|
||||
// We also don't want to send an authorization header if there's already a token provided in the URL.
|
||||
if (searchParams.has("token")) {
|
||||
logger.debug("CodeQL tools URL contains an authorization token.");
|
||||
}
|
||||
else if (codeqlURL.startsWith(`${apiDetails.url}/`)) {
|
||||
logger.debug("Providing an authorization token to download CodeQL tools.");
|
||||
headers.authorization = `token ${apiDetails.auth}`;
|
||||
}
|
||||
else {
|
||||
logger.debug("Downloading CodeQL tools without an authorization token.");
|
||||
}
|
||||
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
|
||||
const dest = path.join(tempDir, (0, uuid_1.v4)());
|
||||
const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
|
||||
const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, undefined, finalHeaders);
|
||||
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
|
||||
return await toolcache.cacheDir(codeqlExtracted, "CodeQL", semanticVersion);
|
||||
}
|
||||
/**
|
||||
* Set up CodeQL CLI access.
|
||||
*
|
||||
* @param codeqlURL
|
||||
* @param toolsInput
|
||||
* @param apiDetails
|
||||
* @param tempDir
|
||||
* @param variant
|
||||
* @param features
|
||||
* @param bypassToolcache
|
||||
* @param logger
|
||||
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
||||
* version requirement. Must be set to true outside tests.
|
||||
* @returns a { CodeQL, toolsVersion } object.
|
||||
*/
|
||||
async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, bypassToolcache, logger, checkVersion) {
|
||||
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, bypassToolcache, logger, checkVersion) {
|
||||
try {
|
||||
const forceLatestReason =
|
||||
// We use the special value of 'latest' to prioritize the version in the
|
||||
// defaults over any pinned cached version.
|
||||
codeqlURL === "latest"
|
||||
? '"tools: latest" was requested'
|
||||
: // If the user hasn't requested a particular CodeQL version, then bypass
|
||||
// the toolcache when the appropriate feature is enabled. This
|
||||
// allows us to quickly rollback a broken bundle that has made its way
|
||||
// into the toolcache.
|
||||
codeqlURL === undefined && bypassToolcache
|
||||
? "a specific version of CodeQL was not requested and the bypass toolcache feature is enabled"
|
||||
: undefined;
|
||||
const forceLatest = forceLatestReason !== undefined;
|
||||
if (forceLatest) {
|
||||
logger.debug(`Forcing the latest version of the CodeQL tools since ${forceLatestReason}.`);
|
||||
codeqlURL = undefined;
|
||||
}
|
||||
const source = await getCodeQLSource(toolsInput, bypassToolcache, apiDetails, variant, logger);
|
||||
let codeqlFolder;
|
||||
let codeqlURLVersion;
|
||||
if (codeqlURL && !codeqlURL.startsWith("http")) {
|
||||
codeqlFolder = await toolcache.extractTar(codeqlURL);
|
||||
codeqlURLVersion = "local";
|
||||
}
|
||||
else {
|
||||
codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
|
||||
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
|
||||
// If we find the specified version, we always use that.
|
||||
codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer);
|
||||
// If we don't find the requested version, in some cases we may allow a
|
||||
// different version to save download time if the version hasn't been
|
||||
// specified explicitly (in which case we always honor it).
|
||||
if (!codeqlFolder && !codeqlURL && !forceLatest) {
|
||||
const codeqlVersions = toolcache.findAllVersions("CodeQL");
|
||||
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
|
||||
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]);
|
||||
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
|
||||
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
|
||||
codeqlFolder = tmpCodeqlFolder;
|
||||
codeqlURLVersion = codeqlVersions[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
if (codeqlFolder) {
|
||||
switch (source.sourceType) {
|
||||
case "local":
|
||||
codeqlFolder = await toolcache.extractTar(source.codeqlTarPath);
|
||||
break;
|
||||
case "toolcache":
|
||||
codeqlFolder = source.codeqlFolder;
|
||||
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
|
||||
}
|
||||
else {
|
||||
if (!codeqlURL) {
|
||||
codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, variant, logger);
|
||||
}
|
||||
const parsedCodeQLURL = new URL(codeqlURL);
|
||||
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
|
||||
const headers = {
|
||||
accept: "application/octet-stream",
|
||||
};
|
||||
// We only want to provide an authorization header if we are downloading
|
||||
// from the same GitHub instance the Action is running on.
|
||||
// This avoids leaking Enterprise tokens to dotcom.
|
||||
// We also don't want to send an authorization header if there's already a token provided in the URL.
|
||||
if (codeqlURL.startsWith(`${apiDetails.url}/`) &&
|
||||
parsedQueryString["token"] === undefined) {
|
||||
logger.debug("Downloading CodeQL bundle with token.");
|
||||
headers.authorization = `token ${apiDetails.auth}`;
|
||||
}
|
||||
else {
|
||||
logger.debug("Downloading CodeQL bundle without token.");
|
||||
}
|
||||
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
|
||||
const dest = path.join(tempDir, (0, uuid_1.v4)());
|
||||
const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
|
||||
const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, undefined, finalHeaders);
|
||||
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
|
||||
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer);
|
||||
}
|
||||
break;
|
||||
case "download":
|
||||
codeqlFolder = await downloadCodeQL(source.codeqlURL, source.semanticVersion, apiDetails, tempDir, logger);
|
||||
break;
|
||||
default:
|
||||
(0, util_1.assertNever)(source);
|
||||
}
|
||||
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
|
||||
if (process.platform === "win32") {
|
||||
@@ -295,7 +323,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, bypassToolca
|
||||
throw new Error(`Unsupported platform: ${process.platform}`);
|
||||
}
|
||||
cachedCodeQL = await getCodeQLForCmd(codeqlCmd, checkVersion);
|
||||
return { codeql: cachedCodeQL, toolsVersion: codeqlURLVersion };
|
||||
return { codeql: cachedCodeQL, toolsVersion: source.toolsVersion };
|
||||
}
|
||||
catch (e) {
|
||||
logger.error(e instanceof Error ? e : new Error(String(e)));
|
||||
@@ -371,6 +399,7 @@ function setCodeQL(partialCodeql) {
|
||||
databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
|
||||
databaseInterpretResults: resolveFunction(partialCodeql, "databaseInterpretResults"),
|
||||
databasePrintBaseline: resolveFunction(partialCodeql, "databasePrintBaseline"),
|
||||
diagnosticsExport: resolveFunction(partialCodeql, "diagnosticsExport"),
|
||||
};
|
||||
return cachedCodeQL;
|
||||
}
|
||||
@@ -489,7 +518,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
...getExtraOptionsFromEnv(["database", "init"]),
|
||||
]);
|
||||
},
|
||||
async databaseInitCluster(config, sourceRoot, processName, featureEnablement) {
|
||||
async databaseInitCluster(config, sourceRoot, processName, featureEnablement, logger) {
|
||||
const extraArgs = config.languages.map((language) => `--language=${language}`);
|
||||
if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l)).length > 0) {
|
||||
extraArgs.push("--begin-tracing");
|
||||
@@ -507,9 +536,16 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
extraArgs.push("--no-internal-use-lua-tracing");
|
||||
}
|
||||
}
|
||||
const configLocation = await generateCodeScanningConfig(codeql, config, featureEnablement);
|
||||
// A config file is only generated if the CliConfigFileEnabled feature flag is enabled.
|
||||
const configLocation = await generateCodeScanningConfig(codeql, config, featureEnablement, logger);
|
||||
// Only pass external repository token if a config file is going to be parsed by the CLI.
|
||||
let externalRepositoryToken;
|
||||
if (configLocation) {
|
||||
extraArgs.push(`--codescanning-config=${configLocation}`);
|
||||
externalRepositoryToken = (0, actions_util_1.getOptionalInput)("external-repository-token");
|
||||
if (externalRepositoryToken) {
|
||||
extraArgs.push("--external-repository-token-stdin");
|
||||
}
|
||||
}
|
||||
await runTool(cmd, [
|
||||
"database",
|
||||
@@ -519,7 +555,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
`--source-root=${sourceRoot}`,
|
||||
...extraArgs,
|
||||
...getExtraOptionsFromEnv(["database", "init"]),
|
||||
]);
|
||||
], { stdin: externalRepositoryToken });
|
||||
},
|
||||
async runAutobuild(language) {
|
||||
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
|
||||
@@ -666,9 +702,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
if (querySuitePath) {
|
||||
codeqlArgs.push(querySuitePath);
|
||||
}
|
||||
await runTool(cmd, codeqlArgs);
|
||||
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
|
||||
},
|
||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId, featureEnablement) {
|
||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId) {
|
||||
const codeqlArgs = [
|
||||
"database",
|
||||
"interpret-results",
|
||||
@@ -687,7 +723,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
if (automationDetailsId !== undefined) {
|
||||
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||
}
|
||||
if (await featureEnablement.getValue(feature_flags_1.Feature.FileBaselineInformationEnabled, this)) {
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_FILE_BASELINE_INFORMATION)) {
|
||||
codeqlArgs.push("--sarif-add-baseline-file-info");
|
||||
}
|
||||
codeqlArgs.push(databasePath);
|
||||
@@ -695,7 +731,8 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
codeqlArgs.push(...querySuitePaths);
|
||||
}
|
||||
// capture stdout, which contains analysis summaries
|
||||
return await runTool(cmd, codeqlArgs);
|
||||
const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
|
||||
return returnState.stdout;
|
||||
},
|
||||
async databasePrintBaseline(databasePath) {
|
||||
const codeqlArgs = [
|
||||
@@ -771,6 +808,19 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
];
|
||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||
},
|
||||
async diagnosticsExport(sarifFile, automationDetailsId) {
|
||||
const args = [
|
||||
"diagnostics",
|
||||
"export",
|
||||
"--format=sarif-latest",
|
||||
`--output=${sarifFile}`,
|
||||
...getExtraOptionsFromEnv(["diagnostics", "export"]),
|
||||
];
|
||||
if (automationDetailsId !== undefined) {
|
||||
args.push("--sarif-category", automationDetailsId);
|
||||
}
|
||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||
},
|
||||
};
|
||||
// To ensure that status reports include the CodeQL CLI version wherever
|
||||
// possible, we want to call getVersion(), which populates the version value
|
||||
@@ -840,20 +890,26 @@ exports.getExtraOptions = getExtraOptions;
|
||||
* status reports on GitHub.com.
|
||||
*/
|
||||
const maxErrorSize = 20000;
|
||||
async function runTool(cmd, args = []) {
|
||||
async function runTool(cmd, args = [], opts = {}) {
|
||||
let output = "";
|
||||
let error = "";
|
||||
const exitCode = await new toolrunner.ToolRunner(cmd, args, {
|
||||
listeners: {
|
||||
stdout: (data) => {
|
||||
output += data.toString();
|
||||
output += data.toString("utf8");
|
||||
},
|
||||
stderr: (data) => {
|
||||
const toRead = Math.min(maxErrorSize - error.length, data.length);
|
||||
error += data.toString("utf8", 0, toRead);
|
||||
let readStartIndex = 0;
|
||||
// If the error is too large, then we only take the last 20,000 characters
|
||||
if (data.length - maxErrorSize > 0) {
|
||||
// Eg: if we have 20,000 the start index should be 2.
|
||||
readStartIndex = data.length - maxErrorSize + 1;
|
||||
}
|
||||
error += data.toString("utf8", readStartIndex);
|
||||
},
|
||||
},
|
||||
ignoreReturnCode: true,
|
||||
...(opts.stdin ? { input: Buffer.from(opts.stdin || "") } : {}),
|
||||
}).exec();
|
||||
if (exitCode !== 0)
|
||||
throw new CommandInvocationError(cmd, args, exitCode, error, output);
|
||||
@@ -867,7 +923,7 @@ async function runTool(cmd, args = []) {
|
||||
* @param config The configuration to use.
|
||||
* @returns the path to the generated user configuration file.
|
||||
*/
|
||||
async function generateCodeScanningConfig(codeql, config, featureEnablement) {
|
||||
async function generateCodeScanningConfig(codeql, config, featureEnablement, logger) {
|
||||
var _a;
|
||||
if (!(await util.useCodeScanningConfigInCli(codeql, featureEnablement))) {
|
||||
return;
|
||||
@@ -926,6 +982,10 @@ async function generateCodeScanningConfig(codeql, config, featureEnablement) {
|
||||
augmentedConfig.packs["javascript"].push(packString);
|
||||
}
|
||||
}
|
||||
logger.info(`Writing augmented user configuration file to ${configLocation}`);
|
||||
logger.startGroup("Augmented user configuration file contents");
|
||||
logger.info(yaml.dump(augmentedConfig));
|
||||
logger.endGroup();
|
||||
fs.writeFileSync(configLocation, yaml.dump(augmentedConfig));
|
||||
return configLocation;
|
||||
}
|
||||
|
||||
File diff suppressed because one or more lines are too long
176 lib/codeql.test.js generated
@@ -27,6 +27,7 @@ const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
@@ -45,14 +46,12 @@ const util_1 = require("./util");
|
||||
const sampleApiDetails = {
|
||||
auth: "token",
|
||||
url: "https://github.com",
|
||||
apiURL: undefined,
|
||||
registriesAuthTokens: undefined,
|
||||
apiURL: "https://api.github.com",
|
||||
};
|
||||
const sampleGHAEApiDetails = {
|
||||
auth: "token",
|
||||
url: "https://example.githubenterprise.com",
|
||||
apiURL: undefined,
|
||||
registriesAuthTokens: undefined,
|
||||
apiURL: "https://example.githubenterprise.com/api/v3",
|
||||
};
|
||||
let stubConfig;
|
||||
ava_1.default.beforeEach(() => {
|
||||
@@ -82,7 +81,13 @@ ava_1.default.beforeEach(() => {
|
||||
trapCacheDownloadTime: 0,
|
||||
};
|
||||
});
|
||||
async function mockApiAndSetupCodeQL({ apiDetails, bypassToolcache, isPinned, tmpDir, toolsInput, version, }) {
|
||||
/**
|
||||
* Mocks the API for downloading the bundle tagged `tagName`.
|
||||
*
|
||||
* @returns the download URL for the bundle. This can be passed to the tools parameter of
|
||||
* `codeql.setupCodeQL`.
|
||||
*/
|
||||
async function mockDownloadApi({ apiDetails = sampleApiDetails, isPinned, tagName, }) {
|
||||
var _a;
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
@@ -91,97 +96,94 @@ async function mockApiAndSetupCodeQL({ apiDetails, bypassToolcache, isPinned, tm
|
||||
: "osx64";
|
||||
const baseUrl = (_a = apiDetails === null || apiDetails === void 0 ? void 0 : apiDetails.url) !== null && _a !== void 0 ? _a : "https://example.com";
|
||||
const relativeUrl = apiDetails
|
||||
? `/github/codeql-action/releases/download/${version}/codeql-bundle-${platform}.tar.gz`
|
||||
: `/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
|
||||
? `/github/codeql-action/releases/download/${tagName}/codeql-bundle-${platform}.tar.gz`
|
||||
: `/download/${tagName}/codeql-bundle.tar.gz`;
|
||||
(0, nock_1.default)(baseUrl)
|
||||
.get(relativeUrl)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle${isPinned ? "-pinned" : ""}.tar.gz`));
|
||||
return await codeql.setupCodeQL(toolsInput ? toolsInput.input : `${baseUrl}${relativeUrl}`, apiDetails !== null && apiDetails !== void 0 ? apiDetails : sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, !!bypassToolcache, (0, logging_1.getRunnerLogger)(true), false);
|
||||
return `${baseUrl}${relativeUrl}`;
|
||||
}
|
||||
(0, ava_1.default)("download codeql bundle cache", async (t) => {
|
||||
async function installIntoToolcache({ apiDetails = sampleApiDetails, isPinned, tagName, tmpDir, }) {
|
||||
const url = await mockDownloadApi({ apiDetails, isPinned, tagName });
|
||||
await codeql.setupCodeQL(url, apiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, (0, logging_1.getRunnerLogger)(true), false);
|
||||
}
|
||||
(0, ava_1.default)("downloads and caches explicitly requested bundles that aren't in the toolcache", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const versions = ["20200601", "20200610"];
|
||||
for (let i = 0; i < versions.length; i++) {
|
||||
const version = versions[i];
|
||||
const codeQLConfig = await mockApiAndSetupCodeQL({ version, tmpDir });
|
||||
const url = await mockDownloadApi({
|
||||
tagName: `codeql-bundle-${version}`,
|
||||
isPinned: false,
|
||||
});
|
||||
const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
|
||||
t.deepEqual(codeQLConfig.toolsVersion, version);
|
||||
t.is(result.toolsVersion, version);
|
||||
}
|
||||
t.is(toolcache.findAllVersions("CodeQL").length, 2);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
|
||||
(0, ava_1.default)("downloads an explicitly requested bundle even if a different version is cached", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
await installIntoToolcache({
|
||||
tagName: "codeql-bundle-20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
|
||||
const unpinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200610",
|
||||
tmpDir,
|
||||
const url = await mockDownloadApi({
|
||||
tagName: "codeql-bundle-20200610",
|
||||
});
|
||||
const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
|
||||
t.deepEqual(unpinnedCodeQLConfig.toolsVersion, "20200610");
|
||||
t.deepEqual(result.toolsVersion, "20200610");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("don't download codeql bundle cache with pinned different version cached", async (t) => {
|
||||
(0, ava_1.default)("uses a cached bundle when no tools input is given", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
await installIntoToolcache({
|
||||
tagName: "codeql-bundle-20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
|
||||
const codeQLConfig = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(codeQLConfig.toolsVersion, "0.0.0-20200601");
|
||||
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(result.toolsVersion, "0.0.0-20200601");
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("download codeql bundle cache with different version cached (not pinned)", async (t) => {
|
||||
(0, ava_1.default)("downloads bundle if only an unpinned version is cached", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const cachedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
await installIntoToolcache({
|
||||
tagName: "codeql-bundle-20200601",
|
||||
isPinned: false,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
t.deepEqual(cachedCodeQLConfig.toolsVersion, "20200601");
|
||||
const codeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: defaults.bundleVersion,
|
||||
tmpDir,
|
||||
apiDetails: sampleApiDetails,
|
||||
toolsInput: { input: undefined },
|
||||
await mockDownloadApi({
|
||||
tagName: defaults.bundleVersion,
|
||||
});
|
||||
t.deepEqual(codeQLConfig.toolsVersion, defaults.bundleVersion.replace("codeql-bundle-", ""));
|
||||
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(result.toolsVersion, defaults.bundleVersion.replace("codeql-bundle-", ""));
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
|
||||
(0, ava_1.default)('downloads bundle if "latest" tools specified but not cached', async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
await installIntoToolcache({
|
||||
tagName: "codeql-bundle-20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
|
||||
const latestCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: defaults.bundleVersion,
|
||||
apiDetails: sampleApiDetails,
|
||||
toolsInput: { input: "latest" },
|
||||
tmpDir,
|
||||
await mockDownloadApi({
|
||||
tagName: defaults.bundleVersion,
|
||||
});
|
||||
t.deepEqual(latestCodeQLConfig.toolsVersion, defaults.bundleVersion.replace("codeql-bundle-", ""));
|
||||
const result = await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(result.toolsVersion, defaults.bundleVersion.replace("codeql-bundle-", ""));
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
@@ -199,20 +201,15 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
|
||||
(0, ava_1.default)(`download codeql bundle ${shouldToolcacheBeBypassed ? "bypasses" : "does not bypass"} toolcache when feature ${isFeatureEnabled ? "enabled" : "disabled"} and tools: ${toolsInput} passed`, async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
await mockApiAndSetupCodeQL({
|
||||
version: "codeql-bundle-20200601",
|
||||
apiDetails: sampleApiDetails,
|
||||
await installIntoToolcache({
|
||||
tagName: "codeql-bundle-20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
await mockApiAndSetupCodeQL({
|
||||
version: defaults.bundleVersion,
|
||||
apiDetails: sampleApiDetails,
|
||||
bypassToolcache: isFeatureEnabled,
|
||||
toolsInput: { input: toolsInput },
|
||||
tmpDir,
|
||||
await mockDownloadApi({
|
||||
tagName: defaults.bundleVersion,
|
||||
});
|
||||
await codeql.setupCodeQL(toolsInput, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, isFeatureEnabled, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, shouldToolcacheBeBypassed ? 2 : 1);
|
||||
});
|
||||
@@ -241,6 +238,23 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
|
||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
// This is a workaround to mock `api.getApiDetails()` since it doesn't seem to be possible to
|
||||
// mock this directly. The difficulty is that `getApiDetails()` is called locally in
|
||||
// `api-client.ts`, but `sinon.stub(api, "getApiDetails")` only affects calls to
|
||||
// `getApiDetails()` via an imported `api` module.
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("token")
|
||||
.returns(sampleGHAEApiDetails.auth);
|
||||
const requiredEnvParamStub = sinon.stub(util, "getRequiredEnvParam");
|
||||
requiredEnvParamStub
|
||||
.withArgs("GITHUB_SERVER_URL")
|
||||
.returns(sampleGHAEApiDetails.url);
|
||||
requiredEnvParamStub
|
||||
.withArgs("GITHUB_API_URL")
|
||||
.returns(sampleGHAEApiDetails.apiURL);
|
||||
sinon.stub(actionsUtil, "isRunningLocalAction").returns(false);
|
||||
process.env["GITHUB_ACTION_REPOSITORY"] = "github/codeql-action";
|
||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, false, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
@@ -308,14 +322,18 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
|
||||
});
|
||||
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
|
||||
@@ -323,6 +341,8 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.8.1");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
const thisStubConfig = {
|
||||
...stubConfig,
|
||||
tempDir,
|
||||
@@ -347,7 +367,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
|
||||
.resolves(feature_flags_1.featureConfig[feature_flags_1.Feature.CliConfigFileEnabled].minimumVersion);
|
||||
const thisStubConfig = {
|
||||
...stubConfig,
|
||||
...configOverride,
|
||||
@@ -377,7 +397,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
}, {}, {
|
||||
packs: ["codeql/javascript-experimental-atm-queries@~0.3.0"],
|
||||
packs: ["codeql/javascript-experimental-atm-queries@~0.4.0"],
|
||||
});
|
||||
(0, ava_1.default)("injected ML queries with existing packs", injectedConfigMacro, {
|
||||
injectedMlQueries: true,
|
||||
@@ -391,7 +411,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
packs: {
|
||||
javascript: [
|
||||
"codeql/something-else",
|
||||
"codeql/javascript-experimental-atm-queries@~0.3.0",
|
||||
"codeql/javascript-experimental-atm-queries@~0.4.0",
|
||||
],
|
||||
},
|
||||
});
|
||||
@@ -406,7 +426,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
}, {
|
||||
packs: {
|
||||
cpp: ["codeql/something-else"],
|
||||
javascript: ["codeql/javascript-experimental-atm-queries@~0.3.0"],
|
||||
javascript: ["codeql/javascript-experimental-atm-queries@~0.4.0"],
|
||||
},
|
||||
});
|
||||
(0, ava_1.default)("injected packs from input", injectedConfigMacro, {
|
||||
@@ -459,7 +479,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
},
|
||||
},
|
||||
}, {
|
||||
packs: ["xxx", "yyy", "codeql/javascript-experimental-atm-queries@~0.3.0"],
|
||||
packs: ["xxx", "yyy", "codeql/javascript-experimental-atm-queries@~0.4.0"],
|
||||
});
|
||||
// similar, but with queries
|
||||
(0, ava_1.default)("injected queries from input", injectedConfigMacro, {
|
||||
@@ -553,7 +573,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
|
||||
.resolves(feature_flags_1.featureConfig[feature_flags_1.Feature.CliConfigFileEnabled].minimumVersion);
|
||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should have used an config file
|
||||
@@ -564,24 +584,22 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
|
||||
}
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info when feature enabled", async (t) => {
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
||||
// The version of CodeQL is checked separately to determine feature enablement, and does not
|
||||
// otherwise impact this test, so set it to 0.0.0.
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.FileBaselineInformationEnabled]));
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.11.3");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info should be present, but it is absent");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info if feature disabled", async (t) => {
|
||||
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info for 2.11.2", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
||||
// The version of CodeQL is checked upstream to determine feature enablement, so it does not
|
||||
// affect this test.
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.11.2");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
|
||||
});
|
||||
function stubToolRunnerConstructor() {
|
||||
|
||||
File diff suppressed because one or more lines are too long
31
lib/config-utils.js
generated
31
lib/config-utils.js
generated
@@ -240,8 +240,12 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
|
||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureEnablement, configFile);
|
||||
}
|
||||
// Otherwise, must be a reference to another repo
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||
// Otherwise, must be a reference to another repo.
|
||||
// If config parsing is handled in CLI, then this repo will be downloaded
|
||||
// later by the CLI.
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
// Regex validating stars in paths or paths-ignore entries.
|
||||
@@ -925,22 +929,23 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
|
||||
else {
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
||||
}
|
||||
// The list of queries should not be empty for any language. If it is then
|
||||
// it is a user configuration error.
|
||||
for (const language of config.languages) {
|
||||
const hasBuiltinQueries = ((_a = config.queries[language]) === null || _a === void 0 ? void 0 : _a.builtin.length) > 0;
|
||||
const hasCustomQueries = ((_b = config.queries[language]) === null || _b === void 0 ? void 0 : _b.custom.length) > 0;
|
||||
const hasPacks = (((_c = config.packs[language]) === null || _c === void 0 ? void 0 : _c.length) || 0) > 0;
|
||||
if (!hasPacks && !hasBuiltinQueries && !hasCustomQueries) {
|
||||
throw new Error(`Did not detect any queries to run for ${language}. ` +
|
||||
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
||||
}
|
||||
}
|
||||
// When using the codescanning config in the CLI, pack downloads
|
||||
// happen in the CLI during the `database init` command, so no need
|
||||
// to download them here.
|
||||
await (0, util_1.logCodeScanningConfigInCli)(codeQL, featureEnablement, logger);
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
|
||||
// The list of queries should not be empty for any language. If it is then
|
||||
// it is a user configuration error.
|
||||
// This check occurs in the CLI when it parses the config file.
|
||||
for (const language of config.languages) {
|
||||
const hasBuiltinQueries = ((_a = config.queries[language]) === null || _a === void 0 ? void 0 : _a.builtin.length) > 0;
|
||||
const hasCustomQueries = ((_b = config.queries[language]) === null || _b === void 0 ? void 0 : _b.custom.length) > 0;
|
||||
const hasPacks = (((_c = config.packs[language]) === null || _c === void 0 ? void 0 : _c.length) || 0) > 0;
|
||||
if (!hasPacks && !hasBuiltinQueries && !hasCustomQueries) {
|
||||
throw new Error(`Did not detect any queries to run for ${language}. ` +
|
||||
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
||||
}
|
||||
}
|
||||
const registries = parseRegistries(registriesInput);
|
||||
await downloadPacks(codeQL, config.languages, config.packs, registries, apiDetails, config.tempDir, logger);
|
||||
}
|
||||
|
||||
File diff suppressed because one or more lines are too long
42
lib/database-upload.js
generated
42
lib/database-upload.js
generated
@@ -44,24 +44,32 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
||||
const client = (0, api_client_1.getApiClient)();
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
for (const language of config.languages) {
|
||||
// Upload the database bundle.
|
||||
// Although we are uploading arbitrary file contents to the API, it's worth
|
||||
// noting that it's the API's job to validate that the contents is acceptable.
|
||||
// This API method is available to anyone with write access to the repo.
|
||||
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql, language));
|
||||
try {
|
||||
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
language,
|
||||
name: `${language}-database`,
|
||||
data: payload,
|
||||
headers: {
|
||||
authorization: `token ${apiDetails.auth}`,
|
||||
"Content-Type": "application/zip",
|
||||
},
|
||||
});
|
||||
logger.debug(`Successfully uploaded database for ${language}`);
|
||||
// Upload the database bundle.
|
||||
// Although we are uploading arbitrary file contents to the API, it's worth
|
||||
// noting that it's the API's job to validate that the contents is acceptable.
|
||||
// This API method is available to anyone with write access to the repo.
|
||||
const bundledDb = await (0, util_1.bundleDb)(config, language, codeql, language);
|
||||
const bundledDbSize = fs.statSync(bundledDb).size;
|
||||
const bundledDbReadStream = fs.createReadStream(bundledDb);
|
||||
try {
|
||||
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
language,
|
||||
name: `${language}-database`,
|
||||
data: bundledDbReadStream,
|
||||
headers: {
|
||||
authorization: `token ${apiDetails.auth}`,
|
||||
"Content-Type": "application/zip",
|
||||
"Content-Length": bundledDbSize,
|
||||
},
|
||||
});
|
||||
logger.debug(`Successfully uploaded database for ${language}`);
|
||||
}
|
||||
finally {
|
||||
bundledDbReadStream.close();
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
console.log(e);
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAC9B,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,8BAA8B;QAC9B,2EAA2E;QAC3E,8EAA8E;QAC9E,wEAAwE;QACxE,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAC7B,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CACnD,CAAC;QACF,IAAI;YACF,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;gBACE,KAAK,EAAE,aAAa,CAAC,KAAK;gBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;gBACxB,QAAQ;gBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;gBAC5B,IAAI,EAAE,OAAO;gBACb,OAAO,EAAE;oBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;oBACzC,cAAc,EAAE,iBAAiB;iBAClC;aACF,CACF,CAAC;YACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAxDD,0CAwDC"}
|
||||
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAC9B,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI;YACF,8BAA8B;YAC9B,2EAA2E;YAC3E,8EAA8E;YAC9E,wEAAwE;YACxE,MAAM,SAAS,GAAG,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;YACrE,MAAM,aAAa,GAAG,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC;YAClD,MAAM,mBAAmB,GAAG,EAAE,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAC;YAC3D,IAAI;gBACF,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;oBACE,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,IAAI,EAAE,mBAAmB;oBACzB,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;wBACzC,cAAc,EAAE,iBAAiB;wBACjC,gBAAgB,EAAE,aAAa;qBAChC;iBACF,CACF,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;aAChE;oBAAS;gBACR,mBAAmB,CAAC,KAAK,EAAE,CAAC;aAC7B;SACF;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AA7DD,0CA6DC"}
|
||||
@@ -1,3 +1,6 @@
|
||||
{
|
||||
"bundleVersion": "codeql-bundle-20221123"
|
||||
"bundleVersion": "codeql-bundle-20230105",
|
||||
"cliVersion": "2.12.0",
|
||||
"priorBundleVersion": "codeql-bundle-20221211",
|
||||
"priorCliVersion": "2.11.6"
|
||||
}
|
||||
|
||||
4
lib/error-matcher.js
generated
4
lib/error-matcher.js
generated
@@ -12,6 +12,10 @@ exports.namedMatchersForTesting = {
|
||||
message: "No code found during the build. Please see:\n" +
|
||||
"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
|
||||
},
|
||||
fatalError: {
|
||||
outputRegex: new RegExp("A fatal error occurred"),
|
||||
message: "A fatal error occurred.",
|
||||
},
|
||||
};
|
||||
// we collapse the matches into an array for use in execErrorCatcher
|
||||
exports.errorMatchers = Object.values(exports.namedMatchersForTesting);
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
|
||||
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;IACD,UAAU,EAAE;QACV,WAAW,EAAE,IAAI,MAAM,CAAC,wBAAwB,CAAC;QACjD,OAAO,EAAE,yBAAyB;KACnC;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
|
||||
3
lib/error-matcher.test.js
generated
3
lib/error-matcher.test.js
generated
@@ -16,6 +16,9 @@ NB We test the regexes for all the matchers against example log output snippets.
|
||||
2020-09-07T17:39:53.9251124Z [2020-09-07 17:39:53] [ERROR] Spawned process exited abnormally (code 255; tried to run: [/opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/autobuild.sh])
|
||||
`));
|
||||
});
|
||||
(0, ava_1.default)("fatalError matches against example log output", async (t) => {
|
||||
t.assert(testErrorMatcher("fatalError", "A fatal error occurred: Could not process query metadata for test-query.ql"));
|
||||
});
|
||||
function testErrorMatcher(matcherName, logSample) {
|
||||
if (!(matcherName in error_matcher_1.namedMatchersForTesting)) {
|
||||
throw new Error(`Unknown matcher ${matcherName}`);
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,IAAA,aAAI,EAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}
|
||||
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,IAAA,aAAI,EAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,+CAA+C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChE,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,YAAY,EACZ,4EAA4E,CAC7E,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}
|
||||
12
lib/feature-flags.js
generated
12
lib/feature-flags.js
generated
@@ -30,9 +30,9 @@ var Feature;
|
||||
Feature["BypassToolcacheKotlinSwiftEnabled"] = "bypass_toolcache_kotlin_swift_enabled";
|
||||
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
|
||||
Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
|
||||
Feature["FileBaselineInformationEnabled"] = "file_baseline_information_enabled";
|
||||
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
||||
Feature["TrapCachingEnabled"] = "trap_caching_enabled";
|
||||
Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
|
||||
})(Feature = exports.Feature || (exports.Feature = {}));
|
||||
exports.featureConfig = {
|
||||
[Feature.BypassToolcacheEnabled]: {
|
||||
@@ -53,11 +53,7 @@ exports.featureConfig = {
|
||||
},
|
||||
[Feature.CliConfigFileEnabled]: {
|
||||
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
|
||||
minimumVersion: "2.11.1",
|
||||
},
|
||||
[Feature.FileBaselineInformationEnabled]: {
|
||||
envVar: "CODEQL_FILE_BASELINE_INFORMATION",
|
||||
minimumVersion: "2.11.3",
|
||||
minimumVersion: "2.11.6",
|
||||
},
|
||||
[Feature.MlPoweredQueriesEnabled]: {
|
||||
envVar: "CODEQL_ML_POWERED_QUERIES",
|
||||
@@ -67,6 +63,10 @@ exports.featureConfig = {
|
||||
envVar: "CODEQL_TRAP_CACHING",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.UploadFailedSarifEnabled]: {
|
||||
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
|
||||
minimumVersion: "2.11.3",
|
||||
},
|
||||
};
|
||||
exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
|
||||
/**
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA4C;AAI5C,6CAA+B;AAM/B,IAAY,OAQX;AARD,WAAY,OAAO;IACjB,8DAAmD,CAAA;IACnD,sFAA2E,CAAA;IAC3E,2DAAgD,CAAA;IAChD,2EAAgE,CAAA;IAChE,+EAAoE,CAAA;IACpE,iEAAsD,CAAA;IACtD,sDAA2C,CAAA;AAC7C,CAAC,EARW,OAAO,GAAP,eAAO,KAAP,eAAO,QAQlB;AAEY,QAAA,aAAa,GAGtB;IACF,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;QAChC,MAAM,EAAE,yBAAyB;QACjC,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,iCAAiC,CAAC,EAAE;QAC3C,MAAM,EAAE,sCAAsC;QAC9C,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,4BAA4B,CAAC,EAAE;QACtC,MAAM,EAAE,gCAAgC;QACxC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,oBAAoB,CAAC,EAAE;QAC9B,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,QAAQ;KACzB;IACD,CAAC,OAAO,CAAC,8BAA8B,CAAC,EAAE;QACxC,MAAM,EAAE,kCAAkC;QAC1C,cAAc,EAAE,QAAQ;KACzB;IACD,CAAC,OAAO,CAAC,uBAAuB,CAAC,EAAE;QACjC,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,OAAO;KACxB;IACD,CAAC,OAAO,CAAC,kBAAkB,CAAC,EAAE;QAC5B,MAAM,EAAE,qBAAqB;QAC7B,cAAc,EAAE,SAAS;KAC1B;CACF,CAAC;AAUW,QAAA,uBAAuB,GAAG,2BAA2B,CAAC;AAEnE;;;;GAIG;AACH,MAAa,QAAQ;IAGnB,YACE,aAAiC,EACjC,aAA4B,EAC5B,OAAe,EACf,MAAc;QAEd,IAAI,CAAC,kBAAkB,GAAG,IAAI,kBAAkB,CAC9C,aAAa,EACb,aAAa,EACb,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,+BAAuB,CAAC,EAC3C,MAAM,CACP,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACH,KAAK,CAAC,QAAQ,CAAC,OAAgB,EAAE,MAAe;QAC9C,IAAI,CAAC,MAAM,IAAI,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,EAAE;YACpD,MAAM,IAAI,KAAK,CACb,8DAA8D,OAAO,2CAA2C,CACjH,CAAC;SACH;QAED,oDAAoD;QACpD,IAAI,OAAO,KAAK,OAAO,CAAC,sBAAsB,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE;YACrE,OAAO,KAAK,CAAC;SACd;QAED,MAAM,MAAM,GAAG,CACb,OAAO,CAAC,GAAG,CAAC,qBAAa,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE,CACjD,CAAC,iBAAiB,EAAE,CAAC;QAEtB,sFAAsF;QACtF,IAAI,MAAM,KAAK,OAAO,EAAE;YACtB,OAAO,KAAK,CAAC;SACd;QAED,yEAAyE;QACzE,MAAM,cAAc,GAAG,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,CAAC;QAC7D,IAAI,MAAM,IAAI,cAAc,EAAE;YAC5B,IAAI,CAAC,CAAC,MAAM,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE;gBAC5D,OAAO,KAAK,CAAC;aACd;SACF;QAED,8EAA8E;QAC9E,IAAI,MAAM,KAAK,MAAM,EAAE;YACrB,OAAO,IAAI,CAAC;SACb;QACD,gDAAgD;QAChD,OAAO,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzD,CAAC;CACF;AAjED,4BAiEC;AAED,MAAM,kBAAkB;IAGtB,YACmB,aAAiC,EACjC,aAA4B,EAC5B,gBAAwB,EACxB,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,kBAAa,GAAb,aAAa,CAAe;QAC5B,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAQ;QAE/B,IAAI;IACN,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,OAAgB;QAC7B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC7C,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,qCAAqC,OAAO,4BAA4B,CACzE,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,MAAM,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC5C,IAAI,iBAAiB,KAAK,SAAS,EAAE;YACnC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,YAAY,OAAO,uDAAuD,CAC3E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,CAAC,CAAC,iBAAiB,CAAC;IAC7B,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,0CAA0C;QAC1C,IAAI,IAAI,CAAC,iBAAiB,KAAK,SAAS,EAAE;YACxC,OAAO,IAAI,CAAC,iBAAiB,CAAC;SAC/B;QAED,wEAAwE;QACxE,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC9C,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,IAAI,CAAC,iBAAiB,GAAG,SAAS,CAAC;YACnC,OAAO,SAAS,CAAC;SAClB;QAED,wCAAwC;QACxC,IAAI,WAAW,GAAG,MAAM,IAAI,CAAC,eAAe,EAAE,CAAC;QAC/C,IAAI,WAAW,KAAK,SAAS,EAAE;YAC7B,WAAW,GAAG,EAAE,CAAC;SAClB;QAED,+BAA+B;QAC/B,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QAErC,+DAA+D;QAC/D,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAExC,OAAO,WAAW,CAAC;IACrB,CAAC;IAEO,KAAK,CAAC,cAAc;QAG1B,IAAI;YACF,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,gBAAgB,CAAC,EAAE;gBACxC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8BAA8B,IAAI,CAAC,gBAAgB,EAAE,CACtD,CAAC;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,E
AAE,CAAC,YAAY,CAAC,IAAI,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC,CAAC;aACnE;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,mCAAmC,CAC1G,CAAC;SACH;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,KAAK,CAAC,eAAe,CAC3B,KAAoC;QAEpC,IAAI;YACF,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,4BAA4B,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;YACvE,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,GAAG,CAC1E,CAAC;SACH;IACH,CAAC;IAEO,KAAK,CAAC,eAAe;QAC3B,iDAAiD;QACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,oEAAoE,CACrE,CAAC;YACF,OAAO,EAAE,CAAC;SACX;QACD,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAA,yBAAY,GAAE,CAAC,OAAO,CAC3C,8DAA8D,EAC9D;gBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;gBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;aAC9B,CACF,CAAC;YACF,OAAO,QAAQ,CAAC,IAAI,CAAC;SACtB;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;gBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;oBAC9F,oEAAoE;oBACpE,qFAAqF;oBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;aACH;iBAAM;gBACL,kFAAkF;gBAClF,8EAA8E;gBAC9E,2FAA2F;gBAC3F,eAAe;gBACf,MAAM,IAAI,KAAK,CACb,sEAAsE,CAAC,EAAE,CAC1E,CAAC;aACH;SACF;IACH,CAAC;CACF"}
|
||||
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA4C;AAI5C,6CAA+B;AAM/B,IAAY,OAQX;AARD,WAAY,OAAO;IACjB,8DAAmD,CAAA;IACnD,sFAA2E,CAAA;IAC3E,2DAAgD,CAAA;IAChD,2EAAgE,CAAA;IAChE,iEAAsD,CAAA;IACtD,sDAA2C,CAAA;IAC3C,mEAAwD,CAAA;AAC1D,CAAC,EARW,OAAO,GAAP,eAAO,KAAP,eAAO,QAQlB;AAEY,QAAA,aAAa,GAGtB;IACF,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;QAChC,MAAM,EAAE,yBAAyB;QACjC,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,iCAAiC,CAAC,EAAE;QAC3C,MAAM,EAAE,sCAAsC;QAC9C,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,4BAA4B,CAAC,EAAE;QACtC,MAAM,EAAE,gCAAgC;QACxC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,oBAAoB,CAAC,EAAE;QAC9B,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,QAAQ;KACzB;IACD,CAAC,OAAO,CAAC,uBAAuB,CAAC,EAAE;QACjC,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,OAAO;KACxB;IACD,CAAC,OAAO,CAAC,kBAAkB,CAAC,EAAE;QAC5B,MAAM,EAAE,qBAAqB;QAC7B,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,wBAAwB,CAAC,EAAE;QAClC,MAAM,EAAE,mCAAmC;QAC3C,cAAc,EAAE,QAAQ;KACzB;CACF,CAAC;AAUW,QAAA,uBAAuB,GAAG,2BAA2B,CAAC;AAEnE;;;;GAIG;AACH,MAAa,QAAQ;IAGnB,YACE,aAAiC,EACjC,aAA4B,EAC5B,OAAe,EACf,MAAc;QAEd,IAAI,CAAC,kBAAkB,GAAG,IAAI,kBAAkB,CAC9C,aAAa,EACb,aAAa,EACb,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,+BAAuB,CAAC,EAC3C,MAAM,CACP,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACH,KAAK,CAAC,QAAQ,CAAC,OAAgB,EAAE,MAAe;QAC9C,IAAI,CAAC,MAAM,IAAI,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,EAAE;YACpD,MAAM,IAAI,KAAK,CACb,8DAA8D,OAAO,2CAA2C,CACjH,CAAC;SACH;QAED,oDAAoD;QACpD,IAAI,OAAO,KAAK,OAAO,CAAC,sBAAsB,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE;YACrE,OAAO,KAAK,CAAC;SACd;QAED,MAAM,MAAM,GAAG,CACb,OAAO,CAAC,GAAG,CAAC,qBAAa,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE,CACjD,CAAC,iBAAiB,EAAE,CAAC;QAEtB,sFAAsF;QACtF,IAAI,MAAM,KAAK,OAAO,EAAE;YACtB,OAAO,KAAK,CAAC;SACd;QAED,yEAAyE;QACzE,MAAM,cAAc,GAAG,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,CAAC;QAC7D,IAAI,MAAM,IAAI,cAAc,EAAE;YAC5B,IAAI,CAAC,CAAC,MAAM,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE;gBAC5D,OAAO,KAAK,CAAC;aACd;SACF;QAED,8EAA8E;QAC9E,IAAI,MAAM,KAAK,MAAM,EAAE;YACrB,OAAO,IAAI,CAAC;SACb;QACD,gDAAgD;QAChD,OAAO,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzD,CAAC;CACF;AAjED,4BAiEC;AAED,MAAM,kBAAkB;IAGtB,YACmB,aAAiC,EACjC,aAA4B,EAC5B,gBAAwB,EACxB,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,kBAAa,GAAb,aAAa,CAAe;QAC5B,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAQ;QAE/B,IAAI;IACN,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,OAAgB;QAC7B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC7C,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,qCAAqC,OAAO,4BAA4B,CACzE,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,MAAM,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC5C,IAAI,iBAAiB,KAAK,SAAS,EAAE;YACnC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,YAAY,OAAO,uDAAuD,CAC3E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,CAAC,CAAC,iBAAiB,CAAC;IAC7B,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,0CAA0C;QAC1C,IAAI,IAAI,CAAC,iBAAiB,KAAK,SAAS,EAAE;YACxC,OAAO,IAAI,CAAC,iBAAiB,CAAC;SAC/B;QAED,wEAAwE;QACxE,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC9C,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,IAAI,CAAC,iBAAiB,GAAG,SAAS,CAAC;YACnC,OAAO,SAAS,CAAC;SAClB;QAED,wCAAwC;QACxC,IAAI,WAAW,GAAG,MAAM,IAAI,CAAC,eAAe,EAAE,CAAC;QAC/C,IAAI,WAAW,KAAK,SAAS,EAAE;YAC7B,WAAW,GAAG,EAAE,CAAC;SAClB;QAED,+BAA+B;QAC/B,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QAErC,+DAA+D;QAC/D,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAExC,OAAO,WAAW,CAAC;IACrB,CAAC;IAEO,KAAK,CAAC,cAAc;QAG1B,IAAI;YACF,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,gBAAgB,CAAC,EAAE;gBACxC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8BAA8B,IAAI,CAAC,gBAAgB,EAAE,CACtD,CAAC;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,E
AAE,CAAC,YAAY,CAAC,IAAI,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC,CAAC;aACnE;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,mCAAmC,CAC1G,CAAC;SACH;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,KAAK,CAAC,eAAe,CAC3B,KAAoC;QAEpC,IAAI;YACF,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,4BAA4B,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;YACvE,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,GAAG,CAC1E,CAAC;SACH;IACH,CAAC;IAEO,KAAK,CAAC,eAAe;QAC3B,iDAAiD;QACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,oEAAoE,CACrE,CAAC;YACF,OAAO,EAAE,CAAC;SACX;QACD,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAA,yBAAY,GAAE,CAAC,OAAO,CAC3C,8DAA8D,EAC9D;gBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;gBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;aAC9B,CACF,CAAC;YACF,OAAO,QAAQ,CAAC,IAAI,CAAC;SACtB;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;gBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;oBAC9F,oEAAoE;oBACpE,qFAAqF;oBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;aACH;iBAAM;gBACL,kFAAkF;gBAClF,8EAA8E;gBAC9E,2FAA2F;gBAC3F,eAAe;gBACf,MAAM,IAAI,KAAK,CACb,sEAAsE,CAAC,EAAE,CAC1E,CAAC;aACH;SACF;IACH,CAAC;CACF"}
|
||||
80
lib/init-action-post-helper.js
generated
80
lib/init-action-post-helper.js
generated
@@ -19,24 +19,94 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.run = void 0;
|
||||
exports.run = exports.tryUploadSarifIfRunFailed = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const config_utils_1 = require("./config-utils");
|
||||
const logging_1 = require("./logging");
|
||||
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs) {
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const shared_environment_1 = require("./shared-environment");
|
||||
const uploadLib = __importStar(require("./upload-lib"));
|
||||
const util_1 = require("./util");
|
||||
const workflow_1 = require("./workflow");
|
||||
function createFailedUploadFailedSarifResult(error) {
|
||||
return {
|
||||
upload_failed_run_error: error instanceof Error ? error.message : String(error),
|
||||
upload_failed_run_stack_trace: error instanceof Error ? error.stack : undefined,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Upload a failed SARIF file if we can verify that SARIF upload is enabled and determine the SARIF
|
||||
* category for the workflow.
|
||||
*/
|
||||
async function maybeUploadFailedSarif(config, repositoryNwo, featureEnablement, logger) {
|
||||
var _a;
|
||||
if (!config.codeQLCmd) {
|
||||
return { upload_failed_run_skipped_because: "CodeQL command not found" };
|
||||
}
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
if (!(await featureEnablement.getValue(feature_flags_1.Feature.UploadFailedSarifEnabled, codeql))) {
|
||||
return { upload_failed_run_skipped_because: "Feature disabled" };
|
||||
}
|
||||
const workflow = await (0, workflow_1.getWorkflow)();
|
||||
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||
const matrix = (0, util_1.parseMatrixInput)(actionsUtil.getRequiredInput("matrix"));
|
||||
if ((0, workflow_1.getUploadInputOrThrow)(workflow, jobName, matrix) !== "true" ||
|
||||
(0, util_1.isInTestMode)()) {
|
||||
return { upload_failed_run_skipped_because: "SARIF upload is disabled" };
|
||||
}
|
||||
const category = (0, workflow_1.getCategoryInputOrThrow)(workflow, jobName, matrix);
|
||||
const checkoutPath = (0, workflow_1.getCheckoutPathInputOrThrow)(workflow, jobName, matrix);
|
||||
const sarifFile = "../codeql-failed-run.sarif";
|
||||
await codeql.diagnosticsExport(sarifFile, category);
|
||||
core.info(`Uploading failed SARIF file ${sarifFile}`);
|
||||
const uploadResult = await uploadLib.uploadFromActions(sarifFile, checkoutPath, category, logger);
|
||||
await uploadLib.waitForProcessing(repositoryNwo, uploadResult.sarifID, logger, { isUnsuccessfulExecution: true });
|
||||
return (_a = uploadResult === null || uploadResult === void 0 ? void 0 : uploadResult.statusReport) !== null && _a !== void 0 ? _a : {};
|
||||
}
|
||||
async function tryUploadSarifIfRunFailed(config, repositoryNwo, featureEnablement, logger) {
|
||||
if (process.env[shared_environment_1.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY] !== "true") {
|
||||
try {
|
||||
return await maybeUploadFailedSarif(config, repositoryNwo, featureEnablement, logger);
|
||||
}
|
||||
catch (e) {
|
||||
logger.debug(`Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`);
|
||||
return createFailedUploadFailedSarifResult(e);
|
||||
}
|
||||
}
|
||||
else {
|
||||
return {
|
||||
upload_failed_run_skipped_because: "Analyze Action completed successfully",
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.tryUploadSarifIfRunFailed = tryUploadSarifIfRunFailed;
|
||||
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs, repositoryNwo, featureEnablement, logger) {
|
||||
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
|
||||
if (config === undefined) {
|
||||
logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
|
||||
return;
|
||||
}
|
||||
const uploadFailedSarifResult = await tryUploadSarifIfRunFailed(config, repositoryNwo, featureEnablement, logger);
|
||||
if (uploadFailedSarifResult.upload_failed_run_skipped_because) {
|
||||
logger.debug("Won't upload a failed SARIF file for this CodeQL code scanning run because: " +
|
||||
`${uploadFailedSarifResult.upload_failed_run_skipped_because}.`);
|
||||
}
|
||||
// Throw an error if in integration tests, we expected to upload a SARIF file for a failed run
|
||||
// but we didn't upload anything.
|
||||
if (process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true" &&
|
||||
!uploadFailedSarifResult.raw_upload_size_bytes) {
|
||||
throw new Error("Expected to upload a failed SARIF file for this CodeQL code scanning run, " +
|
||||
`but the result was instead ${uploadFailedSarifResult}.`);
|
||||
}
|
||||
// Upload appropriate Actions artifacts for debugging
|
||||
if (config === null || config === void 0 ? void 0 : config.debugMode) {
|
||||
if (config.debugMode) {
|
||||
core.info("Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts...");
|
||||
await uploadDatabaseBundleDebugArtifact(config, logger);
|
||||
await uploadLogsDebugArtifact(config);
|
||||
await printDebugLogs(config);
|
||||
}
|
||||
return uploadFailedSarifResult;
|
||||
}
|
||||
exports.run = run;
|
||||
//# sourceMappingURL=init-action-post-helper.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,uCAA6C;AAEtC,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB;IAExB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;AACH,CAAC;AAxBD,kBAwBC"}
|
||||
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAAuF;AACvF,wDAA0C;AAC1C,iCAA6E;AAC7E,yCAKoB;AAWpB,SAAS,mCAAmC,CAC1C,KAAc;IAEd,OAAO;QACL,uBAAuB,EACrB,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;QACxD,6BAA6B,EAC3B,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS;KACnD,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IACE,CAAC,CAAC,MAAM,iBAAiB,CAAC,QAAQ,CAChC,uBAAO,CAAC,wBAAwB,EAChC,MAAM,CACP,CAAC,EACF;QACA,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,CAAC;KAClE;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,GAAE,CAAC;IACrC,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,IACE,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,KAAK,MAAM;QAC3D,IAAA,mBAAY,GAAE,EACd;QACA,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAE5E,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAC/C,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IAEpD,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;IACF,OAAO,MAAA,YAAY,aAAZ,YAAY,uBAAZ,YAAY,CAAE,YAAY,mCAAI,EAAE,CAAC;AAC1C,CAAC;AAEM,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,IAAI,OAAO,CAAC,GAAG,CAAC,oEAA+C,CAAC,KAAK,MAAM,EAAE;QAC3E,IAAI;YACF,OAAO,MAAM,sBAAsB,CACjC,MAAM,EACN,aAAa,EACb,iBAAiB,EACjB,MAAM,CACP,CAAC;SACH;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,KAAK,CACV,2EAA2E,CAAC,EAAE,CAC/E,CAAC;YACF,OAAO,mCAAmC,CAAC,CAAC,CAAC,CAAC;SAC/C;KACF;SAAM;QACL,OAAO;YACL,iCAAiC,EAC/B,uCAAuC;SAC1C,CAAC;KACH;AACH,CAAC;AA1BD,8DA0BC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,MAAM,uBAAuB,GAAG,MAAM,yBAAyB,CAC7D,MAAM,EACN,aAAa,EACb,iBAAiB,EACjB,MAAM,CACP,CAAC;IACF,IAAI,uBAAuB,CAAC,iCAAiC,EAAE;QAC7D,MAAM,CAAC,KAAK,CACV,8EAA8E;YAC5E,GAAG,uBAAuB,CAAC,iCAAiC,GAAG,CAClE,CAAC;KACH;IACD,8FAA8F;IAC9F,iCAAiC;IACjC,IACE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM;QAClE,CAAC,uBAAuB,CAAC,qBAAqB,EAC9C;QACA,MAAM,IAAI,KAAK,CACb,4EAA4E;YAC1E,8BAA8B,uBAAuB,GAAG,CAC3D,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;IAED,OAAO,uBAAuB,CAAC;AACjC,CAAC;AApDD,kBAoDC"}
|
||||
196
lib/init-action-post-helper.test.js
generated
196
lib/init-action-post-helper.test.js
generated
@@ -24,13 +24,21 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const codeql = __importStar(require("./codeql"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const uploadLib = __importStar(require("./upload-lib"));
|
||||
const util = __importStar(require("./util"));
|
||||
const workflow = __importStar(require("./workflow"));
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
(0, ava_1.default)("post: init action with debug mode off", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["RUNNER_TEMP"] = tmpDir;
|
||||
const gitHubVersion = {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
@@ -44,7 +52,7 @@ const util = __importStar(require("./util"));
|
||||
const uploadDatabaseBundleSpy = sinon.spy();
|
||||
const uploadLogsSpy = sinon.spy();
|
||||
const printDebugLogsSpy = sinon.spy();
|
||||
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy);
|
||||
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.assert(uploadDatabaseBundleSpy.notCalled);
|
||||
t.assert(uploadLogsSpy.notCalled);
|
||||
t.assert(printDebugLogsSpy.notCalled);
|
||||
@@ -52,6 +60,7 @@ const util = __importStar(require("./util"));
|
||||
});
|
||||
(0, ava_1.default)("post: init action with debug mode on", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["RUNNER_TEMP"] = tmpDir;
|
||||
const gitHubVersion = {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
@@ -65,10 +74,193 @@ const util = __importStar(require("./util"));
|
||||
const uploadDatabaseBundleSpy = sinon.spy();
|
||||
const uploadLogsSpy = sinon.spy();
|
||||
const printDebugLogsSpy = sinon.spy();
|
||||
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy);
|
||||
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.assert(uploadDatabaseBundleSpy.called);
|
||||
t.assert(uploadLogsSpy.called);
|
||||
t.assert(printDebugLogsSpy.called);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("uploads failed SARIF run for typical workflow", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
languages: "javascript",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Perform CodeQL Analysis",
|
||||
uses: "github/codeql-action/analyze@v2",
|
||||
with: {
|
||||
category: "my-category",
|
||||
},
|
||||
},
|
||||
]);
|
||||
await testFailedSarifUpload(t, actionsWorkflow, { category: "my-category" });
|
||||
});
|
||||
(0, ava_1.default)("doesn't upload failed SARIF for workflow with upload: false", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
languages: "javascript",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Perform CodeQL Analysis",
|
||||
uses: "github/codeql-action/analyze@v2",
|
||||
with: {
|
||||
category: "my-category",
|
||||
upload: false,
|
||||
},
|
||||
},
|
||||
]);
|
||||
const result = await testFailedSarifUpload(t, actionsWorkflow, {
|
||||
expectUpload: false,
|
||||
});
|
||||
t.is(result.upload_failed_run_skipped_because, "SARIF upload is disabled");
|
||||
});
|
||||
(0, ava_1.default)("uploading failed SARIF run succeeds when workflow uses an input with a matrix var", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
languages: "javascript",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Perform CodeQL Analysis",
|
||||
uses: "github/codeql-action/analyze@v2",
|
||||
with: {
|
||||
category: "/language:${{ matrix.language }}",
|
||||
},
|
||||
},
|
||||
]);
|
||||
await testFailedSarifUpload(t, actionsWorkflow, {
|
||||
category: "/language:csharp",
|
||||
matrix: { language: "csharp" },
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("uploading failed SARIF run fails when workflow uses a complex upload input", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
            languages: "javascript",
        },
    },
    {
        name: "Perform CodeQL Analysis",
        uses: "github/codeql-action/analyze@v2",
        with: {
            upload: "${{ matrix.language != 'csharp' }}",
        },
    },
]);
const result = await testFailedSarifUpload(t, actionsWorkflow, {
    expectUpload: false,
});
t.is(result.upload_failed_run_error, "Could not get upload input to github/codeql-action/analyze since it contained an " +
    "unrecognized dynamic value.");
});
(0, ava_1.default)("uploading failed SARIF run fails when workflow does not reference github/codeql-action", async (t) => {
    const actionsWorkflow = createTestWorkflow([
        {
            name: "Checkout repository",
            uses: "actions/checkout@v3",
        },
    ]);
    const result = await testFailedSarifUpload(t, actionsWorkflow, {
        expectUpload: false,
    });
    t.is(result.upload_failed_run_error, "Could not get upload input to github/codeql-action/analyze since the analyze job does not " +
        "call github/codeql-action/analyze.");
    t.truthy(result.upload_failed_run_stack_trace);
});
function createTestWorkflow(steps) {
    return {
        name: "CodeQL",
        on: {
            push: {
                branches: ["main"],
            },
            pull_request: {
                branches: ["main"],
            },
        },
        jobs: {
            analyze: {
                name: "CodeQL Analysis",
                "runs-on": "ubuntu-latest",
                steps,
            },
        },
    };
}
async function testFailedSarifUpload(t, actionsWorkflow, { category, expectUpload = true, matrix = {}, } = {}) {
    const config = {
        codeQLCmd: "codeql",
        debugMode: true,
        languages: [],
        packs: [],
    };
    process.env["GITHUB_JOB"] = "analyze";
    process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
    process.env["GITHUB_WORKSPACE"] =
        "/home/runner/work/codeql-action/codeql-action";
    sinon
        .stub(actionsUtil, "getRequiredInput")
        .withArgs("matrix")
        .returns(JSON.stringify(matrix));
    const codeqlObject = await codeql.getCodeQLForTesting();
    sinon.stub(codeql, "getCodeQL").resolves(codeqlObject);
    const diagnosticsExportStub = sinon.stub(codeqlObject, "diagnosticsExport");
    sinon.stub(workflow, "getWorkflow").resolves(actionsWorkflow);
    const uploadFromActions = sinon.stub(uploadLib, "uploadFromActions");
    uploadFromActions.resolves({
        sarifID: "42",
        statusReport: { raw_upload_size_bytes: 20, zipped_upload_size_bytes: 10 },
    });
    const waitForProcessing = sinon.stub(uploadLib, "waitForProcessing");
    const result = await initActionPostHelper.tryUploadSarifIfRunFailed(config, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.UploadFailedSarifEnabled]), (0, logging_1.getRunnerLogger)(true));
    if (expectUpload) {
        t.deepEqual(result, {
            raw_upload_size_bytes: 20,
            zipped_upload_size_bytes: 10,
        });
    }
    if (expectUpload) {
        t.true(diagnosticsExportStub.calledOnceWith(sinon.match.string, category), `Actual args were: ${diagnosticsExportStub.args}`);
        t.true(uploadFromActions.calledOnceWith(sinon.match.string, sinon.match.string, category, sinon.match.any), `Actual args were: ${uploadFromActions.args}`);
        t.true(waitForProcessing.calledOnceWith(sinon.match.any, "42", sinon.match.any, {
            isUnsuccessfulExecution: true,
        }));
    }
    else {
        t.true(diagnosticsExportStub.notCalled);
        t.true(uploadFromActions.notCalled);
        t.true(waitForProcessing.notCalled);
    }
    return result;
}
//# sourceMappingURL=init-action-post-helper.test.js.map
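The two tests above pin down the error messages for an `upload` input that cannot be resolved statically and for a workflow that never calls the analyze action. As a rough sketch of the lookup they exercise (illustrative only; `WorkflowStep` and `getAnalyzeStepUploadInput` are hypothetical names, not the action's real API), the decision reduces to:

// Illustrative TypeScript sketch of the kind of lookup the tests above exercise.
interface WorkflowStep {
  name?: string;
  uses?: string;
  with?: { [key: string]: string };
}

function getAnalyzeStepUploadInput(steps: WorkflowStep[]): boolean {
  const analyzeStep = steps.find((step) =>
    step.uses?.startsWith("github/codeql-action/analyze"),
  );
  if (analyzeStep === undefined) {
    // Mirrors the "does not call github/codeql-action/analyze" failure mode.
    throw new Error("The analyze job does not call github/codeql-action/analyze.");
  }
  const upload = analyzeStep.with?.upload;
  if (upload === undefined || upload === "true") {
    return true;
  }
  if (upload === "false") {
    return false;
  }
  // Expressions such as "${{ matrix.language != 'csharp' }}" cannot be resolved
  // statically, which is the "unrecognized dynamic value" case.
  throw new Error("Unrecognized dynamic value for the upload input.");
}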
File diff suppressed because one or more lines are too long
lib/init-action-post.js (generated, 30 changed lines)
@@ -25,17 +25,37 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const debugArtifacts = __importStar(require("./debug-artifacts"));
const feature_flags_1 = require("./feature-flags");
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util_1 = require("./util");
async function runWrapper() {
    const startedAt = new Date();
    let uploadFailedSarifResult;
    try {
        await initActionPostHelper.run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, actionsUtil.printDebugLogs);
        const logger = (0, logging_1.getActionsLogger)();
        const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
        (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
        const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
        const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
        uploadFailedSarifResult = await initActionPostHelper.run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, actions_util_1.printDebugLogs, repositoryNwo, features, logger);
    }
    catch (error) {
        core.setFailed(`init post-action step failed: ${error}`);
        console.log(error);
    catch (e) {
        core.setFailed(e instanceof Error ? e.message : String(e));
        console.log(e);
        await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init-post", (0, actions_util_1.getActionsStatus)(e), startedAt, String(e), e instanceof Error ? e.stack : undefined));
        return;
    }
    const statusReportBase = await (0, actions_util_1.createStatusReportBase)("init-post", "success", startedAt);
    const statusReport = {
        ...statusReportBase,
        ...uploadFailedSarifResult,
    };
    await (0, actions_util_1.sendStatusReport)(statusReport);
}
void runWrapper();
//# sourceMappingURL=init-action-post.js.map
@@ -1 +1 @@
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,4DAA8C;AAC9C,kEAAoD;AACpD,gFAAkE;AAElE,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,oBAAoB,CAAC,GAAG,CAC5B,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,WAAW,CAAC,cAAc,CAC3B,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,iCAAiC,KAAK,EAAE,CAAC,CAAC;QACzD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,iDAOwB;AACxB,6CAAgD;AAChD,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,iCAAwE;AAMxE,KAAK,UAAU,UAAU;IACvB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,uBAES,CAAC;IACd,IAAI;QACF,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QAEF,uBAAuB,GAAG,MAAM,oBAAoB,CAAC,GAAG,CACtD,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,6BAAc,EACd,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAE3D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,IAAA,+BAAgB,EACpB,MAAM,IAAA,qCAAsB,EAC1B,WAAW,EACX,IAAA,+BAAgB,EAAC,CAAC,CAAC,EACnB,SAAS,EACT,MAAM,CAAC,CAAC,CAAC,EACT,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CACzC,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAAyB;QACzC,GAAG,gBAAgB;QACnB,GAAG,uBAAuB;KAC3B,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
lib/init-action.js (generated, 3 changed lines)
@@ -31,6 +31,7 @@ const logging_1 = require("./logging");
const repository_1 = require("./repository");
const trap_caching_1 = require("./trap-caching");
const util_1 = require("./util");
const workflow_1 = require("./workflow");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendSuccessStatusReport(startedAt, config, toolsVersion, logger) {
@@ -90,7 +91,7 @@ async function run() {
    const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
    const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
    try {
        const workflowErrors = await (0, actions_util_1.validateWorkflow)();
        const workflowErrors = await (0, workflow_1.validateWorkflow)();
        if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
            return;
        }
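The second hunk above moves validateWorkflow from ./actions-util into the new ./workflow module. In source terms the calling code changes only its import path; a sketch, assuming the repository's module layout and therefore not runnable outside the repo:

// Sketch only: the import now comes from ./workflow instead of ./actions-util.
import { validateWorkflow } from "./workflow";

async function collectWorkflowErrors(): Promise<string> {
  // Returns a description of any problems detected in the calling workflow,
  // which the init action forwards in its "starting" status report.
  return await validateWorkflow();
}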
File diff suppressed because one or more lines are too long
lib/init.js (generated, 4 changed lines)
@@ -30,9 +30,9 @@ const configUtils = __importStar(require("./config-utils"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
const util_1 = require("./util");
async function initCodeQL(codeqlURL, apiDetails, tempDir, variant, bypassToolcache, logger) {
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, bypassToolcache, logger) {
    logger.startGroup("Setup CodeQL tools");
    const { codeql, toolsVersion } = await (0, codeql_1.setupCodeQL)(codeqlURL, apiDetails, tempDir, variant, bypassToolcache, logger, true);
    const { codeql, toolsVersion } = await (0, codeql_1.setupCodeQL)(toolsInput, apiDetails, tempDir, variant, bypassToolcache, logger, true);
    await codeql.printVersion();
    logger.endGroup();
    return { codeql, toolsVersion };
@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,eAAwB,EACxB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,OAAO,EACP,eAAe,EACf,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,eAAmC,EACnC,UAA8B,EAC9B,UAA8B,EAC9B,kBAA2B,EAC3B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,iBAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,eAAe,EACf,UAAU,EACV,UAAU,EACV,kBAAkB,EAClB,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,iBAAiB,EACjB,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA5CD,gCA4CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,iBAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,iBAAiB,EACjB,MAAM,CACP,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAlCD,0BAkCC;AAED;;;;;;;;GAQG;AACH,SAAS,YAAY,CAAC,CAAM;;IAC1B,IAAI,CAAC,CAAC,CAAC,YAAY,KAAK,CAAC,EAAE;QACzB,OAAO,CAAC,CAAC;KACV;IAED;IACE,2BAA2B;IAC3B,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC;SACnD,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,uCAAuC,CAAC,CAAA,EAC5D;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CACvB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;KACH;IAED;IACE,+EAA+E;IAC/E,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,wCAAwC,CAAC;;QAC7D,gEAAgE;QAChE,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,qBAAqB,CAAC,CAAA,EAC1C;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;KACtC;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MA
Ac;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE;gBACpE,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAzCD,8CAyCC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,UAA8B,EAC9B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,eAAwB,EACxB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,UAAU,EACV,UAAU,EACV,OAAO,EACP,OAAO,EACP,eAAe,EACf,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,eAAmC,EACnC,UAA8B,EAC9B,UAA8B,EAC9B,kBAA2B,EAC3B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,iBAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,eAAe,EACf,UAAU,EACV,UAAU,EACV,kBAAkB,EAClB,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,iBAAiB,EACjB,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA5CD,gCA4CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,iBAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,iBAAiB,EACjB,MAAM,CACP,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAlCD,0BAkCC;AAED;;;;;;;;GAQG;AACH,SAAS,YAAY,CAAC,CAAM;;IAC1B,IAAI,CAAC,CAAC,CAAC,YAAY,KAAK,CAAC,EAAE;QACzB,OAAO,CAAC,CAAC;KACV;IAED;IACE,2BAA2B;IAC3B,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC;SACnD,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,uCAAuC,CAAC,CAAA,EAC5D;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CACvB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;KACH;IAED;IACE,+EAA+E;IAC/E,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,wCAAwC,CAAC;;QAC7D,gEAAgE;QAChE,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,qBAAqB,CAAC,CAAA,EAC1C;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;KACtC;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MA
Ac;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE;gBACpE,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAzCD,8CAyCC"}
lib/shared-environment.js (generated, 23 changed lines)
@@ -1,14 +1,21 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_ACTION_TEST_MODE = exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_WORKFLOW_STARTED_AT = exports.ODASA_TRACER_CONFIGURATION = void 0;
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather that the init action.
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
exports.ODASA_TRACER_CONFIGURATION = exports.CODEQL_WORKFLOW_STARTED_AT = exports.CODEQL_ACTION_TEST_MODE = exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY = void 0;
/**
 * This environment variable is set to true when the `analyze` Action
 * completes successfully.
 */
exports.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY = "CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY";
exports.CODEQL_ACTION_TESTING_ENVIRONMENT = "CODEQL_ACTION_TESTING_ENVIRONMENT";
/** Used to disable uploading SARIF results or status reports to the GitHub API */
exports.CODEQL_ACTION_TEST_MODE = "CODEQL_ACTION_TEST_MODE";
/**
 * The time at which the first action (normally init) started executing.
 * If a workflow invokes a different action without first invoking the init
 * action (i.e. the upload action is being used by a third-party integrator)
 * then this variable will be assigned the start time of the action invoked
 * rather that the init action.
 */
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
//# sourceMappingURL=shared-environment.js.map
@@ -1 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAE1D,QAAA,iCAAiC,GAC5C,mCAAmC,CAAC;AAEtC,kFAAkF;AACrE,QAAA,uBAAuB,GAAG,yBAAyB,CAAC"}
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;;AAAA;;;GAGG;AACU,QAAA,+CAA+C,GAC1D,iDAAiD,CAAC;AAEvC,QAAA,iCAAiC,GAC5C,mCAAmC,CAAC;AAEtC,kFAAkF;AACrE,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AAEjE;;;;;;GAMG;AACU,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAE1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
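The hunk above adds CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY alongside the existing environment-variable names. A minimal sketch of how such a flag is set and read (illustrative; only the constant's value comes from the hunk, the helper names are hypothetical):

// Illustrative TypeScript sketch; not the action's actual code.
const CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY =
  "CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY";

// Hypothetical setter: the analyze step would flip this once it finishes cleanly.
function markAnalyzeCompletedSuccessfully(): void {
  process.env[CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY] = "true";
}

// Hypothetical check: a later step can tell whether analyze ever completed.
function analyzeCompletedSuccessfully(): boolean {
  return process.env[CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY] === "true";
}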
lib/testing-utils.js (generated, 3 changed lines)
@@ -21,6 +21,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFeatures = exports.mockCodeQLVersion = exports.mockLanguagesInRepo = exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
const github = __importStar(require("@actions/github"));
const nock = __importStar(require("nock"));
const sinon = __importStar(require("sinon"));
const apiClient = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql"));
@@ -85,6 +86,8 @@ function setupTests(test) {
        if (!t.passed) {
            process.stdout.write(t.context.testOutput);
        }
        // Undo any modifications made by nock
        nock.cleanAll();
        // Undo any modifications made by sinon
        sinon.restore();
        // Undo any modifications to the env
@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAGnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,2EAA2E;QAC3E,2EAA2E;QAC3E,yCAAyC;QACzC,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAC9C,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,KAAK,MAAM,CAClC,CAAC;QACF,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;YACvB,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QAED,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAjDD,gCAiDC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;IAC5C,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,OAAO,CAAC;AAC5C,CAAC;AAJD,4CAIC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MA
AM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CAAC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC;AAED,SAAgB,mBAAmB,CAAC,SAAmB;IACrD,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;IACzD,MAAM,aAAa,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC;QAC1C,MAAM,EAAE,GAAG;QACX,IAAI,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,IAAI,EAAE,EAAE;YACnC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YACd,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,EAAE,CAAC;QACN,OAAO,EAAE,EAAE;QACX,GAAG,EAAE,mCAAmC;KACzC,CAAC,CAAC;IAEH,UAAU,CAAC,OAAO,CAAC;QACjB,KAAK,EAAE;YACL,aAAa;SACd;KACK,CAAC,CAAC;IACV,OAAO,aAAa,CAAC;AACvB,CAAC;AAlBD,kDAkBC;AAED,SAAgB,iBAAiB,CAAC,OAAO;IACvC,OAAO;QACL,KAAK,CAAC,UAAU;YACd,OAAO,OAAO,CAAC;QACjB,CAAC;KACe,CAAC;AACrB,CAAC;AAND,8CAMC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,eAA0B;IACvD,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE;YAC1B,OAAO,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC3C,CAAC;KACF,CAAC;AACJ,CAAC;AAND,wCAMC"}
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,2CAA6B;AAC7B,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAGnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,2EAA2E;QAC3E,2EAA2E;QAC3E,yCAAyC;QACzC,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAC9C,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,KAAK,MAAM,CAClC,CAAC;QACF,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;YACvB,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QAED,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,sCAAsC;QACtC,IAAI,CAAC,QAAQ,EAAE,CAAC;QAEhB,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AApDD,gCAoDC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;IAC5C,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,OAAO,CAAC;AAC5C,CAAC;AAJD,4CAIC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SA
AgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CAAC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC;AAED,SAAgB,mBAAmB,CAAC,SAAmB;IACrD,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;IACzD,MAAM,aAAa,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC;QAC1C,MAAM,EAAE,GAAG;QACX,IAAI,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,IAAI,EAAE,EAAE;YACnC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YACd,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,EAAE,CAAC;QACN,OAAO,EAAE,EAAE;QACX,GAAG,EAAE,mCAAmC;KACzC,CAAC,CAAC;IAEH,UAAU,CAAC,OAAO,CAAC;QACjB,KAAK,EAAE;YACL,aAAa;SACd;KACK,CAAC,CAAC;IACV,OAAO,aAAa,CAAC;AACvB,CAAC;AAlBD,kDAkBC;AAED,SAAgB,iBAAiB,CAAC,OAAO;IACvC,OAAO;QACL,KAAK,CAAC,UAAU;YACd,OAAO,OAAO,CAAC;QACjB,CAAC;KACe,CAAC;AACrB,CAAC;AAND,8CAMC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,eAA0B;IACvD,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE;YAC1B,OAAO,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC3C,CAAC;KACF,CAAC;AACJ,CAAC;AAND,wCAMC"}
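The testing-utils hunk pulls in nock and makes setupTests clear nock interceptors as well as sinon stubs after each test. A sketch of the intended usage in a consuming test module (assuming ava and the repository's ./testing-utils module; not a file from the repository):

import test from "ava";

import { setupTests } from "./testing-utils";

// Registers hooks that capture test output and, after each test, restore
// sinon stubs, clear nock interceptors, and reset the environment.
setupTests(test);

test("example", (t) => {
  t.pass();
});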
lib/toolrunner-error-catcher.js (generated, 40 changed lines)
@@ -31,7 +31,7 @@ const safeWhich = __importStar(require("@chrisgavin/safe-which"));
 * @param args optional arguments for tool. Escaping is handled by the lib.
 * @param matchers defines specific codes and/or regexes that should lead to return of a custom error
 * @param options optional exec options. See ExecOptions
 * @returns Promise<number> exit code
 * @returns ReturnState exit code and stdout output, if applicable
 */
async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
    var _a, _b;
@@ -54,40 +54,36 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
        },
    };
    // we capture the original return code or error so that if no match is found we can duplicate the behavior
    let returnState;
    let exitCode;
    try {
        returnState = await new toolrunner.ToolRunner(await safeWhich.safeWhich(commandLine), args, {
        exitCode = await new toolrunner.ToolRunner(await safeWhich.safeWhich(commandLine), args, {
            ...options,
            listeners,
            ignoreReturnCode: true, // so we can check for specific codes using the matchers
        }).exec();
    }
    catch (e) {
        returnState = e instanceof Error ? e : new Error(String(e));
    }
    // if there is a zero return code then we do not apply the matchers
    if (returnState === 0)
        return returnState;
    if (matchers) {
        for (const matcher of matchers) {
            if (matcher.exitCode === returnState ||
                ((_a = matcher.outputRegex) === null || _a === void 0 ? void 0 : _a.test(stderr)) ||
                ((_b = matcher.outputRegex) === null || _b === void 0 ? void 0 : _b.test(stdout))) {
                throw new Error(matcher.message);
        // if there is a zero return code then we do not apply the matchers
        if (exitCode === 0)
            return { exitCode, stdout };
        if (matchers) {
            for (const matcher of matchers) {
                if (matcher.exitCode === exitCode ||
                    ((_a = matcher.outputRegex) === null || _a === void 0 ? void 0 : _a.test(stderr)) ||
                    ((_b = matcher.outputRegex) === null || _b === void 0 ? void 0 : _b.test(stdout))) {
                    throw new Error(matcher.message);
                }
            }
        }
    }
    if (typeof returnState === "number") {
        // only if we were instructed to ignore the return code do we ever return it non-zero
        if (options === null || options === void 0 ? void 0 : options.ignoreReturnCode) {
            return returnState;
            return { exitCode, stdout };
        }
        else {
            throw new Error(`The process '${commandLine}' failed with exit code ${returnState}`);
            throw new Error(`The process '${commandLine}' failed with exit code ${exitCode}`);
        }
    }
    else {
        throw returnState;
    catch (e) {
        const error = e instanceof Error ? e : new Error(String(e));
        throw error;
    }
}
exports.toolrunnerErrorCatcher = toolrunnerErrorCatcher;
@@ -1 +1 @@
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAnED,wDAmEC"}
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AASpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,QAAgB,CAAC;IACrB,IAAI;QACF,QAAQ,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CACxC,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;QAET,mEAAmE;QACnE,IAAI,QAAQ,KAAK,CAAC;YAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;QAEhD,IAAI,QAAQ,EAAE;YACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;gBAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,QAAQ;qBAC7B,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;qBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;oBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;iBAClC;aACF;SACF;QAED,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;SAC7B;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,QAAQ,EAAE,CACjE,CAAC;SACH;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,KAAK,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5D,MAAM,KAAK,CAAC;KACb;AACH,CAAC;AAhED,wDAgEC"}
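The change above replaces the bare numeric return value of toolrunnerErrorCatcher with a ReturnState object carrying both the exit code and the captured stdout. A sketch of how a caller adapts (illustrative; the command, matcher, and error text are made up, and the import assumes the repository's module layout):

import { toolrunnerErrorCatcher } from "./toolrunner-error-catcher";

async function getToolVersion(): Promise<string> {
  // The call now resolves to { exitCode, stdout } instead of a number.
  const { exitCode, stdout } = await toolrunnerErrorCatcher(
    "node",
    ["--version"],
    [{ exitCode: 123, outputRegex: new RegExp("fatal"), message: "tool reported a fatal error" }],
    { ignoreReturnCode: true },
  );
  if (exitCode !== 0) {
    throw new Error(`node --version exited with code ${exitCode}`);
  }
  return stdout.trim();
}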
lib/toolrunner-error-catcher.test.js (generated, 13 changed lines)
@@ -33,7 +33,8 @@ const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
        { exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" },
    ];
    t.deepEqual(await exec.exec("node", testArgs), 0);
    t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers), 0);
    const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers);
    t.deepEqual(returnState.exitCode, 0);
});
(0, ava_1.default)("regex matchers are applied to stdout for non-zero exit code", async (t) => {
    const testArgs = buildDummyArgs("foo bar\\nblort qux", "", "", 1);
@@ -116,9 +117,10 @@ const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
(0, ava_1.default)("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
    const testArgs = buildDummyArgs("standard output", "error output", "", 199);
    await t.throwsAsync((0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], { ignoreReturnCode: false }), { instanceOf: Error });
    t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
    const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
        ignoreReturnCode: true,
    }), 199);
});
    t.deepEqual(returnState.exitCode, 199);
});
(0, ava_1.default)("execErrorCatcher preserves behavior of provided listeners", async (t) => {
    const stdoutExpected = "standard output";
@@ -134,9 +136,10 @@ const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
        },
    };
    const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
    t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
    const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
        listeners,
    }), 0);
});
    t.deepEqual(returnState.exitCode, 0);
    t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
    t.deepEqual(stderrActual, `${stderrExpected}\n`);
});
File diff suppressed because one or more lines are too long
lib/upload-lib.js (generated, 124 changed lines)
@@ -36,6 +36,7 @@ const fingerprints = __importStar(require("./fingerprints"));
const repository_1 = require("./repository");
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
const workflow = __importStar(require("./workflow"));
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
@@ -127,9 +128,8 @@ function findSarifFilesInDir(sarifPath) {
exports.findSarifFilesInDir = findSarifFilesInDir;
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function uploadFromActions(sarifPath, logger) {
    return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(actionsUtil.getRequiredInput("checkout_path")), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), logger);
async function uploadFromActions(sarifPath, checkoutPath, category, logger) {
    return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), category, util.getRequiredEnvParam("GITHUB_WORKFLOW"), workflow.getWorkflowRunID(), checkoutPath, actionsUtil.getRequiredInput("matrix"), logger);
}
exports.uploadFromActions = uploadFromActions;
function getSarifFilePaths(sarifPath) {
@@ -269,48 +269,92 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
}
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
async function waitForProcessing(repositoryNwo, sarifID, logger) {
/**
 * Waits until either the analysis is successfully processed, a processing error
 * is reported, or `STATUS_CHECK_TIMEOUT_MILLISECONDS` elapses.
 *
 * If `isUnsuccessfulExecution` is passed, will throw an error if the analysis
 * processing does not produce a single error mentioning the unsuccessful
 * execution.
 */
async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
    isUnsuccessfulExecution: false,
}) {
    logger.startGroup("Waiting for processing to finish");
    const client = api.getApiClient();
    const statusCheckingStarted = Date.now();
    // eslint-disable-next-line no-constant-condition
    while (true) {
        if (Date.now() >
            statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
            // If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
            // It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
            logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
            break;
    try {
        const client = api.getApiClient();
        const statusCheckingStarted = Date.now();
        // eslint-disable-next-line no-constant-condition
        while (true) {
            if (Date.now() >
                statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
                // If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
                // It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
                logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
                break;
            }
            let response = undefined;
            try {
                response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
                    owner: repositoryNwo.owner,
                    repo: repositoryNwo.repo,
                    sarif_id: sarifID,
                });
            }
            catch (e) {
                logger.warning(`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`);
                break;
            }
            const status = response.data.processing_status;
            logger.info(`Analysis upload status is ${status}.`);
            if (status === "pending") {
                logger.debug("Analysis processing is still pending...");
            }
            else if (options.isUnsuccessfulExecution) {
                // We expect a specific processing error for unsuccessful executions, so
                // handle these separately.
                handleProcessingResultForUnsuccessfulExecution(response, status, logger);
                break;
            }
            else if (status === "complete") {
                break;
            }
            else if (status === "failed") {
                throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
            }
            else {
                util.assertNever(status);
            }
            await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
        }
        let response = undefined;
        try {
            response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
                owner: repositoryNwo.owner,
                repo: repositoryNwo.repo,
                sarif_id: sarifID,
            });
        }
        catch (e) {
            logger.warning(`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`);
            break;
        }
        const status = response.data.processing_status;
        logger.info(`Analysis upload status is ${status}.`);
        if (status === "complete") {
            break;
        }
        else if (status === "pending") {
            logger.debug("Analysis processing is still pending...");
        }
        else if (status === "failed") {
            throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
        }
        await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
    }
    logger.endGroup();
    finally {
        logger.endGroup();
    }
}
exports.waitForProcessing = waitForProcessing;
/**
 * Checks the processing result for an unsuccessful execution. Throws if the
 * result is not a failure with a single "unsuccessful execution" error.
 */
function handleProcessingResultForUnsuccessfulExecution(response, status, logger) {
    if (status === "failed" &&
        Array.isArray(response.data.errors) &&
        response.data.errors.length === 1 &&
        response.data.errors[0].toString().startsWith("unsuccessful execution")) {
        logger.debug("Successfully uploaded a SARIF file for the unsuccessful execution. Received expected " +
            '"unsuccessful execution" error, and no other errors.');
    }
    else {
        const shortMessage = "Failed to upload a SARIF file for the unsuccessful execution. Code scanning status " +
            "information for the repository may be out of date as a result.";
        const longMessage = shortMessage + status === "failed"
            ? ` Processing errors: ${response.data.errors}`
            : ' Encountered no processing errors, but expected to receive an "unsuccessful execution" error.';
        logger.debug(longMessage);
        throw new Error(shortMessage);
    }
}
function validateUniqueCategory(sarif) {
    var _a, _b, _c;
    // duplicate categories are allowed in the same sarif file
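Two API changes stand out in the upload-lib hunks: uploadFromActions now takes the checkout path and category as explicit arguments, and waitForProcessing accepts an isUnsuccessfulExecution option that expects exactly one "unsuccessful execution" processing error. A sketch of a caller wiring the two together (illustrative only; the function name and the environment-variable handling are assumptions, not the action's actual code):

import * as uploadLib from "./upload-lib";
import { parseRepositoryNwo } from "./repository";
import { getActionsLogger } from "./logging";

async function uploadFailedRunSarif(sarifDir: string, category: string | undefined) {
  const logger = getActionsLogger();
  const repositoryNwo = parseRepositoryNwo(process.env["GITHUB_REPOSITORY"]!);

  // checkout_path and category are now passed explicitly rather than being
  // read from action inputs inside uploadFromActions.
  const uploadResult = await uploadLib.uploadFromActions(
    sarifDir,
    process.env["GITHUB_WORKSPACE"]!,
    category,
    logger,
  );

  // With isUnsuccessfulExecution set, processing must report exactly one
  // "unsuccessful execution" error, otherwise waitForProcessing throws.
  await uploadLib.waitForProcessing(repositoryNwo, uploadResult.sarifID, logger, {
    isUnsuccessfulExecution: true,
  });
  return uploadResult.statusReport;
}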
File diff suppressed because one or more lines are too long
lib/upload-sarif-action.js (generated, 2 changed lines)
@@ -43,7 +43,7 @@ async function run() {
        return;
    }
    try {
        const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), (0, logging_1.getActionsLogger)());
        const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), (0, logging_1.getActionsLogger)());
        core.setOutput("sarif-id", uploadResult.sarifID);
        // We don't upload results in test mode, so don't wait for processing
        if ((0, util_1.isInTestMode)()) {
@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAA,4BAAqB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACnC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAA,4BAAqB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACnC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
lib/util.js (generated, 33 changed lines)
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.shouldBypassToolcache = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
exports.parseMatrixInput = exports.shouldBypassToolcache = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
@@ -31,7 +31,6 @@ const core = __importStar(require("@actions/core"));
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const get_folder_size_1 = __importDefault(require("get-folder-size"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const api_client_1 = require("./api-client");
|
||||
const apiCompatibility = __importStar(require("./api-compatibility.json"));
|
||||
const codeql_1 = require("./codeql");
|
||||
@@ -530,26 +529,15 @@ exports.getMlPoweredJsQueriesStatus = getMlPoweredJsQueriesStatus;
|
||||
* Prompt the customer to upgrade to CodeQL Action v2, if appropriate.
|
||||
*
|
||||
* Check whether a customer is running v1. If they are, and we can determine that the GitHub
|
||||
* instance supports v2, then log a warning about v1's upcoming deprecation prompting the customer
|
||||
* to upgrade to v2.
|
||||
* instance supports v2, then log an error that v1 is deprecated and prompt the customer to
|
||||
* upgrade to v2.
|
||||
*/
|
||||
async function checkActionVersion(version) {
|
||||
var _a;
|
||||
if (!semver.satisfies(version, ">=2")) {
|
||||
const githubVersion = await api.getGitHubVersion();
|
||||
// Only log a warning for versions of GHES that are compatible with CodeQL Action version 2.
|
||||
//
|
||||
// GHES 3.4 shipped without the v2 tag, but it also shipped without this warning message code.
|
||||
// Therefore users who are seeing this warning message code have pulled in a new version of the
|
||||
// Action, and with it the v2 tag.
|
||||
if (githubVersion.type === GitHubVariant.DOTCOM ||
|
||||
githubVersion.type === GitHubVariant.GHAE ||
|
||||
(githubVersion.type === GitHubVariant.GHES &&
|
||||
semver.satisfies((_a = semver.coerce(githubVersion.version)) !== null && _a !== void 0 ? _a : "0.0.0", ">=3.4"))) {
|
||||
core.warning("CodeQL Action v1 will be deprecated on January 18th, 2023. Please upgrade to v2. For " +
|
||||
"more information, see " +
|
||||
"https://github.blog/changelog/2022-04-27-code-scanning-deprecation-of-codeql-action-v1/");
|
||||
}
|
||||
core.error("This version of the CodeQL Action was deprecated on January 18th, 2023, and is no longer " +
|
||||
"updated or supported. For better performance, improved security, and new features, " +
|
||||
"upgrade to v2. For more information, see " +
|
||||
"https://github.blog/changelog/2023-01-18-code-scanning-codeql-action-v1-is-now-deprecated/");
|
||||
}
|
||||
}
|
||||
exports.checkActionVersion = checkActionVersion;
|
||||
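// Illustrative sketch (not part of the generated file): the GHES version gate above can be
// exercised on its own. semver.coerce turns a GHES version string such as "3.4" into a full
// semver ("3.4.0") so that semver.satisfies can compare it against ">=3.4"; the "0.0.0"
// fallback keeps the check well-defined when the version cannot be parsed. The helper name
// below is hypothetical and exists only for this example.
function ghesSupportsActionV2(ghesVersion) {
    const coerced = semver.coerce(ghesVersion);
    return semver.satisfies(coerced !== null ? coerced : "0.0.0", ">=3.4");
}
// ghesSupportsActionV2("3.3")           => false
// ghesSupportsActionV2("3.4.0.pre1")    => true (coerced to "3.4.0")
// ghesSupportsActionV2("not-a-version") => false (falls back to "0.0.0")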
@@ -750,4 +738,11 @@ async function shouldBypassToolcache(featuresEnablement, codeqlUrl, languagesInput
    return bypass;
}
exports.shouldBypassToolcache = shouldBypassToolcache;
function parseMatrixInput(matrixInput) {
    if (matrixInput === undefined || matrixInput === "null") {
        return undefined;
    }
    return JSON.parse(matrixInput);
}
exports.parseMatrixInput = parseMatrixInput;
//# sourceMappingURL=util.js.map
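// Illustrative sketch (not part of the generated file): callers typically populate the
// matrix input from the expression `${{ toJson(matrix) }}`, which renders as the literal
// string "null" when the job has no matrix, so parseMatrixInput treats both that string and
// a missing input as "no matrix". The relative require path is an assumption for the example.
const { parseMatrixInput } = require("./util");
console.log(parseMatrixInput(undefined)); // undefined
console.log(parseMatrixInput("null")); // undefined
console.log(parseMatrixInput('{"language":"javascript"}')); // { language: 'javascript' }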
File diff suppressed because one or more lines are too long

20 lib/util.test.js generated
@@ -259,7 +259,7 @@ function formatGitHubVersion(version) {
const CHECK_ACTION_VERSION_TESTS = [
    ["1.2.1", { type: util.GitHubVariant.DOTCOM }, true],
    ["1.2.1", { type: util.GitHubVariant.GHAE }, true],
    ["1.2.1", { type: util.GitHubVariant.GHES, version: "3.3" }, false],
    ["1.2.1", { type: util.GitHubVariant.GHES, version: "3.3" }, true],
    ["1.2.1", { type: util.GitHubVariant.GHES, version: "3.4" }, true],
    ["1.2.1", { type: util.GitHubVariant.GHES, version: "3.5" }, true],
    ["2.2.1", { type: util.GitHubVariant.DOTCOM }, false],
@@ -268,22 +268,22 @@ const CHECK_ACTION_VERSION_TESTS = [
    ["2.2.1", { type: util.GitHubVariant.GHES, version: "3.4" }, false],
    ["2.2.1", { type: util.GitHubVariant.GHES, version: "3.5" }, false],
];
for (const [version, githubVersion, shouldReportWarning,] of CHECK_ACTION_VERSION_TESTS) {
    const reportWarningDescription = shouldReportWarning
        ? "reports warning"
        : "doesn't report warning";
for (const [version, githubVersion, shouldReportError,] of CHECK_ACTION_VERSION_TESTS) {
    const reportErrorDescription = shouldReportError
        ? "reports error"
        : "doesn't report error";
    const versionsDescription = `CodeQL Action version ${version} and GitHub version ${formatGitHubVersion(githubVersion)}`;
    (0, ava_1.default)(`checkActionVersion ${reportWarningDescription} for ${versionsDescription}`, async (t) => {
        const warningSpy = sinon.spy(core, "warning");
    (0, ava_1.default)(`checkActionVersion ${reportErrorDescription} for ${versionsDescription}`, async (t) => {
        const errorSpy = sinon.spy(core, "error");
        const versionStub = sinon
            .stub(api, "getGitHubVersion")
            .resolves(githubVersion);
        await util.checkActionVersion(version);
        if (shouldReportWarning) {
            t.true(warningSpy.calledOnceWithExactly(sinon.match("CodeQL Action v1 will be deprecated")));
        if (shouldReportError) {
            t.true(errorSpy.calledOnceWithExactly(sinon.match("This version of the CodeQL Action was deprecated on January 18th, 2023")));
        }
        else {
            t.false(warningSpy.called);
            t.false(errorSpy.called);
        }
        versionStub.restore();
    });
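// Illustrative sketch (not part of the generated file): each row of CHECK_ACTION_VERSION_TESTS
// is [actionVersion, gitHubVersion, shouldReportError], and the loop above turns every row into
// its own ava test. A hypothetical extra case for a newer GHES release would follow the same
// shape, e.g. ["1.2.1", { type: util.GitHubVariant.GHES, version: "3.6" }, true].
// sinon.spy(core, "error") records invocations of core.error without suppressing them, and
// sinon.stub(api, "getGitHubVersion").resolves(...) avoids a real API request inside the test.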
File diff suppressed because one or more lines are too long

362 lib/workflow.js generated Normal file
@@ -0,0 +1,362 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getCheckoutPathInputOrThrow = exports.getUploadInputOrThrow = exports.getCategoryInputOrThrow = exports.getWorkflowRunID = exports.getWorkflowPath = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const yaml = __importStar(require("js-yaml"));
const api = __importStar(require("./api-client"));
const util_1 = require("./util");
function isObject(o) {
    return o !== null && typeof o === "object";
}
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
function escapeRegExp(string) {
    return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
}
function patternToRegExp(value) {
    return new RegExp(`^${value
        .toString()
        .split(GLOB_PATTERN)
        .reduce(function (arr, cur) {
        if (cur === "**") {
            arr.push(".*?");
        }
        else if (cur === "*") {
            arr.push("[^/]*?");
        }
        else if (cur) {
            arr.push(escapeRegExp(cur));
        }
        return arr;
    }, [])
        .join("")}$`);
}
// this function should return true if patternA is a superset of patternB
// e.g: * is a superset of main-* but main-* is not a superset of *.
function patternIsSuperset(patternA, patternB) {
    return patternToRegExp(patternA).test(patternB);
}
exports.patternIsSuperset = patternIsSuperset;
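// Illustrative sketch (not part of the generated file): patternIsSuperset compiles patternA
// into an anchored regular expression via patternToRegExp and tests patternB against it, so
// "**" may span "/" separators while "*" stops at them. The expectations below mirror cases
// from lib/workflow.test.js.
const patternSupersetExamples = [
    ["*", "main-*", true],
    ["main-*", "*", false],
    ["**", "feature/*", true],
    ["*", "feature/*", false],
];
for (const [a, b, expected] of patternSupersetExamples) {
    console.assert(patternIsSuperset(a, b) === expected, `patternIsSuperset(${a}, ${b})`);
}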
function branchesToArray(branches) {
    if (typeof branches === "string") {
        return [branches];
    }
    if (Array.isArray(branches)) {
        if (branches.length === 0) {
            return "**";
        }
        return branches;
    }
    return "**";
}
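// Illustrative sketch (not part of the generated file): branchesToArray normalizes the
// `branches` value of a workflow trigger. A single string becomes a one-element array, a
// non-empty array passes through, and anything else (missing, empty, or malformed) becomes
// the "match everything" marker "**".
// branchesToArray("main")          => ["main"]
// branchesToArray(["main", "dev"]) => ["main", "dev"]
// branchesToArray([])              => "**"
// branchesToArray(undefined)       => "**"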
function toCodedErrors(errors) {
    return Object.entries(errors).reduce((acc, [code, message]) => {
        acc[code] = { message, code };
        return acc;
    }, {});
}
// code to send back via status report
// message to add as a warning annotation to the run
exports.WorkflowErrors = toCodedErrors({
    MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
    MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
    PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
    PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
    CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
});
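// Illustrative sketch (not part of the generated file): toCodedErrors keys each message by
// its code so callers can both annotate the run (message) and report telemetry (code). For
// example, WorkflowErrors.MissingPushHook has the shape
// { message: "Please specify an on.push hook ...", code: "MissingPushHook" }.
console.assert(exports.WorkflowErrors.MissingPushHook.code === "MissingPushHook");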
function getWorkflowErrors(doc) {
    var _a, _b, _c, _d, _e;
    const errors = [];
    const jobName = process.env.GITHUB_JOB;
    if (jobName) {
        const job = (_a = doc === null || doc === void 0 ? void 0 : doc.jobs) === null || _a === void 0 ? void 0 : _a[jobName];
        const steps = job === null || job === void 0 ? void 0 : job.steps;
        if (Array.isArray(steps)) {
            for (const step of steps) {
                // this was advice that we used to give in the README
                // we actually want to run the analysis on the merge commit
                // to produce results that are more inline with expectations
                // (i.e: this is what will happen if you merge this PR)
                // and avoid some race conditions
                if ((step === null || step === void 0 ? void 0 : step.run) === "git checkout HEAD^2") {
                    errors.push(exports.WorkflowErrors.CheckoutWrongHead);
                    break;
                }
            }
        }
    }
    let missingPush = false;
    if (doc.on === undefined) {
        // this is not a valid config
    }
    else if (typeof doc.on === "string") {
        if (doc.on === "pull_request") {
            missingPush = true;
        }
    }
    else if (Array.isArray(doc.on)) {
        const hasPush = doc.on.includes("push");
        const hasPullRequest = doc.on.includes("pull_request");
        if (hasPullRequest && !hasPush) {
            missingPush = true;
        }
    }
    else if (isObject(doc.on)) {
        const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
        const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
        if (!hasPush && hasPullRequest) {
            missingPush = true;
        }
        if (hasPush && hasPullRequest) {
            const paths = (_b = doc.on.push) === null || _b === void 0 ? void 0 : _b.paths;
            // if you specify paths or paths-ignore you can end up with commits that have no baseline
            // if they didn't change any files
            // currently we cannot go back through the history and find the most recent baseline
            if (Array.isArray(paths) && paths.length > 0) {
                errors.push(exports.WorkflowErrors.PathsSpecified);
            }
            const pathsIgnore = (_c = doc.on.push) === null || _c === void 0 ? void 0 : _c["paths-ignore"];
            if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
                errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
            }
        }
        // if doc.on.pull_request is null that means 'all branches'
        // if doc.on.pull_request is undefined that means 'off'
        // we only want to check for mismatched branches if pull_request is on.
        if (doc.on.pull_request !== undefined) {
            const push = branchesToArray((_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.branches);
            if (push !== "**") {
                const pull_request = branchesToArray((_e = doc.on.pull_request) === null || _e === void 0 ? void 0 : _e.branches);
                if (pull_request !== "**") {
                    const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
                    if (difference.length > 0) {
                        // there are branches in pull_request that may not have a baseline
                        // because we are not building them on push
                        errors.push(exports.WorkflowErrors.MismatchedBranches);
                    }
                }
                else if (push.length > 0) {
                    // push is set up to run on a subset of branches
                    // and you could open a PR against a branch with no baseline
                    errors.push(exports.WorkflowErrors.MismatchedBranches);
                }
            }
        }
    }
    if (missingPush) {
        errors.push(exports.WorkflowErrors.MissingPushHook);
    }
    return errors;
}
exports.getWorkflowErrors = getWorkflowErrors;
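// Illustrative sketch (not part of the generated file): feeding a minimal parsed workflow
// into getWorkflowErrors shows how a pull_request trigger without a matching push trigger is
// reported. The object below mirrors what js-yaml produces for `on: [pull_request]`.
const exampleErrors = getWorkflowErrors({ on: ["pull_request"] });
console.assert(exampleErrors.some((e) => e.code === "MissingPushHook"));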
async function validateWorkflow() {
    let workflow;
    try {
        workflow = await getWorkflow();
    }
    catch (e) {
        return `error: getWorkflow() failed: ${String(e)}`;
    }
    let workflowErrors;
    try {
        workflowErrors = getWorkflowErrors(workflow);
    }
    catch (e) {
        return `error: getWorkflowErrors() failed: ${String(e)}`;
    }
    if (workflowErrors.length > 0) {
        let message;
        try {
            message = formatWorkflowErrors(workflowErrors);
        }
        catch (e) {
            return `error: formatWorkflowErrors() failed: ${String(e)}`;
        }
        core.warning(message);
    }
    return formatWorkflowCause(workflowErrors);
}
exports.validateWorkflow = validateWorkflow;
function formatWorkflowErrors(errors) {
    const issuesWere = errors.length === 1 ? "issue was" : "issues were";
    const errorsList = errors.map((e) => e.message).join(" ");
    return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
}
exports.formatWorkflowErrors = formatWorkflowErrors;
function formatWorkflowCause(errors) {
    if (errors.length === 0) {
        return undefined;
    }
    return errors.map((e) => e.code).join(",");
}
exports.formatWorkflowCause = formatWorkflowCause;
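// Illustrative sketch (not part of the generated file): formatWorkflowErrors builds the
// human-readable warning while formatWorkflowCause builds the comma-separated code list that
// is sent back in the status report (or undefined when there is nothing to report). The
// expectations below mirror cases from lib/workflow.test.js.
const exampleIssues = [exports.WorkflowErrors.CheckoutWrongHead, exports.WorkflowErrors.PathsSpecified];
console.assert(formatWorkflowErrors(exampleIssues).startsWith("2 issues were detected with this workflow:"));
console.assert(formatWorkflowCause(exampleIssues) === "CheckoutWrongHead,PathsSpecified");
console.assert(formatWorkflowCause([]) === undefined);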
async function getWorkflow() {
    const relativePath = await getWorkflowPath();
    const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
    try {
        return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
    }
    catch (e) {
        if (e instanceof Error && e["code"] === "ENOENT") {
            throw new Error(`Unable to load code scanning workflow from ${absolutePath}. This can happen if the currently ` +
                "running workflow checks out a branch that doesn't contain the corresponding workflow file.");
        }
        throw e;
    }
}
exports.getWorkflow = getWorkflow;
/**
 * Get the path of the currently executing workflow.
 */
async function getWorkflowPath() {
    const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
    const owner = repo_nwo[0];
    const repo = repo_nwo[1];
    const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
    const apiClient = api.getApiClient();
    const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
        owner,
        repo,
        run_id,
    });
    const workflowUrl = runsResponse.data.workflow_url;
    const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
    return workflowResponse.data.path;
}
exports.getWorkflowPath = getWorkflowPath;
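// Illustrative sketch (not part of the generated file): getWorkflowPath resolves the workflow
// file for the current run in two requests. It first fetches the run
// (GET /repos/{owner}/{repo}/actions/runs/{run_id}) to obtain workflow_url, then fetches that
// workflow object, whose `path` field is a repository-relative path such as
// ".github/workflows/codeql.yml" (an assumed example value). getWorkflow() joins that path
// onto GITHUB_WORKSPACE and parses the file with js-yaml, so it only succeeds when the
// checked-out branch actually contains the workflow file.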
/**
 * Get the workflow run ID.
 */
function getWorkflowRunID() {
    const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
    if (Number.isNaN(workflowRunID)) {
        throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
    }
    return workflowRunID;
}
exports.getWorkflowRunID = getWorkflowRunID;
function getStepsCallingAction(job, actionName) {
    if (job.uses) {
        throw new Error(`Could not get steps calling ${actionName} since the job calls a reusable workflow.`);
    }
    const steps = job.steps;
    if (!Array.isArray(steps)) {
        throw new Error(`Could not get steps calling ${actionName} since job.steps was not an array.`);
    }
    return steps.filter((step) => { var _a; return (_a = step.uses) === null || _a === void 0 ? void 0 : _a.includes(actionName); });
}
/**
 * Makes a best effort attempt to retrieve the value of a particular input with which
 * an Action in the workflow would be invoked.
 *
 * Typically you'll want to wrap this function in a try/catch block and handle the error.
 *
 * @returns the value of the input, or undefined if no such input is passed to the Action
 * @throws an error if the value of the input could not be determined, or we could not
 * determine that no such input is passed to the Action.
 */
function getInputOrThrow(workflow, jobName, actionName, inputName, matrixVars) {
    var _a, _b;
    const preamble = `Could not get ${inputName} input to ${actionName} since`;
    if (!workflow.jobs) {
        throw new Error(`${preamble} the workflow has no jobs.`);
    }
    if (!workflow.jobs[jobName]) {
        throw new Error(`${preamble} the workflow has no job named ${jobName}.`);
    }
    const stepsCallingAction = getStepsCallingAction(workflow.jobs[jobName], actionName);
    if (stepsCallingAction.length === 0) {
        throw new Error(`${preamble} the ${jobName} job does not call ${actionName}.`);
    }
    else if (stepsCallingAction.length > 1) {
        throw new Error(`${preamble} the ${jobName} job calls ${actionName} multiple times.`);
    }
    let input = (_b = (_a = stepsCallingAction[0].with) === null || _a === void 0 ? void 0 : _a[inputName]) === null || _b === void 0 ? void 0 : _b.toString();
    if (input !== undefined && matrixVars !== undefined) {
        // Normalize by removing whitespace
        input = input.replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}");
        // Make a basic attempt to substitute matrix variables
        for (const [key, value] of Object.entries(matrixVars)) {
            input = input.replace(`\${{matrix.${key}}}`, value);
        }
    }
    if (input !== undefined && input.includes("${{")) {
        throw new Error(`Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.`);
    }
    return input;
}
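// Illustrative sketch (not part of the generated file): after whitespace normalization,
// "${{ matrix.language }}" becomes "${{matrix.language}}", which the loop above can replace
// with the supplied matrix value; any other "${{ ... }}" expression is left in place and
// triggers the "unrecognized dynamic value" error. The workflow object below is an assumed
// example, passing the action name directly.
const exampleWorkflowForInput = {
    jobs: {
        analysis: {
            steps: [
                { uses: "github/codeql-action/analyze@v2", with: { category: "/language:${{ matrix.language }}" } },
            ],
        },
    },
};
console.assert(getInputOrThrow(exampleWorkflowForInput, "analysis", "github/codeql-action/analyze", "category", { language: "javascript" }) === "/language:javascript");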
/**
 * Get the expected name of the analyze Action.
 *
 * This allows us to test workflow parsing functionality as a CodeQL Action PR check.
 */
function getAnalyzeActionName() {
    if ((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY") === "github/codeql-action") {
        return "./analyze";
    }
    else {
        return "github/codeql-action/analyze";
    }
}
/**
 * Makes a best effort attempt to retrieve the category input for the particular job,
 * given a set of matrix variables.
 *
 * Typically you'll want to wrap this function in a try/catch block and handle the error.
 *
 * @returns the category input, or undefined if the category input is not defined
 * @throws an error if the category input could not be determined
 */
function getCategoryInputOrThrow(workflow, jobName, matrixVars) {
    return getInputOrThrow(workflow, jobName, getAnalyzeActionName(), "category", matrixVars);
}
exports.getCategoryInputOrThrow = getCategoryInputOrThrow;
/**
 * Makes a best effort attempt to retrieve the upload input for the particular job,
 * given a set of matrix variables.
 *
 * Typically you'll want to wrap this function in a try/catch block and handle the error.
 *
 * @returns the upload input
 * @throws an error if the upload input could not be determined
 */
function getUploadInputOrThrow(workflow, jobName, matrixVars) {
    return (getInputOrThrow(workflow, jobName, getAnalyzeActionName(), "upload", matrixVars) || "true" // if unspecified, upload defaults to true
    );
}
exports.getUploadInputOrThrow = getUploadInputOrThrow;
/**
 * Makes a best effort attempt to retrieve the checkout_path input for the
 * particular job, given a set of matrix variables.
 *
 * Typically you'll want to wrap this function in a try/catch block and handle the error.
 *
 * @returns the checkout_path input
 * @throws an error if the checkout_path input could not be determined
 */
function getCheckoutPathInputOrThrow(workflow, jobName, matrixVars) {
    return (getInputOrThrow(workflow, jobName, getAnalyzeActionName(), "checkout_path", matrixVars) || (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE") // if unspecified, checkout_path defaults to ${{ github.workspace }}
    );
}
exports.getCheckoutPathInputOrThrow = getCheckoutPathInputOrThrow;
//# sourceMappingURL=workflow.js.map
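// Illustrative sketch (not part of the generated file): the three exported helpers above are
// thin wrappers around getInputOrThrow for the analyze Action; upload defaults to "true" and
// checkout_path defaults to GITHUB_WORKSPACE when the workflow does not set them. The require
// path, repository name, and workflow object below are assumptions for the example only.
const { getUploadInputOrThrow } = require("./workflow");
process.env.GITHUB_REPOSITORY = process.env.GITHUB_REPOSITORY || "example-org/example-repo";
const exampleWorkflow = {
    jobs: {
        analysis: {
            steps: [{ uses: "github/codeql-action/analyze@v2" }],
        },
    },
};
// No `upload` input is configured, so the documented default of "true" is returned.
console.log(getUploadInputOrThrow(exampleWorkflow, "analysis", undefined)); // "true"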
1 lib/workflow.js.map Normal file
File diff suppressed because one or more lines are too long

463 lib/workflow.test.js generated Normal file
@@ -0,0 +1,463 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const yaml = __importStar(require("js-yaml"));
const testing_utils_1 = require("./testing-utils");
const workflow_1 = require("./workflow");
function errorCodes(actual, expected) {
    return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
}
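// Illustrative sketch (not part of the generated file): errorCodes projects both the actual
// and the expected error lists down to their `code` fields so that t.deepEqual compares codes
// rather than whole error objects. Spreading its result keeps each assertion on one line:
//
//     t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MissingPushHook]));
//
// which is equivalent to t.deepEqual(errors.map((e) => e.code), ["MissingPushHook"]).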
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("getWorkflowErrors() when on is empty", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({ on: {} });
    t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({ on: ["push"] });
    t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing push", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({ on: ["pull_request"] });
    t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MissingPushHook]));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is valid", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({
        on: ["push", "pull_request"],
    });
    t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is a valid superset", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({
        on: ["push", "pull_request", "schedule"],
    });
    t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push should not have a path", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({
        on: {
            push: { branches: ["main"], paths: ["test/*"] },
            pull_request: { branches: ["main"] },
        },
    });
    t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.PathsSpecified]));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is a correct object", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({
        on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
    });
    t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({
        on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
    });
    t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({
        on: { push: { branches: "*" }, pull_request: { branches: "*" } },
    });
    t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
on:
  push:
  pull_request:
`));
    t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({
        on: {
            push: { branches: ["main"] },
            pull_request: { branches: ["feature"] },
        },
    });
    t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is not mismatched", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({
        on: {
            push: { branches: ["main", "feature"] },
            pull_request: { branches: ["main"] },
        },
    });
    t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({
        on: {
            push: { branches: ["main"] },
            pull_request: { branches: ["main", "feature"] },
        },
    });
    t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() for a range of malformed workflows", (t) => {
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: {
            push: 1,
            pull_request: 1,
        },
    }), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: 1,
    }), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: 1,
        jobs: 1,
    }), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: 1,
        jobs: [1],
    }), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: 1,
        jobs: { 1: 1 },
    }), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: 1,
        jobs: { test: 1 },
    }), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: 1,
        jobs: { test: [1] },
    }), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: 1,
        jobs: { test: { steps: 1 } },
    }), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: 1,
        jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
    }), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: 1,
        jobs: { test: [undefined] },
    }), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(1), []));
    t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
        on: {
            push: {
                branches: 1,
            },
            pull_request: {
                branches: 1,
            },
        },
    }), []));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
name: "CodeQL"
on:
  push:
    branches: ["main"]
  pull_request:
`));
    t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
    const errors = (0, workflow_1.getWorkflowErrors)({
        on: {
            push: { branches: ["feature/*"] },
            pull_request: { branches: "feature/moose" },
        },
    });
    t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||
on: {
|
||||
push: { branches: ["feature/moose"] },
|
||||
pull_request: { branches: "feature/*" },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||
process.env.GITHUB_JOB = "test";
|
||||
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||
on: ["push", "pull_request"],
|
||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.CheckoutWrongHead]));
|
||||
});
|
||||
(0, ava_1.default)("formatWorkflowErrors() when there is one error", (t) => {
|
||||
const message = (0, workflow_1.formatWorkflowErrors)([workflow_1.WorkflowErrors.CheckoutWrongHead]);
|
||||
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
||||
});
|
||||
(0, ava_1.default)("formatWorkflowErrors() when there are multiple errors", (t) => {
|
||||
const message = (0, workflow_1.formatWorkflowErrors)([
|
||||
workflow_1.WorkflowErrors.CheckoutWrongHead,
|
||||
workflow_1.WorkflowErrors.PathsSpecified,
|
||||
]);
|
||||
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
||||
});
|
||||
(0, ava_1.default)("formatWorkflowCause() with no errors", (t) => {
|
||||
const message = (0, workflow_1.formatWorkflowCause)([]);
|
||||
t.deepEqual(message, undefined);
|
||||
});
|
||||
(0, ava_1.default)("formatWorkflowCause()", (t) => {
|
||||
const message = (0, workflow_1.formatWorkflowCause)([
|
||||
workflow_1.WorkflowErrors.CheckoutWrongHead,
|
||||
workflow_1.WorkflowErrors.PathsSpecified,
|
||||
]);
|
||||
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
||||
t.deepEqual((0, workflow_1.formatWorkflowCause)([]), undefined);
|
||||
});
|
||||
(0, ava_1.default)("patternIsSuperset()", (t) => {
|
||||
t.false((0, workflow_1.patternIsSuperset)("main-*", "main"));
|
||||
t.true((0, workflow_1.patternIsSuperset)("*", "*"));
|
||||
t.true((0, workflow_1.patternIsSuperset)("*", "main-*"));
|
||||
t.false((0, workflow_1.patternIsSuperset)("main-*", "*"));
|
||||
t.false((0, workflow_1.patternIsSuperset)("main-*", "main"));
|
||||
t.true((0, workflow_1.patternIsSuperset)("main", "main"));
|
||||
t.false((0, workflow_1.patternIsSuperset)("*", "feature/*"));
|
||||
t.true((0, workflow_1.patternIsSuperset)("**", "feature/*"));
|
||||
t.false((0, workflow_1.patternIsSuperset)("feature-*", "**"));
|
||||
t.false((0, workflow_1.patternIsSuperset)("a/**/c", "a/**/d"));
|
||||
t.false((0, workflow_1.patternIsSuperset)("a/**/c", "a/**"));
|
||||
t.true((0, workflow_1.patternIsSuperset)("a/**", "a/**/c"));
|
||||
t.true((0, workflow_1.patternIsSuperset)("a/**/c", "a/main-**/c"));
|
||||
t.false((0, workflow_1.patternIsSuperset)("a/**/b/**/c", "a/**/d/**/c"));
|
||||
t.true((0, workflow_1.patternIsSuperset)("a/**/b/**/c", "a/**/b/c/**/c"));
|
||||
t.true((0, workflow_1.patternIsSuperset)("a/**/b/**/c", "a/**/b/d/**/c"));
|
||||
t.false((0, workflow_1.patternIsSuperset)("a/**/c/d/**/c", "a/**/b/**/c"));
|
||||
t.false((0, workflow_1.patternIsSuperset)("a/main-**/c", "a/**/c"));
|
||||
t.true((0, workflow_1.patternIsSuperset)("/robin/*/release/*", "/robin/moose/release/goose"));
|
||||
t.false((0, workflow_1.patternIsSuperset)("/robin/moose/release/goose", "/robin/*/release/*"));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when branches contain dots", (t) => {
|
||||
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||
on:
|
||||
push:
|
||||
branches: [4.1, master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [4.1, master]
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master, ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [master]
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||
process.env.GITHUB_JOB = "test";
|
||||
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [master]
|
||||
jobs:
|
||||
test:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test2:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test3:
|
||||
steps: []
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.CheckoutWrongHead]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||
process.env.GITHUB_JOB = "test3";
|
||||
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [master]
|
||||
jobs:
|
||||
test:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test2:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test3:
|
||||
steps: []
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on is missing", (t) => {
|
||||
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||
name: "CodeQL"
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() with a different on setup", (t) => {
|
||||
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on: "workflow_dispatch"
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on: [workflow_dispatch]
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
workflow_dispatch: {}
|
||||
`)), []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on: ["push"]
|
||||
`)), []));
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow returns category for simple workflow with category", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
analysis:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: github/codeql-action/init@v2
|
||||
- uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: some-category
|
||||
`), "analysis", {}), "some-category");
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow returns undefined for simple workflow without category", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
analysis:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: github/codeql-action/init@v2
|
||||
- uses: github/codeql-action/analyze@v2
|
||||
`), "analysis", {}), undefined);
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow returns category for workflow with multiple jobs", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
foo:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: github/codeql-action/init@v2
|
||||
- runs: ./build foo
|
||||
- uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: foo-category
|
||||
bar:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: github/codeql-action/init@v2
|
||||
- runs: ./build bar
|
||||
- uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: bar-category
|
||||
`), "bar", {}), "bar-category");
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow finds category for workflow with language matrix", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
analysis:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
language: [javascript, python]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: github/codeql-action/init@v2
|
||||
with:
|
||||
language: \${{ matrix.language }}
|
||||
- uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: "/language:\${{ matrix.language }}"
|
||||
`), "analysis", { language: "javascript" }), "/language:javascript");
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow throws error for workflow with dynamic category", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.throws(() => (0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
analysis:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: github/codeql-action/init@v2
|
||||
- uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: "\${{ github.workflow }}"
|
||||
`), "analysis", {}), {
|
||||
message: "Could not get category input to github/codeql-action/analyze since it contained " +
|
||||
"an unrecognized dynamic value.",
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getCategoryInputOrThrow throws error for workflow with multiple calls to analyze", (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
t.throws(() => (0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||
jobs:
|
||||
analysis:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: github/codeql-action/init@v2
|
||||
- uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: some-category
|
||||
- uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: another-category
|
||||
`), "analysis", {}), {
|
||||
message: "Could not get category input to github/codeql-action/analyze since the analysis job " +
|
||||
"calls github/codeql-action/analyze multiple times.",
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=workflow.test.js.map
|
||||
1 lib/workflow.test.js.map Normal file
File diff suppressed because one or more lines are too long

47 node_modules/.package-lock.json generated vendored
@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "1.1.34",
"version": "1.1.39",
"lockfileVersion": 3,
"requires": true,
"packages": {
@@ -1924,13 +1924,6 @@
}
}
},
"node_modules/decode-uri-component": {
"version": "0.2.0",
"integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=",
"engines": {
"node": ">=0.10"
}
},
"node_modules/deep-is": {
"version": "0.1.3",
"integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=",
@@ -3046,13 +3039,6 @@
"node": ">=8"
}
},
"node_modules/filter-obj": {
"version": "1.1.0",
"integrity": "sha1-mzERErxsYSehbgFsbF1/GeCAXFs=",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/find-up": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-6.2.0.tgz",
@@ -4559,23 -4545,6 @@
"node": ">=6"
}
},
"node_modules/query-string": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/query-string/-/query-string-7.0.1.tgz",
"integrity": "sha512-uIw3iRvHnk9to1blJCG3BTc+Ro56CBowJXKmNNAm3RulvPBzWLRqKSiiDk+IplJhsydwtuNMHi8UGQFcCLVfkA==",
"dependencies": {
"decode-uri-component": "^0.2.0",
"filter-obj": "^1.1.0",
"split-on-first": "^1.0.0",
"strict-uri-encode": "^2.0.0"
},
"engines": {
"node": ">=6"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/read-pkg-up": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz",
@@ -5041,13 +5010,6 @@
"integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==",
"dev": true
},
"node_modules/split-on-first": {
"version": "1.1.0",
"integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==",
"engines": {
"node": ">=6"
}
},
"node_modules/sprintf-js": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
@@ -5075,13 +5037,6 @@
"node": ">=8"
}
},
"node_modules/strict-uri-encode": {
"version": "2.0.0",
"integrity": "sha1-ucczDHBChi9rFC3CdLvMWGbONUY=",
"engines": {
"node": ">=4"
}
},
"node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
Some files were not shown because too many files have changed in this diff