mirror of
https://github.com/github/codeql-action.git
synced 2025-12-26 01:00:20 +08:00
Compare commits
179 Commits
codeql-bun
...
henrymerce
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e8c12e1f7d | ||
|
|
2f7b9a1280 | ||
|
|
23d151db1c | ||
|
|
c3be36f5cb | ||
|
|
bd2f52fcef | ||
|
|
a76fe4f9bd | ||
|
|
a6dff04fe1 | ||
|
|
cdb90196f2 | ||
|
|
6ba0a36550 | ||
|
|
4a918790cd | ||
|
|
42d6d35dd1 | ||
|
|
e009918fbc | ||
|
|
70a288daae | ||
|
|
bdc7c5d203 | ||
|
|
272d916f23 | ||
|
|
f12f76f047 | ||
|
|
28a9b2d6d7 | ||
|
|
9f8ddbdfd7 | ||
|
|
9203e314a3 | ||
|
|
80b12d6f73 | ||
|
|
620a267204 | ||
|
|
bac4fe1a38 | ||
|
|
166d98c19e | ||
|
|
a9337bc304 | ||
|
|
4023575d64 | ||
|
|
cf1437a514 | ||
|
|
f9c9a2567c | ||
|
|
b9c859bfa1 | ||
|
|
b4187d626b | ||
|
|
bfbb7ab03c | ||
|
|
4e5a06f009 | ||
|
|
e8f7169839 | ||
|
|
6ce923c375 | ||
|
|
b2b478264a | ||
|
|
5eba74a3c9 | ||
|
|
ff3337ee1b | ||
|
|
484236cda4 | ||
|
|
f837e8e761 | ||
|
|
ef21864950 | ||
|
|
4789c1331c | ||
|
|
59ebabde5d | ||
|
|
3224214d91 | ||
|
|
e09fbf5b4a | ||
|
|
e9ff99b027 | ||
|
|
8b9e982393 | ||
|
|
8d1e008ecb | ||
|
|
579411fb6c | ||
|
|
e4818d46c4 | ||
|
|
4778dfbd93 | ||
|
|
0a3f985290 | ||
|
|
04f1897968 | ||
|
|
6ac6037211 | ||
|
|
959cbb7472 | ||
|
|
10ca836463 | ||
|
|
d58039a1e3 | ||
|
|
37a4496237 | ||
|
|
b7028afcb4 | ||
|
|
f629dada4c | ||
|
|
ccee4c68ff | ||
|
|
899bf9c076 | ||
|
|
dd7c3ef80e | ||
|
|
b7b875efff | ||
|
|
53ab991fbe | ||
|
|
54d25f56dd | ||
|
|
d827cf3d65 | ||
|
|
9438015b82 | ||
|
|
5aced81848 | ||
|
|
118e294bb9 | ||
|
|
dc9c1c1a51 | ||
|
|
a409f43c7a | ||
|
|
e67ad6aaed | ||
|
|
896079047b | ||
|
|
e58b8d6a61 | ||
|
|
01330498de | ||
|
|
a669cc5936 | ||
|
|
6fec2ab57a | ||
|
|
aab7a26877 | ||
|
|
2b971a70bb | ||
|
|
4e81e2933a | ||
|
|
bf944d782b | ||
|
|
566a5e6727 | ||
|
|
10c89976dc | ||
|
|
8121f62c54 | ||
|
|
104319fe98 | ||
|
|
aba18b82f7 | ||
|
|
4a5ad5af18 | ||
|
|
19f867a052 | ||
|
|
5e452f0d9d | ||
|
|
8bebf77dbd | ||
|
|
fb74504ab5 | ||
|
|
c51babb6c6 | ||
|
|
79166d0788 | ||
|
|
44ef9d902a | ||
|
|
384a214d60 | ||
|
|
697ed97fa5 | ||
|
|
2207a72006 | ||
|
|
4623c8edb6 | ||
|
|
9085295c40 | ||
|
|
3b0a2f607d | ||
|
|
5566638d56 | ||
|
|
27c1438455 | ||
|
|
1e8d3b8fca | ||
|
|
7fc528c3c6 | ||
|
|
2cbc140ac5 | ||
|
|
1653364141 | ||
|
|
61cc378b7f | ||
|
|
7aa5026a55 | ||
|
|
c80f00a5c9 | ||
|
|
62b14cbbad | ||
|
|
794a4b543a | ||
|
|
ee6ba9c213 | ||
|
|
81f99a8582 | ||
|
|
4b18b7bc24 | ||
|
|
4acf201e5b | ||
|
|
1e5919b22d | ||
|
|
375dacad24 | ||
|
|
e0ff272230 | ||
|
|
aa0e650c6a | ||
|
|
98b2ddc7f9 | ||
|
|
0d9b15ca93 | ||
|
|
b1c26c4c53 | ||
|
|
21044b004d | ||
|
|
0828b04e3a | ||
|
|
b2a92eb56d | ||
|
|
075b74d36e | ||
|
|
9af9a11da8 | ||
|
|
a631f4b016 | ||
|
|
77cda4d75d | ||
|
|
3d90c4f911 | ||
|
|
1384ce4ab3 | ||
|
|
6c5cad784c | ||
|
|
58b2ab08a8 | ||
|
|
e0dec83cfc | ||
|
|
00a3c456fb | ||
|
|
e628ee0ae1 | ||
|
|
605b23d10b | ||
|
|
160613c380 | ||
|
|
caf1c5057b | ||
|
|
c62445de22 | ||
|
|
7fc3c603aa | ||
|
|
24fd4c0f4e | ||
|
|
d0517be03a | ||
|
|
37b4358e44 | ||
|
|
122b180b66 | ||
|
|
8337c2be0f | ||
|
|
5296a763b1 | ||
|
|
3afc2b194c | ||
|
|
3cf2a1ba2e | ||
|
|
4d4e25083a | ||
|
|
9de6c31571 | ||
|
|
44ae944a29 | ||
|
|
e2338066a1 | ||
|
|
8f05fcd048 | ||
|
|
9dac9f748a | ||
|
|
c6e756bb39 | ||
|
|
8d8b898c0f | ||
|
|
312e093a18 | ||
|
|
aef0440821 | ||
|
|
9f2aa7ec75 | ||
|
|
7e73dedacc | ||
|
|
102e01da36 | ||
|
|
eb19ecbad1 | ||
|
|
f0a052e8b4 | ||
|
|
ead8da6199 | ||
|
|
c61f4c61f8 | ||
|
|
ad7ca9bf21 | ||
|
|
f79028af27 | ||
|
|
909c8687d5 | ||
|
|
5b7c9daecd | ||
|
|
ce90479412 | ||
|
|
bff0be7364 | ||
|
|
daf4614f68 | ||
|
|
e2d523ca5e | ||
|
|
996d04b1e5 | ||
|
|
79f8286c68 | ||
|
|
d52e657b2e | ||
|
|
187515b328 | ||
|
|
bc341c5dd1 | ||
|
|
39fe7aa8a1 |
21
.github/dependabot.yml
vendored
21
.github/dependabot.yml
vendored
@@ -1,20 +1,17 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "npm"
|
||||
- package-ecosystem: npm
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
day: "thursday" # Gives us a working day to merge this before our typical release
|
||||
interval: weekly
|
||||
labels:
|
||||
- "Update dependencies"
|
||||
- Update dependencies
|
||||
ignore:
|
||||
- dependency-name: "*"
|
||||
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/runner"
|
||||
update-types:
|
||||
- version-update:semver-minor
|
||||
- version-update:semver-patch
|
||||
- package-ecosystem: github-actions
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
day: "thursday" # Gives us a working day to merge this before our typical release
|
||||
ignore:
|
||||
- dependency-name: "*"
|
||||
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
|
||||
interval: weekly
|
||||
|
||||
32
.github/setup-swift/action.yml
vendored
Normal file
32
.github/setup-swift/action.yml
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
name: "Set up Swift"
|
||||
description: Performs necessary steps to set up appropriate Swift version.
|
||||
inputs:
|
||||
codeql-path:
|
||||
required: true
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Get Swift version
|
||||
id: get_swift_version
|
||||
# We don't support Swift on Windows or prior versions of CLI.
|
||||
if: "(runner.os != 'Windows') && (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version == 'nightly-latest')"
|
||||
shell: bash
|
||||
env:
|
||||
CODEQL_PATH: ${{inputs.codeql-path}}
|
||||
run: |
|
||||
if [ $RUNNER_OS = "macOS" ]; then
|
||||
PLATFORM="osx64"
|
||||
else # We do not run this step on Windows.
|
||||
PLATFORM="linux64"
|
||||
fi
|
||||
SWIFT_EXTRACTOR_DIR="$("$CODEQL_PATH" resolve languages --format json | jq -r '.swift[0]')"
|
||||
VERSION="$("$SWIFT_EXTRACTOR_DIR/tools/$PLATFORM/extractor" --version | awk '/version/ { print $3 }')"
|
||||
# Specify 5.7.0, otherwise setup Action will default to latest minor version.
|
||||
if [ $VERSION = "5.7" ]; then
|
||||
VERSION="5.7.0"
|
||||
fi
|
||||
echo "version=$VERSION" | tee -a $GITHUB_OUTPUT
|
||||
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||
if: "(runner.os != 'Windows') && (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version == 'nightly-latest')"
|
||||
with:
|
||||
swift-version: "${{steps.get_swift_version.outputs.version}}"
|
||||
1
.github/workflows/__analyze-ref-input.yml
generated
vendored
1
.github/workflows/__analyze-ref-input.yml
generated
vendored
@@ -7,6 +7,7 @@ name: "PR Check - Analyze: 'ref' and 'sha' from inputs"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__autobuild-action.yml
generated
vendored
1
.github/workflows/__autobuild-action.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - autobuild-action
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
12
.github/workflows/__export-file-baseline-information.yml
generated
vendored
12
.github/workflows/__export-file-baseline-information.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Export file baseline information
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -42,17 +43,16 @@ jobs:
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
if: runner.os != 'Windows'
|
||||
with:
|
||||
swift-version: '5.7'
|
||||
- uses: ./../action/init
|
||||
id: init
|
||||
with:
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
env:
|
||||
CODEQL_FILE_BASELINE_INFORMATION: true
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true
|
||||
- uses: ./../action/.github/setup-swift
|
||||
with:
|
||||
codeql-path: ${{steps.init.outputs.codeql-path}}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
@@ -61,7 +61,6 @@ jobs:
|
||||
output: ${{ runner.temp }}/results
|
||||
env:
|
||||
CODEQL_FILE_BASELINE_INFORMATION: true
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true
|
||||
- name: Upload SARIF
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
@@ -86,4 +85,5 @@ jobs:
|
||||
fi
|
||||
done
|
||||
env:
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true # Remove when Swift is GA.
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
1
.github/workflows/__extractor-ram-threads.yml
generated
vendored
1
.github/workflows/__extractor-ram-threads.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Extractor ram and threads options test
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__go-custom-queries.yml
generated
vendored
1
.github/workflows/__go-custom-queries.yml
generated
vendored
@@ -7,6 +7,7 @@ name: 'PR Check - Go: Custom queries'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__go-tracing-autobuilder.yml
generated
vendored
1
.github/workflows/__go-tracing-autobuilder.yml
generated
vendored
@@ -7,6 +7,7 @@ name: 'PR Check - Go: tracing with autobuilder step'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__go-tracing-custom-build-steps.yml
generated
vendored
1
.github/workflows/__go-tracing-custom-build-steps.yml
generated
vendored
@@ -7,6 +7,7 @@ name: 'PR Check - Go: tracing with custom build steps'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__go-tracing-legacy-workflow.yml
generated
vendored
1
.github/workflows/__go-tracing-legacy-workflow.yml
generated
vendored
@@ -7,6 +7,7 @@ name: 'PR Check - Go: tracing with legacy workflow'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__init-with-registries.yml
generated
vendored
1
.github/workflows/__init-with-registries.yml
generated
vendored
@@ -7,6 +7,7 @@ name: 'PR Check - Packaging: Download using registries'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__javascript-source-root.yml
generated
vendored
1
.github/workflows/__javascript-source-root.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Custom source root
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__ml-powered-queries.yml
generated
vendored
1
.github/workflows/__ml-powered-queries.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - ML-powered queries
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
15
.github/workflows/__multi-language-autodetect.yml
generated
vendored
15
.github/workflows/__multi-language-autodetect.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Multi-language repository
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -65,16 +66,16 @@ jobs:
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
if: runner.os != 'Windows'
|
||||
with:
|
||||
swift-version: '5.7'
|
||||
|
||||
- uses: ./../action/init
|
||||
id: init
|
||||
with:
|
||||
db-location: ${{ runner.temp }}/customDbLocation
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- uses: ./../action/.github/setup-swift
|
||||
with:
|
||||
codeql-path: ${{steps.init.outputs.codeql-path}}
|
||||
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
@@ -128,8 +129,8 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Check language autodetect for Swift
|
||||
if: "!startsWith(matrix.os, 'windows') && (matrix.version == 'cached' || matrix.version\
|
||||
\ == 'latest' || matrix.version == 'nightly-latest')"
|
||||
if: (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version
|
||||
== 'nightly-latest')
|
||||
shell: bash
|
||||
run: |
|
||||
SWIFT_DB=${{ fromJson(steps.analysis.outputs.db-locations).swift }}
|
||||
|
||||
1
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
1
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
@@ -7,6 +7,7 @@ name: 'PR Check - Packaging: Config and input passed to the CLI'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
1
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
@@ -7,6 +7,7 @@ name: 'PR Check - Packaging: Config and input'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__packaging-config-js.yml
generated
vendored
1
.github/workflows/__packaging-config-js.yml
generated
vendored
@@ -7,6 +7,7 @@ name: 'PR Check - Packaging: Config file'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__packaging-inputs-js.yml
generated
vendored
1
.github/workflows/__packaging-inputs-js.yml
generated
vendored
@@ -7,6 +7,7 @@ name: 'PR Check - Packaging: Action input'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__remote-config.yml
generated
vendored
1
.github/workflows/__remote-config.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Remote config file
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__rubocop-multi-language.yml
generated
vendored
1
.github/workflows/__rubocop-multi-language.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - RuboCop multi-language
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__ruby.yml
generated
vendored
1
.github/workflows/__ruby.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Ruby analysis
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__split-workflow.yml
generated
vendored
1
.github/workflows/__split-workflow.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Split workflow
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
73
.github/workflows/__submit-sarif-failure.yml
generated
vendored
Normal file
73
.github/workflows/__submit-sarif-failure.yml
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Submit SARIF after failure
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
submit-sarif-failure:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: Submit SARIF after failure
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./init
|
||||
with:
|
||||
languages: javascript
|
||||
- name: Fail
|
||||
# We want this job to pass if the Action correctly uploads the SARIF file for
|
||||
# the failed run.
|
||||
# Setting this step to continue on error means that it is marked as completing
|
||||
# successfully, so will not fail the job.
|
||||
continue-on-error: true
|
||||
run: exit 1
|
||||
- uses: ./analyze
|
||||
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
|
||||
# above, we manually disable it with an `if` condition.
|
||||
if: false
|
||||
with:
|
||||
category: /test-codeql-version:${{ matrix.version }}
|
||||
env:
|
||||
# Internal-only environment variable used to indicate that the post-init Action
|
||||
# should expect to upload a SARIF file for the failed run.
|
||||
CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF: true
|
||||
# Make sure the uploading SARIF files feature is enabled.
|
||||
CODEQL_ACTION_UPLOAD_FAILED_SARIF: true
|
||||
# Upload the failed SARIF file as an integration test of the API endpoint.
|
||||
CODEQL_ACTION_TEST_MODE: false
|
||||
# Mark telemetry for this workflow so it can be treated separately.
|
||||
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks
|
||||
|
||||
13
.github/workflows/__swift-autobuild.yml
generated
vendored
13
.github/workflows/__swift-autobuild.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Swift analysis using autobuild
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -42,13 +43,17 @@ jobs:
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
with:
|
||||
swift-version: '5.7'
|
||||
- uses: ./../action/init
|
||||
id: init
|
||||
with:
|
||||
languages: swift
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/.github/setup-swift
|
||||
with:
|
||||
codeql-path: ${{steps.init.outputs.codeql-path}}
|
||||
- name: Check working directory
|
||||
shell: bash
|
||||
run: pwd
|
||||
- uses: ./../action/autobuild
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
@@ -61,5 +66,5 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true'
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true' # Remove when Swift is GA.
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
13
.github/workflows/__swift-custom-build.yml
generated
vendored
13
.github/workflows/__swift-custom-build.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Swift analysis using a custom build command
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -48,13 +49,17 @@ jobs:
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
with:
|
||||
swift-version: '5.7'
|
||||
- uses: ./../action/init
|
||||
id: init
|
||||
with:
|
||||
languages: swift
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/.github/setup-swift
|
||||
with:
|
||||
codeql-path: ${{steps.init.outputs.codeql-path}}
|
||||
- name: Check working directory
|
||||
shell: bash
|
||||
run: pwd
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
@@ -69,6 +74,6 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true'
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true' # Remove when Swift is GA.
|
||||
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
1
.github/workflows/__test-autobuild-working-dir.yml
generated
vendored
1
.github/workflows/__test-autobuild-working-dir.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Autobuild working directory
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__test-local-codeql.yml
generated
vendored
1
.github/workflows/__test-local-codeql.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Local CodeQL bundle
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__test-proxy.yml
generated
vendored
1
.github/workflows/__test-proxy.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Proxy test
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
6
.github/workflows/__unset-environment.yml
generated
vendored
6
.github/workflows/__unset-environment.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Test unsetting environment variables
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -59,7 +60,10 @@ jobs:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
|
||||
# Disable Kotlin analysis while it's incompatible with Kotlin 1.8, until we find a
|
||||
# workaround for our PR checks.
|
||||
run: env -i CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN=true PATH="$PATH" HOME="$HOME"
|
||||
./build.sh
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
- shell: bash
|
||||
|
||||
1
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
1
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
@@ -7,6 +7,7 @@ name: "PR Check - Upload-sarif: 'ref' and 'sha' from inputs"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
1
.github/workflows/__with-checkout-path.yml
generated
vendored
1
.github/workflows/__with-checkout-path.yml
generated
vendored
@@ -7,6 +7,7 @@ name: PR Check - Use a custom `checkout_path`
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
|
||||
14
.github/workflows/codeql.yml
vendored
14
.github/workflows/codeql.yml
vendored
@@ -8,6 +8,9 @@ on:
|
||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||
# by other workflows.
|
||||
types: [opened, synchronize, reopened, ready_for_review]
|
||||
schedule:
|
||||
# Weekly on Sunday.
|
||||
- cron: '30 1 * * 0'
|
||||
|
||||
env:
|
||||
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks
|
||||
@@ -54,7 +57,7 @@ jobs:
|
||||
# be the same as `tools: null`. This allows us to make the job for each of the bundles a
|
||||
# required status check.
|
||||
#
|
||||
# If we're running on push, then we can skip running with `tools: latest` when it would be
|
||||
# If we're running on push or schedule, then we can skip running with `tools: latest` when it would be
|
||||
# the same as running with `tools: null`.
|
||||
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
|
||||
VERSIONS_JSON='[null]'
|
||||
@@ -78,8 +81,10 @@ jobs:
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./init
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Initialize CodeQL
|
||||
uses: ./init
|
||||
id: init
|
||||
with:
|
||||
languages: javascript
|
||||
@@ -88,4 +93,5 @@ jobs:
|
||||
# confirm steps.init.outputs.codeql-path points to the codeql binary
|
||||
- name: Print CodeQL Version
|
||||
run: ${{steps.init.outputs.codeql-path}} version --format=json
|
||||
- uses: ./analyze
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: ./analyze
|
||||
|
||||
@@ -2,6 +2,9 @@
|
||||
# when the analyze step fails.
|
||||
name: PR Check - Debug artifacts after failure
|
||||
env:
|
||||
# Disable Kotlin analysis while it's incompatible with Kotlin 1.8, until we find a
|
||||
# workaround for our PR checks.
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: true
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
on:
|
||||
push:
|
||||
|
||||
3
.github/workflows/debug-artifacts.yml
vendored
3
.github/workflows/debug-artifacts.yml
vendored
@@ -1,6 +1,9 @@
|
||||
# Checks logs, SARIF, and database bundle debug artifacts exist.
|
||||
name: PR Check - Debug artifact upload
|
||||
env:
|
||||
# Disable Kotlin analysis while it's incompatible with Kotlin 1.8, until we find a
|
||||
# workaround for our PR checks.
|
||||
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: true
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
on:
|
||||
push:
|
||||
|
||||
2
.github/workflows/pr-checks.yml
vendored
2
.github/workflows/pr-checks.yml
vendored
@@ -88,7 +88,7 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v3
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.8
|
||||
|
||||
|
||||
23
.github/workflows/python-deps.yml
vendored
23
.github/workflows/python-deps.yml
vendored
@@ -28,17 +28,7 @@ jobs:
|
||||
matrix:
|
||||
os: [ubuntu-20.04, ubuntu-22.04, macos-latest]
|
||||
python_deps_type: [pipenv, poetry, requirements, setup_py]
|
||||
python_version: [2, 3]
|
||||
exclude:
|
||||
# Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
|
||||
- python_version: 2
|
||||
python_deps_type: poetry
|
||||
# Python2 and pipenv are not supported since pipenv v2021.11.5
|
||||
- python_version: 2
|
||||
python_deps_type: pipenv
|
||||
# Python2 is not available on ubuntu-22.04 by default -- see https://github.com/github/codeql-action/pull/1257
|
||||
- python_version: 2
|
||||
os: ubuntu-22.04
|
||||
python_version: [3]
|
||||
|
||||
|
||||
env:
|
||||
@@ -138,14 +128,7 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python_deps_type: [pipenv, poetry, requirements, setup_py]
|
||||
python_version: [2, 3]
|
||||
exclude:
|
||||
# Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
|
||||
- python_version: 2
|
||||
python_deps_type: poetry
|
||||
# Python2 and pipenv are not supported since pipenv v2021.11.5
|
||||
- python_version: 2
|
||||
python_deps_type: pipenv
|
||||
python_version: [3]
|
||||
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
@@ -156,7 +139,7 @@ jobs:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/setup-python@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python_version }}
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ fi
|
||||
|
||||
if [ "$#" -eq 1 ]; then
|
||||
# If we were passed an argument, use that as the SHA
|
||||
GITHUB_SHA="$0"
|
||||
GITHUB_SHA="$1"
|
||||
elif [ "$#" -gt 1 ]; then
|
||||
echo "Usage: $0 [SHA]"
|
||||
echo "Update the required checks based on the SHA, or main."
|
||||
@@ -23,7 +23,7 @@ fi
|
||||
echo "Getting checks for $GITHUB_SHA"
|
||||
|
||||
# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
|
||||
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") or contains("update") or contains("test-setup-python-scripts") | not)] | unique | sort')"
|
||||
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or . == "check-expected-release-files" or contains("Update") or contains("update") or contains("test-setup-python-scripts") | not)] | unique | sort')"
|
||||
|
||||
echo "$CHECKS" | jq
|
||||
|
||||
|
||||
2
.github/workflows/update-release-branch.yml
vendored
2
.github/workflows/update-release-branch.yml
vendored
@@ -29,7 +29,7 @@ jobs:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v3
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.8
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v3
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.7"
|
||||
- name: Checkout CodeQL Action
|
||||
@@ -35,7 +35,7 @@ jobs:
|
||||
env:
|
||||
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
|
||||
- name: Commit Changes
|
||||
uses: peter-evans/create-pull-request@c7f493a8000b8aeb17a1332e326ba76b57cb83eb # v3.4.1
|
||||
uses: peter-evans/create-pull-request@2b011faafdcbc9ceb11414d64d0573f37c774b04 # v4.2.3
|
||||
with:
|
||||
commit-message: Update supported GitHub Enterprise Server versions.
|
||||
title: Update supported GitHub Enterprise Server versions.
|
||||
|
||||
19
CHANGELOG.md
19
CHANGELOG.md
@@ -2,6 +2,25 @@
|
||||
|
||||
## [UNRELEASED]
|
||||
|
||||
- Update default CodeQL bundle version to 2.12.0. [#1466](https://github.com/github/codeql-action/pull/1466)
|
||||
|
||||
## 2.1.37 - 14 Dec 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.11.6. [#1433](https://github.com/github/codeql-action/pull/1433)
|
||||
|
||||
## 2.1.36 - 08 Dec 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.11.5. [#1412](https://github.com/github/codeql-action/pull/1412)
|
||||
- Add a step that tries to upload a SARIF file for the workflow run when that workflow run fails. This will help better surface failed code scanning workflow runs. [#1393](https://github.com/github/codeql-action/pull/1393)
|
||||
- Python automatic dependency installation will no longer consider dependecy code installed in venv as user-written, for projects using Poetry that specify `virtualenvs.in-project = true` in their `poetry.toml`. [#1419](https://github.com/github/codeql-action/pull/1419).
|
||||
|
||||
## 2.1.35 - 01 Dec 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 2.1.34 - 25 Nov 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.11.4. [#1391](https://github.com/github/codeql-action/pull/1391)
|
||||
- Fixed a bug where some the `init` action and the `analyze` action would have different sets of experimental feature flags enabled. [#1384](https://github.com/github/codeql-action/pull/1384)
|
||||
|
||||
## 2.1.33 - 16 Nov 2022
|
||||
|
||||
@@ -12,6 +12,7 @@ inputs:
|
||||
upload:
|
||||
description: Upload the SARIF file to Code Scanning
|
||||
required: false
|
||||
# If changing this, make sure to update workflow.ts accordingly.
|
||||
default: "true"
|
||||
cleanup-level:
|
||||
description: "Level of cleanup to perform on CodeQL databases at the end of the analyze step. This should either be 'none' to skip cleanup, or be a valid argument for the --mode flag of the CodeQL CLI command 'codeql database cleanup' as documented at https://codeql.github.com/docs/codeql-cli/manual/database-cleanup"
|
||||
@@ -44,6 +45,7 @@ inputs:
|
||||
checkout_path:
|
||||
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
|
||||
required: false
|
||||
# If changing this, make sure to update workflow.ts accordingly.
|
||||
default: ${{ github.workspace }}
|
||||
ref:
|
||||
description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."
|
||||
|
||||
233
lib/actions-util.js
generated
233
lib/actions-util.js
generated
@@ -19,17 +19,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util_1 = require("./util");
|
||||
const workflow_1 = require("./workflow");
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
/**
|
||||
@@ -145,225 +145,6 @@ const determineMergeBaseCommitOid = async function () {
|
||||
}
|
||||
};
|
||||
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
|
||||
function isObject(o) {
|
||||
return o !== null && typeof o === "object";
|
||||
}
|
||||
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
||||
function escapeRegExp(string) {
|
||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
||||
}
|
||||
function patternToRegExp(value) {
|
||||
return new RegExp(`^${value
|
||||
.toString()
|
||||
.split(GLOB_PATTERN)
|
||||
.reduce(function (arr, cur) {
|
||||
if (cur === "**") {
|
||||
arr.push(".*?");
|
||||
}
|
||||
else if (cur === "*") {
|
||||
arr.push("[^/]*?");
|
||||
}
|
||||
else if (cur) {
|
||||
arr.push(escapeRegExp(cur));
|
||||
}
|
||||
return arr;
|
||||
}, [])
|
||||
.join("")}$`);
|
||||
}
|
||||
// this function should return true if patternA is a superset of patternB
|
||||
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
||||
function patternIsSuperset(patternA, patternB) {
|
||||
return patternToRegExp(patternA).test(patternB);
|
||||
}
|
||||
exports.patternIsSuperset = patternIsSuperset;
|
||||
function branchesToArray(branches) {
|
||||
if (typeof branches === "string") {
|
||||
return [branches];
|
||||
}
|
||||
if (Array.isArray(branches)) {
|
||||
if (branches.length === 0) {
|
||||
return "**";
|
||||
}
|
||||
return branches;
|
||||
}
|
||||
return "**";
|
||||
}
|
||||
function toCodedErrors(errors) {
|
||||
return Object.entries(errors).reduce((acc, [key, value]) => {
|
||||
acc[key] = { message: value, code: key };
|
||||
return acc;
|
||||
}, {});
|
||||
}
|
||||
// code to send back via status report
|
||||
// message to add as a warning annotation to the run
|
||||
exports.WorkflowErrors = toCodedErrors({
|
||||
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
||||
});
|
||||
function getWorkflowErrors(doc) {
|
||||
var _a, _b, _c, _d, _e;
|
||||
const errors = [];
|
||||
const jobName = process.env.GITHUB_JOB;
|
||||
if (jobName) {
|
||||
const job = (_a = doc === null || doc === void 0 ? void 0 : doc.jobs) === null || _a === void 0 ? void 0 : _a[jobName];
|
||||
const steps = job === null || job === void 0 ? void 0 : job.steps;
|
||||
if (Array.isArray(steps)) {
|
||||
for (const step of steps) {
|
||||
// this was advice that we used to give in the README
|
||||
// we actually want to run the analysis on the merge commit
|
||||
// to produce results that are more inline with expectations
|
||||
// (i.e: this is what will happen if you merge this PR)
|
||||
// and avoid some race conditions
|
||||
if ((step === null || step === void 0 ? void 0 : step.run) === "git checkout HEAD^2") {
|
||||
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let missingPush = false;
|
||||
if (doc.on === undefined) {
|
||||
// this is not a valid config
|
||||
}
|
||||
else if (typeof doc.on === "string") {
|
||||
if (doc.on === "pull_request") {
|
||||
missingPush = true;
|
||||
}
|
||||
}
|
||||
else if (Array.isArray(doc.on)) {
|
||||
const hasPush = doc.on.includes("push");
|
||||
const hasPullRequest = doc.on.includes("pull_request");
|
||||
if (hasPullRequest && !hasPush) {
|
||||
missingPush = true;
|
||||
}
|
||||
}
|
||||
else if (isObject(doc.on)) {
|
||||
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
||||
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
|
||||
if (!hasPush && hasPullRequest) {
|
||||
missingPush = true;
|
||||
}
|
||||
if (hasPush && hasPullRequest) {
|
||||
const paths = (_b = doc.on.push) === null || _b === void 0 ? void 0 : _b.paths;
|
||||
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
||||
// if they didn't change any files
|
||||
// currently we cannot go back through the history and find the most recent baseline
|
||||
if (Array.isArray(paths) && paths.length > 0) {
|
||||
errors.push(exports.WorkflowErrors.PathsSpecified);
|
||||
}
|
||||
const pathsIgnore = (_c = doc.on.push) === null || _c === void 0 ? void 0 : _c["paths-ignore"];
|
||||
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
||||
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
||||
}
|
||||
}
|
||||
// if doc.on.pull_request is null that means 'all branches'
|
||||
// if doc.on.pull_request is undefined that means 'off'
|
||||
// we only want to check for mismatched branches if pull_request is on.
|
||||
if (doc.on.pull_request !== undefined) {
|
||||
const push = branchesToArray((_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.branches);
|
||||
if (push !== "**") {
|
||||
const pull_request = branchesToArray((_e = doc.on.pull_request) === null || _e === void 0 ? void 0 : _e.branches);
|
||||
if (pull_request !== "**") {
|
||||
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
||||
if (difference.length > 0) {
|
||||
// there are branches in pull_request that may not have a baseline
|
||||
// because we are not building them on push
|
||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||
}
|
||||
}
|
||||
else if (push.length > 0) {
|
||||
// push is set up to run on a subset of branches
|
||||
// and you could open a PR against a branch with no baseline
|
||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (missingPush) {
|
||||
errors.push(exports.WorkflowErrors.MissingPushHook);
|
||||
}
|
||||
return errors;
|
||||
}
|
||||
exports.getWorkflowErrors = getWorkflowErrors;
|
||||
async function validateWorkflow() {
|
||||
let workflow;
|
||||
try {
|
||||
workflow = await getWorkflow();
|
||||
}
|
||||
catch (e) {
|
||||
return `error: getWorkflow() failed: ${String(e)}`;
|
||||
}
|
||||
let workflowErrors;
|
||||
try {
|
||||
workflowErrors = getWorkflowErrors(workflow);
|
||||
}
|
||||
catch (e) {
|
||||
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
||||
}
|
||||
if (workflowErrors.length > 0) {
|
||||
let message;
|
||||
try {
|
||||
message = formatWorkflowErrors(workflowErrors);
|
||||
}
|
||||
catch (e) {
|
||||
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
||||
}
|
||||
core.warning(message);
|
||||
}
|
||||
return formatWorkflowCause(workflowErrors);
|
||||
}
|
||||
exports.validateWorkflow = validateWorkflow;
|
||||
function formatWorkflowErrors(errors) {
|
||||
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
||||
const errorsList = errors.map((e) => e.message).join(" ");
|
||||
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
||||
}
|
||||
exports.formatWorkflowErrors = formatWorkflowErrors;
|
||||
function formatWorkflowCause(errors) {
|
||||
if (errors.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
return errors.map((e) => e.code).join(",");
|
||||
}
|
||||
exports.formatWorkflowCause = formatWorkflowCause;
|
||||
async function getWorkflow() {
|
||||
const relativePath = await getWorkflowPath();
|
||||
const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
|
||||
return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
|
||||
}
|
||||
exports.getWorkflow = getWorkflow;
|
||||
/**
|
||||
* Get the path of the currently executing workflow.
|
||||
*/
|
||||
async function getWorkflowPath() {
|
||||
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
|
||||
const owner = repo_nwo[0];
|
||||
const repo = repo_nwo[1];
|
||||
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
||||
const apiClient = api.getApiClient();
|
||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
|
||||
owner,
|
||||
repo,
|
||||
run_id,
|
||||
});
|
||||
const workflowUrl = runsResponse.data.workflow_url;
|
||||
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
||||
return workflowResponse.data.path;
|
||||
}
|
||||
/**
|
||||
* Get the workflow run ID.
|
||||
*/
|
||||
function getWorkflowRunID() {
|
||||
const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
|
||||
if (Number.isNaN(workflowRunID)) {
|
||||
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
||||
}
|
||||
return workflowRunID;
|
||||
}
|
||||
exports.getWorkflowRunID = getWorkflowRunID;
|
||||
/**
|
||||
* Get the analysis key parameter for the current job.
|
||||
*
|
||||
@@ -377,7 +158,7 @@ async function getAnalysisKey() {
|
||||
if (analysisKey !== undefined) {
|
||||
return analysisKey;
|
||||
}
|
||||
const workflowPath = await getWorkflowPath();
|
||||
const workflowPath = await (0, workflow_1.getWorkflowPath)();
|
||||
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||
analysisKey = `${workflowPath}:${jobName}`;
|
||||
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
||||
@@ -392,10 +173,10 @@ async function getAutomationID() {
|
||||
exports.getAutomationID = getAutomationID;
|
||||
function computeAutomationID(analysis_key, environment) {
|
||||
let automationID = `${analysis_key}/`;
|
||||
// the id has to be deterministic so we sort the fields
|
||||
if (environment !== undefined && environment !== "null") {
|
||||
const environmentObject = JSON.parse(environment);
|
||||
for (const entry of Object.entries(environmentObject).sort()) {
|
||||
const matrix = (0, util_1.parseMatrixInput)(environment);
|
||||
if (matrix !== undefined) {
|
||||
// the id has to be deterministic so we sort the fields
|
||||
for (const entry of Object.entries(matrix).sort()) {
|
||||
if (typeof entry[1] === "string") {
|
||||
automationID += `${entry[0]}:${entry[1]}/`;
|
||||
}
|
||||
|
||||
File diff suppressed because one or more lines are too long
355
lib/actions-util.test.js
generated
355
lib/actions-util.test.js
generated
@@ -25,14 +25,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsutil = __importStar(require("./actions-util"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
function errorCodes(actual, expected) {
|
||||
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||
}
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
|
||||
process.env["GITHUB_REF"] = "";
|
||||
@@ -98,6 +94,30 @@ function errorCodes(actual, expected) {
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/HEAD";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["CODE_SCANNING_REF"] = expectedRef;
|
||||
process.env["GITHUB_REF"] = "";
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
@@ -143,333 +163,6 @@ function errorCodes(actual, expected) {
|
||||
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
|
||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on is empty", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is valid", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request"],
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request", "schedule"],
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"], paths: ["test/*"] },
|
||||
pull_request: { branches: ["main"] },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is a correct object", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"] },
|
||||
pull_request: { branches: ["feature"] },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main", "feature"] },
|
||||
pull_request: { branches: ["main"] },
},
});
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main"] },
pull_request: { branches: ["main", "feature"] },
},
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() for a range of malformed workflows", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: {
push: 1,
pull_request: 1,
},
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: 1,
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: [1],
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { 1: 1 },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: 1 },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: [1] },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: { steps: 1 } },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: [undefined] },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: {
push: {
branches: 1,
},
pull_request: {
branches: 1,
},
},
}), []));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
push:
branches: ["main"]
pull_request:
`));
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["feature/*"] },
pull_request: { branches: "feature/moose" },
},
});
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["feature/moose"] },
pull_request: { branches: "feature/*" },
},
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
process.env.GITHUB_JOB = "test";
const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request"],
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
});
(0, ava_1.default)("formatWorkflowErrors() when there is one error", (t) => {
const message = actionsutil.formatWorkflowErrors([
actionsutil.WorkflowErrors.CheckoutWrongHead,
]);
t.true(message.startsWith("1 issue was detected with this workflow:"));
});
(0, ava_1.default)("formatWorkflowErrors() when there are multiple errors", (t) => {
const message = actionsutil.formatWorkflowErrors([
actionsutil.WorkflowErrors.CheckoutWrongHead,
actionsutil.WorkflowErrors.PathsSpecified,
]);
t.true(message.startsWith("2 issues were detected with this workflow:"));
});
(0, ava_1.default)("formatWorkflowCause() with no errors", (t) => {
const message = actionsutil.formatWorkflowCause([]);
t.deepEqual(message, undefined);
});
(0, ava_1.default)("formatWorkflowCause()", (t) => {
const message = actionsutil.formatWorkflowCause([
actionsutil.WorkflowErrors.CheckoutWrongHead,
actionsutil.WorkflowErrors.PathsSpecified,
]);
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
});
(0, ava_1.default)("patternIsSuperset()", (t) => {
t.false(actionsutil.patternIsSuperset("main-*", "main"));
t.true(actionsutil.patternIsSuperset("*", "*"));
t.true(actionsutil.patternIsSuperset("*", "main-*"));
t.false(actionsutil.patternIsSuperset("main-*", "*"));
t.false(actionsutil.patternIsSuperset("main-*", "main"));
t.true(actionsutil.patternIsSuperset("main", "main"));
t.false(actionsutil.patternIsSuperset("*", "feature/*"));
t.true(actionsutil.patternIsSuperset("**", "feature/*"));
t.false(actionsutil.patternIsSuperset("feature-*", "**"));
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
});
(0, ava_1.default)("getWorkflowErrors() when branches contain dots", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.load(`
on:
push:
branches: [4.1, master]
pull_request:
# The branches below must be a subset of the branches above
branches: [4.1, master]
`));
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push has a trailing comma", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
push:
branches: [master, ]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
`));
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
process.env.GITHUB_JOB = "test";
const errors = actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
jobs:
test:
steps:
- run: "git checkout HEAD^2"

test2:
steps:
- run: "git checkout HEAD^2"

test3:
steps: []
`));
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
});
(0, ava_1.default)("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
process.env.GITHUB_JOB = "test3";
const errors = actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
jobs:
test:
steps:
- run: "git checkout HEAD^2"

test2:
steps:
- run: "git checkout HEAD^2"

test3:
steps: []
`));
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on is missing", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
`));
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() with a different on setup", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on: "workflow_dispatch"
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on: [workflow_dispatch]
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
workflow_dispatch: {}
`)), []));
});
(0, ava_1.default)("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
push:
branches: [master]
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on: ["push"]
`)), []));
});
(0, ava_1.default)("initializeEnvironment", (t) => {
(0, util_1.initializeEnvironment)("1.2.3");
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");

File diff suppressed because one or more lines are too long
1
lib/analyze-action-env.test.js
generated
@@ -45,6 +45,7 @@ const util = __importStar(require("./util"));
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};

@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEnE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
1
lib/analyze-action-input.test.js
generated
@@ -61,6 +61,7 @@ const util = __importStar(require("./util"));
optionalInputStub.withArgs("cleanup-level").returns("none");
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";

@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
12
lib/analyze-action.js
generated
@@ -39,6 +39,7 @@ const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const shared_environment_1 = require("./shared-environment");
const trap_caching_1 = require("./trap-caching");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
@@ -103,6 +104,8 @@ function doesGoExtractionOutputExist(config) {
* - We detect whether an autobuild step is present by checking the
* `util.DID_AUTOBUILD_GO_ENV_VAR_NAME` environment variable, which is set
* when the autobuilder is invoked.
* - We detect whether the Go database has already been finalized in case it
* has been manually set in a prior Action step.
* - We approximate whether manual build steps are present by looking at
* whether any extraction output already exists for Go.
*/
@@ -114,6 +117,10 @@ async function runAutobuildIfLegacyGoWorkflow(config, logger) {
logger.debug("Won't run Go autobuild since it has already been run.");
return;
}
if ((0, analyze_1.dbIsFinalized)(config, languages_1.Language.go, logger)) {
logger.debug("Won't run Go autobuild since there is already a finalized database for Go.");
return;
}
// This captures whether a user has added manual build steps for Go
if (doesGoExtractionOutputExist(config)) {
logger.debug("Won't run Go autobuild since at least one file of Go code has already been extracted.");
@@ -170,7 +177,7 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, logger);
uploadResult = await upload_lib.uploadFromActions(outputDir, actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), logger);
core.setOutput("sarif-id", uploadResult.sarifID);
}
else {
@@ -195,6 +202,7 @@ async function run() {
if (actionsUtil.getOptionalInput("expect-error") === "true") {
core.setFailed(`expect-error input was set to true but no error was thrown.`);
}
core.exportVariable(shared_environment_1.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY, "true");
}
catch (origError) {
const error = origError instanceof Error ? origError : new Error(String(origError));
@@ -202,7 +210,6 @@ async function run() {
hasBadExpectErrorInput()) {
core.setFailed(error.message);
}
console.log(error);
if (error instanceof analyze_1.CodeQLAnalysisError) {
const stats = { ...error.queriesStatusReport };
await sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
@@ -232,7 +239,6 @@ async function runWrapper() {
}
catch (error) {
core.setFailed(`analyze action failed: ${error}`);
console.log(error);
}
await (0, util_1.checkForTimeout)();
}

File diff suppressed because one or more lines are too long
3
lib/analyze.js
generated
@@ -159,6 +159,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
logger.info(analysisSummary);
}
else {
// config was generated by the action, so must be interpreted by the action.
logger.startGroup(`Running queries for ${language}`);
const querySuitePaths = [];
if (queries["builtin"].length > 0) {
@@ -207,7 +208,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
return statusReport;
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId, featureEnablement);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId);
}
async function runPrintLinesOfCode(language) {
const databasePath = util.getCodeQLDatabasePath(config, language);

File diff suppressed because one or more lines are too long
272
lib/codeql.js
generated
@@ -18,30 +18,19 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CODEQL_VERSION_CONFIG_FILES = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = exports.CommandInvocationError = void 0;
|
||||
exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const query_string_1 = __importDefault(require("query-string"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const uuid_1 = require("uuid");
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const api = __importStar(require("./api-client"));
|
||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||
const error_matcher_1 = require("./error-matcher");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const setupCodeql = __importStar(require("./setup-codeql"));
|
||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||
const trap_caching_1 = require("./trap-caching");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
class CommandInvocationError extends Error {
|
||||
constructor(cmd, args, exitCode, error, output) {
|
||||
super(`Failure invoking ${cmd} with arguments ${args}.\n
|
||||
@@ -56,8 +45,6 @@ exports.CommandInvocationError = CommandInvocationError;
|
||||
* Can be overridden in tests using `setCodeQL`.
|
||||
*/
|
||||
let cachedCodeQL = undefined;
|
||||
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
|
||||
exports.CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||
/**
|
||||
* The oldest version of CodeQL that the Action will run with. This should be
|
||||
* at least three minor versions behind the current version and must include the
|
||||
@@ -74,9 +61,9 @@ const CODEQL_MINIMUM_VERSION = "2.6.3";
|
||||
*/
|
||||
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
|
||||
const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
|
||||
exports.CODEQL_VERSION_CONFIG_FILES = "2.10.1";
|
||||
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
|
||||
exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
|
||||
const CODEQL_VERSION_FILE_BASELINE_INFORMATION = "2.11.3";
|
||||
/**
|
||||
* This variable controls using the new style of tracing from the CodeQL
|
||||
* CLI. In particular, with versions above this we will use both indirect
|
||||
@@ -103,197 +90,23 @@ exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = "2.9.0";
|
||||
* --extractor-options-verbosity that we need.
|
||||
*/
|
||||
exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = "2.10.3";
|
||||
function getCodeQLBundleName() {
|
||||
let platform;
|
||||
if (process.platform === "win32") {
|
||||
platform = "win64";
|
||||
}
|
||||
else if (process.platform === "linux") {
|
||||
platform = "linux64";
|
||||
}
|
||||
else if (process.platform === "darwin") {
|
||||
platform = "osx64";
|
||||
}
|
||||
else {
|
||||
return "codeql-bundle.tar.gz";
|
||||
}
|
||||
return `codeql-bundle-${platform}.tar.gz`;
|
||||
}
|
||||
function getCodeQLActionRepository(logger) {
|
||||
if (process.env["GITHUB_ACTION_REPOSITORY"] !== undefined) {
|
||||
return process.env["GITHUB_ACTION_REPOSITORY"];
|
||||
}
|
||||
// The Actions Runner used with GitHub Enterprise Server 2.22 did not set the GITHUB_ACTION_REPOSITORY variable.
|
||||
// This fallback logic can be removed after the end-of-support for 2.22 on 2021-09-23.
|
||||
if ((0, actions_util_1.isRunningLocalAction)()) {
|
||||
// This handles the case where the Action does not come from an Action repository,
|
||||
// e.g. our integration tests which use the Action code from the current checkout.
|
||||
logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository.");
|
||||
return exports.CODEQL_DEFAULT_ACTION_REPOSITORY;
|
||||
}
|
||||
logger.info("GITHUB_ACTION_REPOSITORY environment variable was not set. Falling back to legacy method of finding the GitHub Action.");
|
||||
const relativeScriptPathParts = (0, actions_util_1.getRelativeScriptPath)().split(path.sep);
|
||||
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
|
||||
}
|
||||
exports.getCodeQLActionRepository = getCodeQLActionRepository;
|
||||
async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
|
||||
const codeQLActionRepository = getCodeQLActionRepository(logger);
|
||||
const potentialDownloadSources = [
|
||||
// This GitHub instance, and this Action.
|
||||
[apiDetails.url, codeQLActionRepository],
|
||||
// This GitHub instance, and the canonical Action.
|
||||
[apiDetails.url, exports.CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
// GitHub.com, and the canonical Action.
|
||||
[util.GITHUB_DOTCOM_URL, exports.CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
];
|
||||
// We now filter out any duplicates.
|
||||
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
|
||||
const uniqueDownloadSources = potentialDownloadSources.filter((source, index, self) => {
|
||||
return !self.slice(0, index).some((other) => (0, fast_deep_equal_1.default)(source, other));
|
||||
});
|
||||
const codeQLBundleName = getCodeQLBundleName();
|
||||
if (variant === util.GitHubVariant.GHAE) {
|
||||
try {
|
||||
const release = await api
|
||||
.getApiClient()
|
||||
.request("GET /enterprise/code-scanning/codeql-bundle/find/{tag}", {
|
||||
tag: CODEQL_BUNDLE_VERSION,
|
||||
});
|
||||
const assetID = release.data.assets[codeQLBundleName];
|
||||
if (assetID !== undefined) {
|
||||
const download = await api
|
||||
.getApiClient()
|
||||
.request("GET /enterprise/code-scanning/codeql-bundle/download/{asset_id}", { asset_id: assetID });
|
||||
const downloadURL = download.data.url;
|
||||
logger.info(`Found CodeQL bundle at GitHub AE endpoint with URL ${downloadURL}.`);
|
||||
return downloadURL;
|
||||
}
|
||||
else {
|
||||
logger.info(`Attempted to fetch bundle from GitHub AE endpoint but the bundle ${codeQLBundleName} was not found in the assets ${JSON.stringify(release.data.assets)}.`);
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
logger.info(`Attempted to fetch bundle from GitHub AE endpoint but got error ${e}.`);
|
||||
}
|
||||
}
|
||||
for (const downloadSource of uniqueDownloadSources) {
|
||||
const [apiURL, repository] = downloadSource;
|
||||
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
|
||||
if (apiURL === util.GITHUB_DOTCOM_URL &&
|
||||
repository === exports.CODEQL_DEFAULT_ACTION_REPOSITORY) {
|
||||
break;
|
||||
}
|
||||
const [repositoryOwner, repositoryName] = repository.split("/");
|
||||
try {
|
||||
const release = await api.getApiClient().repos.getReleaseByTag({
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
tag: CODEQL_BUNDLE_VERSION,
|
||||
});
|
||||
for (const asset of release.data.assets) {
|
||||
if (asset.name === codeQLBundleName) {
|
||||
logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
|
||||
return asset.url;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
|
||||
}
|
||||
}
|
||||
return `https://github.com/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
|
||||
}
|
||||
/**
|
||||
* Set up CodeQL CLI access.
|
||||
*
|
||||
* @param codeqlURL
|
||||
* @param toolsInput
|
||||
* @param apiDetails
|
||||
* @param tempDir
|
||||
* @param variant
|
||||
* @param features
|
||||
* @param bypassToolcache
|
||||
* @param defaultCliVersion
|
||||
* @param logger
|
||||
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
||||
* version requirement. Must be set to true outside tests.
|
||||
* @returns a { CodeQL, toolsVersion } object.
|
||||
*/
|
||||
async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, features, logger, checkVersion) {
|
||||
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger, checkVersion) {
|
||||
try {
|
||||
const forceLatestReason =
|
||||
// We use the special value of 'latest' to prioritize the version in the
|
||||
// defaults over any pinned cached version.
|
||||
codeqlURL === "latest"
|
||||
? '"tools: latest" was requested'
|
||||
: // If the user hasn't requested a particular CodeQL version, then bypass
|
||||
// the toolcache when the appropriate feature is enabled. This
|
||||
// allows us to quickly rollback a broken bundle that has made its way
|
||||
// into the toolcache.
|
||||
codeqlURL === undefined &&
|
||||
(await features.getValue(feature_flags_1.Feature.BypassToolcacheEnabled))
|
||||
? "a specific version of CodeQL was not requested and the bypass toolcache feature is enabled"
|
||||
: undefined;
|
||||
const forceLatest = forceLatestReason !== undefined;
|
||||
if (forceLatest) {
|
||||
logger.debug(`Forcing the latest version of the CodeQL tools since ${forceLatestReason}.`);
|
||||
codeqlURL = undefined;
|
||||
}
|
||||
let codeqlFolder;
|
||||
let codeqlURLVersion;
|
||||
if (codeqlURL && !codeqlURL.startsWith("http")) {
|
||||
codeqlFolder = await toolcache.extractTar(codeqlURL);
|
||||
codeqlURLVersion = "local";
|
||||
}
|
||||
else {
|
||||
codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
|
||||
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
|
||||
// If we find the specified version, we always use that.
|
||||
codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer);
|
||||
// If we don't find the requested version, in some cases we may allow a
|
||||
// different version to save download time if the version hasn't been
|
||||
// specified explicitly (in which case we always honor it).
|
||||
if (!codeqlFolder && !codeqlURL && !forceLatest) {
|
||||
const codeqlVersions = toolcache.findAllVersions("CodeQL");
|
||||
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
|
||||
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]);
|
||||
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
|
||||
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
|
||||
codeqlFolder = tmpCodeqlFolder;
|
||||
codeqlURLVersion = codeqlVersions[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
if (codeqlFolder) {
|
||||
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
|
||||
}
|
||||
else {
|
||||
if (!codeqlURL) {
|
||||
codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, variant, logger);
|
||||
}
|
||||
const parsedCodeQLURL = new URL(codeqlURL);
|
||||
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
|
||||
const headers = {
|
||||
accept: "application/octet-stream",
|
||||
};
|
||||
// We only want to provide an authorization header if we are downloading
|
||||
// from the same GitHub instance the Action is running on.
|
||||
// This avoids leaking Enterprise tokens to dotcom.
|
||||
// We also don't want to send an authorization header if there's already a token provided in the URL.
|
||||
if (codeqlURL.startsWith(`${apiDetails.url}/`) &&
|
||||
parsedQueryString["token"] === undefined) {
|
||||
logger.debug("Downloading CodeQL bundle with token.");
|
||||
headers.authorization = `token ${apiDetails.auth}`;
|
||||
}
|
||||
else {
|
||||
logger.debug("Downloading CodeQL bundle without token.");
|
||||
}
|
||||
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
|
||||
const dest = path.join(tempDir, (0, uuid_1.v4)());
|
||||
const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
|
||||
const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, undefined, finalHeaders);
|
||||
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
|
||||
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer);
|
||||
}
|
||||
}
|
||||
const { codeqlFolder, toolsVersion } = await setupCodeql.setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger);
|
||||
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
|
||||
if (process.platform === "win32") {
|
||||
codeqlCmd += ".exe";
|
||||
@@ -302,7 +115,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, features, lo
|
||||
throw new Error(`Unsupported platform: ${process.platform}`);
|
||||
}
|
||||
cachedCodeQL = await getCodeQLForCmd(codeqlCmd, checkVersion);
|
||||
return { codeql: cachedCodeQL, toolsVersion: codeqlURLVersion };
|
||||
return { codeql: cachedCodeQL, toolsVersion };
|
||||
}
|
||||
catch (e) {
|
||||
logger.error(e instanceof Error ? e : new Error(String(e)));
|
||||
@@ -310,26 +123,6 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, features, lo
|
||||
}
|
||||
}
|
||||
exports.setupCodeQL = setupCodeQL;
|
||||
function getCodeQLURLVersion(url) {
|
||||
const match = url.match(/\/codeql-bundle-(.*)\//);
|
||||
if (match === null || match.length < 2) {
|
||||
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
|
||||
}
|
||||
return match[1];
|
||||
}
|
||||
exports.getCodeQLURLVersion = getCodeQLURLVersion;
|
||||
function convertToSemVer(version, logger) {
|
||||
if (!semver.valid(version)) {
|
||||
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
|
||||
version = `0.0.0-${version}`;
|
||||
}
|
||||
const s = semver.clean(version);
|
||||
if (!s) {
|
||||
throw new Error(`Bundle version ${version} is not in SemVer format.`);
|
||||
}
|
||||
return s;
|
||||
}
|
||||
exports.convertToSemVer = convertToSemVer;
|
||||
/**
|
||||
* Use the CodeQL executable located at the given path.
|
||||
*/
|
||||
@@ -378,6 +171,7 @@ function setCodeQL(partialCodeql) {
|
||||
databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
|
||||
databaseInterpretResults: resolveFunction(partialCodeql, "databaseInterpretResults"),
|
||||
databasePrintBaseline: resolveFunction(partialCodeql, "databasePrintBaseline"),
|
||||
diagnosticsExport: resolveFunction(partialCodeql, "diagnosticsExport"),
|
||||
};
|
||||
return cachedCodeQL;
|
||||
}
|
||||
@@ -514,9 +308,16 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
extraArgs.push("--no-internal-use-lua-tracing");
|
||||
}
|
||||
}
|
||||
// A config file is only generated if the CliConfigFileEnabled feature flag is enabled.
|
||||
const configLocation = await generateCodeScanningConfig(codeql, config, featureEnablement);
|
||||
// Only pass external repository token if a config file is going to be parsed by the CLI.
|
||||
let externalRepositoryToken;
|
||||
if (configLocation) {
|
||||
extraArgs.push(`--codescanning-config=${configLocation}`);
|
||||
externalRepositoryToken = (0, actions_util_1.getOptionalInput)("external-repository-token");
|
||||
if (externalRepositoryToken) {
|
||||
extraArgs.push("--external-repository-token-stdin");
|
||||
}
|
||||
}
|
||||
await runTool(cmd, [
|
||||
"database",
|
||||
@@ -526,7 +327,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
`--source-root=${sourceRoot}`,
|
||||
...extraArgs,
|
||||
...getExtraOptionsFromEnv(["database", "init"]),
|
||||
]);
|
||||
], { stdin: externalRepositoryToken });
|
||||
},
|
||||
async runAutobuild(language) {
|
||||
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
|
||||
@@ -673,9 +474,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
if (querySuitePath) {
|
||||
codeqlArgs.push(querySuitePath);
|
||||
}
|
||||
await runTool(cmd, codeqlArgs);
|
||||
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
|
||||
},
|
||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId, featureEnablement) {
|
||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId) {
|
||||
const codeqlArgs = [
|
||||
"database",
|
||||
"interpret-results",
|
||||
@@ -694,7 +495,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
if (automationDetailsId !== undefined) {
|
||||
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||
}
|
||||
if (await featureEnablement.getValue(feature_flags_1.Feature.FileBaselineInformationEnabled, this)) {
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_FILE_BASELINE_INFORMATION)) {
|
||||
codeqlArgs.push("--sarif-add-baseline-file-info");
|
||||
}
|
||||
codeqlArgs.push(databasePath);
|
||||
@@ -702,7 +503,8 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
codeqlArgs.push(...querySuitePaths);
|
||||
}
|
||||
// capture stdout, which contains analysis summaries
|
||||
return await runTool(cmd, codeqlArgs);
|
||||
const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
|
||||
return returnState.stdout;
|
||||
},
|
||||
async databasePrintBaseline(databasePath) {
|
||||
const codeqlArgs = [
|
||||
@@ -778,6 +580,19 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
];
|
||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||
},
|
||||
async diagnosticsExport(sarifFile, automationDetailsId) {
|
||||
const args = [
|
||||
"diagnostics",
|
||||
"export",
|
||||
"--format=sarif-latest",
|
||||
`--output=${sarifFile}`,
|
||||
...getExtraOptionsFromEnv(["diagnostics", "export"]),
|
||||
];
|
||||
if (automationDetailsId !== undefined) {
|
||||
args.push("--sarif-category", automationDetailsId);
|
||||
}
|
||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||
},
|
||||
};
|
||||
// To ensure that status reports include the CodeQL CLI version wherever
|
||||
// possible, we want to call getVersion(), which populates the version value
|
||||
@@ -793,6 +608,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
}
|
||||
return codeql;
|
||||
}
|
||||
exports.getCodeQLForCmd = getCodeQLForCmd;
|
||||
/**
|
||||
* Gets the options for `path` of `options` as an array of extra option strings.
|
||||
*/
|
||||
@@ -847,20 +663,26 @@ exports.getExtraOptions = getExtraOptions;
|
||||
* status reports on GitHub.com.
|
||||
*/
|
||||
const maxErrorSize = 20000;
|
||||
async function runTool(cmd, args = []) {
|
||||
async function runTool(cmd, args = [], opts = {}) {
|
||||
let output = "";
|
||||
let error = "";
|
||||
const exitCode = await new toolrunner.ToolRunner(cmd, args, {
|
||||
listeners: {
|
||||
stdout: (data) => {
|
||||
output += data.toString();
|
||||
output += data.toString("utf8");
|
||||
},
|
||||
stderr: (data) => {
|
||||
const toRead = Math.min(maxErrorSize - error.length, data.length);
|
||||
error += data.toString("utf8", 0, toRead);
|
||||
let readStartIndex = 0;
|
||||
// If the error is too large, then we only take the last 20,000 characters
|
||||
if (data.length - maxErrorSize > 0) {
|
||||
// Eg: if we have 20,000 the start index should be 2.
|
||||
readStartIndex = data.length - maxErrorSize + 1;
|
||||
}
|
||||
error += data.toString("utf8", readStartIndex);
|
||||
},
|
||||
},
|
||||
ignoreReturnCode: true,
|
||||
...(opts.stdin ? { input: Buffer.from(opts.stdin || "") } : {}),
|
||||
}).exec();
|
||||
if (exitCode !== 0)
|
||||
throw new CommandInvocationError(cmd, args, exitCode, error, output);
|
||||
|
||||
File diff suppressed because one or more lines are too long
343
lib/codeql.test.js
generated
@@ -24,16 +24,19 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.stubToolRunnerConstructor = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const path_1 = __importDefault(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const nock_1 = __importDefault(require("nock"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const codeql = __importStar(require("./codeql"));
|
||||
const defaults = __importStar(require("./defaults.json"));
|
||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
@@ -44,14 +47,16 @@ const util_1 = require("./util");
|
||||
const sampleApiDetails = {
|
||||
auth: "token",
|
||||
url: "https://github.com",
|
||||
apiURL: undefined,
|
||||
registriesAuthTokens: undefined,
|
||||
apiURL: "https://api.github.com",
|
||||
};
|
||||
const sampleGHAEApiDetails = {
|
||||
auth: "token",
|
||||
url: "https://example.githubenterprise.com",
|
||||
apiURL: undefined,
|
||||
registriesAuthTokens: undefined,
|
||||
apiURL: "https://example.githubenterprise.com/api/v3",
|
||||
};
|
||||
const SAMPLE_DEFAULT_CLI_VERSION = {
|
||||
cliVersion: "2.0.0",
|
||||
variant: util.GitHubVariant.DOTCOM,
|
||||
};
|
||||
let stubConfig;
|
||||
ava_1.default.beforeEach(() => {
|
||||
@@ -81,7 +86,13 @@ ava_1.default.beforeEach(() => {
|
||||
trapCacheDownloadTime: 0,
|
||||
};
|
||||
});
|
||||
async function mockApiAndSetupCodeQL({ apiDetails, featureEnablement, isPinned, tmpDir, toolsInput, version, }) {
|
||||
/**
|
||||
* Mocks the API for downloading the bundle tagged `tagName`.
|
||||
*
|
||||
* @returns the download URL for the bundle. This can be passed to the tools parameter of
|
||||
* `codeql.setupCodeQL`.
|
||||
*/
|
||||
async function mockDownloadApi({ apiDetails = sampleApiDetails, isPinned, tagName, }) {
|
||||
var _a;
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
@@ -90,133 +101,148 @@ async function mockApiAndSetupCodeQL({ apiDetails, featureEnablement, isPinned,
|
||||
: "osx64";
|
||||
const baseUrl = (_a = apiDetails === null || apiDetails === void 0 ? void 0 : apiDetails.url) !== null && _a !== void 0 ? _a : "https://example.com";
|
||||
const relativeUrl = apiDetails
|
||||
? `/github/codeql-action/releases/download/${version}/codeql-bundle-${platform}.tar.gz`
|
||||
: `/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
|
||||
? `/github/codeql-action/releases/download/${tagName}/codeql-bundle-${platform}.tar.gz`
|
||||
: `/download/${tagName}/codeql-bundle.tar.gz`;
|
||||
(0, nock_1.default)(baseUrl)
|
||||
.get(relativeUrl)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle${isPinned ? "-pinned" : ""}.tar.gz`));
|
||||
return await codeql.setupCodeQL(toolsInput ? toolsInput.input : `${baseUrl}${relativeUrl}`, apiDetails !== null && apiDetails !== void 0 ? apiDetails : sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, featureEnablement !== null && featureEnablement !== void 0 ? featureEnablement : (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
|
||||
.replyWithFile(200, path_1.default.join(__dirname, `/../src/testdata/codeql-bundle${isPinned ? "-pinned" : ""}.tar.gz`));
|
||||
return `${baseUrl}${relativeUrl}`;
|
||||
}
|
||||
(0, ava_1.default)("download codeql bundle cache", async (t) => {
|
||||
async function installIntoToolcache({ apiDetails = sampleApiDetails, cliVersion, isPinned, tagName, tmpDir, }) {
|
||||
const url = await mockDownloadApi({ apiDetails, isPinned, tagName });
|
||||
await codeql.setupCodeQL(cliVersion !== undefined ? undefined : url, apiDetails, tmpDir, util.GitHubVariant.GHES, false, cliVersion !== undefined
|
||||
? { cliVersion, tagName, variant: util.GitHubVariant.GHES }
|
||||
: SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||
}
|
||||
(0, ava_1.default)("downloads and caches explicitly requested bundles that aren't in the toolcache", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const versions = ["20200601", "20200610"];
|
||||
for (let i = 0; i < versions.length; i++) {
|
||||
const version = versions[i];
|
||||
const codeQLConfig = await mockApiAndSetupCodeQL({ version, tmpDir });
|
||||
const url = await mockDownloadApi({
|
||||
tagName: `codeql-bundle-${version}`,
|
||||
isPinned: false,
|
||||
});
|
||||
const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
|
||||
t.deepEqual(codeQLConfig.toolsVersion, version);
|
||||
t.is(result.toolsVersion, `0.0.0-${version}`);
|
||||
}
|
||||
t.is(toolcache.findAllVersions("CodeQL").length, 2);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
|
||||
(0, ava_1.default)("downloads an explicitly requested bundle even if a different version is cached", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
await installIntoToolcache({
|
||||
tagName: "codeql-bundle-20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
|
||||
const unpinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200610",
|
||||
tmpDir,
|
||||
const url = await mockDownloadApi({
|
||||
tagName: "codeql-bundle-20200610",
|
||||
});
|
||||
const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
|
||||
t.deepEqual(unpinnedCodeQLConfig.toolsVersion, "20200610");
|
||||
t.deepEqual(result.toolsVersion, "0.0.0-20200610");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("don't download codeql bundle cache with pinned different version cached", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
|
||||
const codeQLConfig = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(codeQLConfig.toolsVersion, "0.0.0-20200601");
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("download codeql bundle cache with different version cached (not pinned)", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const cachedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
t.deepEqual(cachedCodeQLConfig.toolsVersion, "20200601");
|
||||
const codeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: defaults.bundleVersion,
|
||||
tmpDir,
|
||||
apiDetails: sampleApiDetails,
|
||||
toolsInput: { input: undefined },
|
||||
});
|
||||
t.deepEqual(codeQLConfig.toolsVersion, defaults.bundleVersion.replace("codeql-bundle-", ""));
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
|
||||
const latestCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: defaults.bundleVersion,
|
||||
apiDetails: sampleApiDetails,
|
||||
toolsInput: { input: "latest" },
|
||||
tmpDir,
|
||||
});
|
||||
t.deepEqual(latestCodeQLConfig.toolsVersion, defaults.bundleVersion.replace("codeql-bundle-", ""));
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
});
|
||||
const TOOLCACHE_BYPASS_TEST_CASES = [
|
||||
[true, undefined, true],
|
||||
[false, undefined, false],
|
||||
[
|
||||
true,
|
||||
"https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz",
|
||||
false,
|
||||
],
|
||||
];
|
||||
for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCACHE_BYPASS_TEST_CASES) {
|
||||
(0, ava_1.default)(`download codeql bundle ${shouldToolcacheBeBypassed ? "bypasses" : "does not bypass"} toolcache when feature ${isFeatureEnabled ? "enabled" : "disabled"} and tools: ${toolsInput} passed`, async (t) => {
|
||||
for (const isCached of [true, false]) {
|
||||
(0, ava_1.default)(`uses default version on Dotcom when default version bundle is ${isCached ? "" : "not "}cached`, async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
await mockApiAndSetupCodeQL({
|
||||
version: "codeql-bundle-20200601",
|
||||
apiDetails: sampleApiDetails,
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
await mockApiAndSetupCodeQL({
|
||||
version: defaults.bundleVersion,
|
||||
apiDetails: sampleApiDetails,
|
||||
featureEnablement: (0, testing_utils_1.createFeatures)(isFeatureEnabled ? [feature_flags_1.Feature.BypassToolcacheEnabled] : []),
|
||||
toolsInput: { input: toolsInput },
|
||||
tmpDir,
|
||||
});
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, shouldToolcacheBeBypassed ? 2 : 1);
|
||||
const tagName = `codeql-bundle-20230101`;
|
||||
if (isCached) {
|
||||
await installIntoToolcache({
|
||||
cliVersion: SAMPLE_DEFAULT_CLI_VERSION.cliVersion,
|
||||
tagName,
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
}
|
||||
else {
|
||||
await mockDownloadApi({
|
||||
tagName,
|
||||
});
|
||||
sinon.stub(api, "getApiClient").value(() => ({
|
||||
repos: {
|
||||
listReleases: sinon.stub().resolves(undefined),
|
||||
},
|
||||
paginate: sinon.stub().resolves([
|
||||
{
|
||||
assets: [
|
||||
{
|
||||
name: "cli-version-2.0.0.txt",
|
||||
},
|
||||
],
|
||||
tag_name: tagName,
|
||||
},
|
||||
]),
|
||||
}));
|
||||
}
|
||||
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.is(result.toolsVersion, SAMPLE_DEFAULT_CLI_VERSION.cliVersion);
|
||||
});
|
||||
});
|
||||
}
|
||||
for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
|
||||
(0, ava_1.default)(`uses a cached bundle when no tools input is given on ${util.GitHubVariant[variant]}`, async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
await installIntoToolcache({
|
||||
tagName: "codeql-bundle-20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, variant, false, {
|
||||
cliVersion: defaults.cliVersion,
|
||||
tagName: defaults.bundleVersion,
|
||||
variant,
|
||||
}, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(result.toolsVersion, "0.0.0-20200601");
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)(`downloads bundle if only an unpinned version is cached on ${util.GitHubVariant[variant]}`, async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
await installIntoToolcache({
|
||||
tagName: "codeql-bundle-20200601",
|
||||
isPinned: false,
|
||||
tmpDir,
|
||||
});
|
||||
await mockDownloadApi({
|
||||
tagName: defaults.bundleVersion,
|
||||
});
|
||||
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, variant, false, {
|
||||
cliVersion: defaults.cliVersion,
|
||||
tagName: defaults.bundleVersion,
|
||||
variant,
|
||||
}, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(result.toolsVersion, defaults.cliVersion);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
});
|
||||
}
|
||||
(0, ava_1.default)('downloads bundle if "latest" tools specified but not cached', async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
await installIntoToolcache({
|
||||
tagName: "codeql-bundle-20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
await mockDownloadApi({
|
||||
tagName: defaults.bundleVersion,
|
||||
});
|
||||
const result = await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(result.toolsVersion, defaults.cliVersion);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("download codeql bundle from github ae endpoint", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
@@ -239,34 +265,33 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
|
||||
});
|
||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
|
||||
.replyWithFile(200, path_1.default.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
// This is a workaround to mock `api.getApiDetails()` since it doesn't seem to be possible to
|
||||
// mock this directly. The difficulty is that `getApiDetails()` is called locally in
|
||||
// `api-client.ts`, but `sinon.stub(api, "getApiDetails")` only affects calls to
|
||||
// `getApiDetails()` via an imported `api` module.
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("token")
|
||||
.returns(sampleGHAEApiDetails.auth);
|
||||
const requiredEnvParamStub = sinon.stub(util, "getRequiredEnvParam");
|
||||
requiredEnvParamStub
|
||||
.withArgs("GITHUB_SERVER_URL")
|
||||
.returns(sampleGHAEApiDetails.url);
|
||||
requiredEnvParamStub
|
||||
.withArgs("GITHUB_API_URL")
|
||||
.returns(sampleGHAEApiDetails.apiURL);
|
||||
sinon.stub(actionsUtil, "isRunningLocalAction").returns(false);
|
||||
process.env["GITHUB_ACTION_REPOSITORY"] = "github/codeql-action";
|
||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, false, {
|
||||
cliVersion: defaults.cliVersion,
|
||||
tagName: defaults.bundleVersion,
|
||||
variant: util.GitHubVariant.GHAE,
|
||||
}, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("parse codeql bundle url version", (t) => {
|
||||
t.deepEqual(codeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
|
||||
});
|
||||
(0, ava_1.default)("convert to semver", (t) => {
|
||||
const tests = {
|
||||
"20200601": "0.0.0-20200601",
|
||||
"20200601.0": "0.0.0-20200601.0",
|
||||
"20200601.0.0": "20200601.0.0",
|
||||
"1.2.3": "1.2.3",
|
||||
"1.2.3-alpha": "1.2.3-alpha",
|
||||
"1.2.3-beta.1": "1.2.3-beta.1",
|
||||
};
|
||||
for (const [version, expectedVersion] of Object.entries(tests)) {
|
||||
try {
|
||||
const parsedVersion = codeql.convertToSemVer(version, (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(parsedVersion, expectedVersion);
|
||||
}
|
||||
catch (e) {
|
||||
t.fail(e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
});
|
||||
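As an aside, a minimal sketch of the bundle-version-to-semver conversion exercised by the test above (illustrative only; the function name below is made up, the real implementation is codeql.convertToSemVer):

const semver = require("semver");

// Turn a CodeQL bundle version such as "20200601" into a valid semver string
// by prefixing "0.0.0-" when the input is not already valid semver.
function convertToSemVerSketch(version) {
  if (!semver.valid(version)) {
    version = `0.0.0-${version}`;
  }
  const cleaned = semver.clean(version);
  if (!cleaned) {
    throw new Error(`Bundle version ${version} is not in SemVer format.`);
  }
  return cleaned;
}

// convertToSemVerSketch("20200601") => "0.0.0-20200601"
// convertToSemVerSketch("1.2.3")    => "1.2.3"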
(0, ava_1.default)("getExtraOptions works for explicit paths", (t) => {
|
||||
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
|
||||
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
|
||||
@@ -289,30 +314,22 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
|
||||
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
|
||||
t.throws(() => codeql.getExtraOptions({ "*": [42], foo: { "*": 87, bar: [99] } }, ["foo", "bar"], []));
|
||||
});
|
||||
(0, ava_1.default)("getCodeQLActionRepository", (t) => {
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
(0, util_1.initializeEnvironment)("1.2.3");
|
||||
// isRunningLocalAction() === true
|
||||
delete process.env["GITHUB_ACTION_REPOSITORY"];
|
||||
process.env["RUNNER_TEMP"] = path.dirname(__dirname);
|
||||
const repoLocalRunner = codeql.getCodeQLActionRepository(logger);
|
||||
t.deepEqual(repoLocalRunner, "github/codeql-action");
|
||||
process.env["GITHUB_ACTION_REPOSITORY"] = "xxx/yyy";
|
||||
const repoEnv = codeql.getCodeQLActionRepository(logger);
|
||||
t.deepEqual(repoEnv, "xxx/yyy");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-query-help for 2.7.0", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
|
||||
});
|
||||
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
|
||||
@@ -320,6 +337,8 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.8.1");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
const thisStubConfig = {
|
||||
...stubConfig,
|
||||
tempDir,
|
||||
@@ -344,7 +363,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
|
||||
.resolves(feature_flags_1.featureConfig[feature_flags_1.Feature.CliConfigFileEnabled].minimumVersion);
|
||||
const thisStubConfig = {
|
||||
...stubConfig,
|
||||
...configOverride,
|
||||
@@ -374,7 +393,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
}, {}, {
|
||||
packs: ["codeql/javascript-experimental-atm-queries@~0.3.0"],
|
||||
packs: ["codeql/javascript-experimental-atm-queries@~0.4.0"],
|
||||
});
|
||||
(0, ava_1.default)("injected ML queries with existing packs", injectedConfigMacro, {
|
||||
injectedMlQueries: true,
|
||||
@@ -388,7 +407,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
packs: {
|
||||
javascript: [
|
||||
"codeql/something-else",
|
||||
"codeql/javascript-experimental-atm-queries@~0.3.0",
|
||||
"codeql/javascript-experimental-atm-queries@~0.4.0",
|
||||
],
|
||||
},
|
||||
});
|
||||
@@ -403,7 +422,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
}, {
|
||||
packs: {
|
||||
cpp: ["codeql/something-else"],
|
||||
javascript: ["codeql/javascript-experimental-atm-queries@~0.3.0"],
|
||||
javascript: ["codeql/javascript-experimental-atm-queries@~0.4.0"],
|
||||
},
|
||||
});
|
||||
(0, ava_1.default)("injected packs from input", injectedConfigMacro, {
|
||||
@@ -456,7 +475,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
},
|
||||
},
|
||||
}, {
|
||||
packs: ["xxx", "yyy", "codeql/javascript-experimental-atm-queries@~0.3.0"],
|
||||
packs: ["xxx", "yyy", "codeql/javascript-experimental-atm-queries@~0.4.0"],
|
||||
});
|
||||
// similar, but with queries
|
||||
(0, ava_1.default)("injected queries from input", injectedConfigMacro, {
|
||||
@@ -550,7 +569,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
|
||||
.resolves(feature_flags_1.featureConfig[feature_flags_1.Feature.CliConfigFileEnabled].minimumVersion);
|
||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should have used a config file
|
||||
@@ -561,24 +580,22 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
|
||||
}
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info when feature enabled", async (t) => {
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
||||
// The version of CodeQL is checked separately to determine feature enablement, and does not
|
||||
// otherwise impact this test, so set it to 0.0.0.
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.FileBaselineInformationEnabled]));
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.11.3");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info should be present, but it is absent");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info if feature disabled", async (t) => {
|
||||
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info for 2.11.2", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
||||
// The version of CodeQL is checked upstream to determine feature enablement, so it does not
|
||||
// affect this test.
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.11.2");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
|
||||
});
|
||||
function stubToolRunnerConstructor() {
|
||||
File diff suppressed because one or more lines are too long

lib/config-utils.js (generated): 89 changed lines
@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
|
||||
exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
// We need to import `performance` on Node 12
|
||||
@@ -240,8 +240,12 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
|
||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureEnablement, configFile);
|
||||
}
|
||||
// Otherwise, must be a reference to another repo
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||
// Otherwise, must be a reference to another repo.
|
||||
// If config parsing is handled in CLI, then this repo will be downloaded
|
||||
// later by the CLI.
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
// Regex validating stars in paths or paths-ignore entries.
|
||||
@@ -385,7 +389,8 @@ function getUnknownLanguagesError(languages) {
|
||||
}
|
||||
exports.getUnknownLanguagesError = getUnknownLanguagesError;
|
||||
/**
|
||||
* Gets the set of languages in the current repository
|
||||
* Gets the set of languages in the current repository that are
|
||||
* scannable by CodeQL.
|
||||
*/
|
||||
async function getLanguagesInRepo(repository, logger) {
|
||||
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
|
||||
@@ -407,6 +412,7 @@ async function getLanguagesInRepo(repository, logger) {
|
||||
}
|
||||
return [...languages];
|
||||
}
|
||||
exports.getLanguagesInRepo = getLanguagesInRepo;
|
||||
/**
|
||||
* Get the languages to analyse.
|
||||
*
|
||||
@@ -418,18 +424,16 @@ async function getLanguagesInRepo(repository, logger) {
|
||||
* then throw an error.
|
||||
*/
|
||||
async function getLanguages(codeQL, languagesInput, repository, logger) {
|
||||
// Obtain from action input 'languages' if set
|
||||
let languages = (languagesInput || "")
|
||||
.split(",")
|
||||
.map((x) => x.trim())
|
||||
.filter((x) => x.length > 0);
|
||||
logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
|
||||
if (languages.length === 0) {
|
||||
// Obtain languages as all languages in the repo that can be analysed
|
||||
languages = await getLanguagesInRepo(repository, logger);
|
||||
// Obtain languages without filtering them.
|
||||
const { rawLanguages, autodetected } = await getRawLanguages(languagesInput, repository, logger);
|
||||
let languages = rawLanguages.map(languages_1.resolveAlias);
|
||||
if (autodetected) {
|
||||
const availableLanguages = await codeQL.resolveLanguages();
|
||||
languages = languages.filter((value) => value in availableLanguages);
|
||||
logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
|
||||
logger.info(`Automatically detected languages: ${languages.join(", ")}`);
|
||||
}
|
||||
else {
|
||||
logger.info(`Languages from configuration: ${languages.join(", ")}`);
|
||||
}
|
||||
// If the languages parameter was not given and no languages were
|
||||
// detected then fail here as this is a workflow configuration error.
|
||||
@@ -440,19 +444,51 @@ async function getLanguages(codeQL, languagesInput, repository, logger) {
|
||||
const parsedLanguages = [];
|
||||
const unknownLanguages = [];
|
||||
for (const language of languages) {
|
||||
// We know this is not an alias since we resolved it above.
|
||||
const parsedLanguage = (0, languages_1.parseLanguage)(language);
|
||||
if (parsedLanguage === undefined) {
|
||||
unknownLanguages.push(language);
|
||||
}
|
||||
else if (parsedLanguages.indexOf(parsedLanguage) === -1) {
|
||||
else if (!parsedLanguages.includes(parsedLanguage)) {
|
||||
parsedLanguages.push(parsedLanguage);
|
||||
}
|
||||
}
|
||||
// Any unknown languages here would have come directly from the input
|
||||
// since we filter unknown languages coming from the GitHub API.
|
||||
if (unknownLanguages.length > 0) {
|
||||
throw new Error(getUnknownLanguagesError(unknownLanguages));
|
||||
}
|
||||
return parsedLanguages;
|
||||
}
|
||||
exports.getLanguages = getLanguages;
|
||||
/**
|
||||
* Gets the set of languages in the current repository without checking to
|
||||
* see if these languages are actually supported by CodeQL.
|
||||
*
|
||||
* @param languagesInput The languages from the workflow input
|
||||
* @param repository the owner/name of the repository
|
||||
* @param logger a logger
|
||||
* @returns A tuple containing a list of languages in this repository that might be
|
||||
* analyzable and whether or not this list was determined automatically.
|
||||
*/
|
||||
async function getRawLanguages(languagesInput, repository, logger) {
|
||||
// Obtain from action input 'languages' if set
|
||||
let rawLanguages = (languagesInput || "")
|
||||
.split(",")
|
||||
.map((x) => x.trim().toLowerCase())
|
||||
.filter((x) => x.length > 0);
|
||||
let autodetected;
|
||||
if (rawLanguages.length) {
|
||||
autodetected = false;
|
||||
}
|
||||
else {
|
||||
autodetected = true;
|
||||
// Obtain all languages in the repo that can be analysed
|
||||
rawLanguages = (await getLanguagesInRepo(repository, logger));
|
||||
}
|
||||
return { rawLanguages, autodetected };
|
||||
}
|
||||
exports.getRawLanguages = getRawLanguages;
|
||||
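A rough, standalone sketch of the parse-then-filter flow introduced by getRawLanguages and the reworked getLanguages above (the alias table below is an assumption for illustration; the real mapping lives in languages.ts):

// Not the action's code: a simplified model of the new language resolution.
const ALIASES = { typescript: "javascript", "c#": "csharp", c: "cpp", kotlin: "java" };

function resolveAliasSketch(lang) {
  return lang in ALIASES ? ALIASES[lang] : lang;
}

// Roughly getRawLanguages followed by alias resolution: trim, lowercase and
// split the workflow input, then map aliases onto CodeQL language names.
function parseLanguagesInput(languagesInput) {
  return (languagesInput || "")
    .split(",")
    .map((x) => x.trim().toLowerCase())
    .filter((x) => x.length > 0)
    .map(resolveAliasSketch);
}

// For autodetected languages, getLanguages additionally keeps only the
// languages that the CodeQL CLI reports via resolveLanguages().
function filterToResolvable(languages, resolvableLanguages) {
  return languages.filter((lang) => lang in resolvableLanguages);
}

console.log(parseLanguagesInput(" typEscript\n \t, C#, c , KoTlin"));
// => [ 'javascript', 'csharp', 'cpp', 'java' ]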
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger) {
|
||||
let injectedMlQueries = false;
|
||||
queriesInput = queriesInput.trim();
|
||||
@@ -893,22 +929,23 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
|
||||
else {
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
||||
}
|
||||
// The list of queries should not be empty for any language. If it is then
|
||||
// it is a user configuration error.
|
||||
for (const language of config.languages) {
|
||||
const hasBuiltinQueries = ((_a = config.queries[language]) === null || _a === void 0 ? void 0 : _a.builtin.length) > 0;
|
||||
const hasCustomQueries = ((_b = config.queries[language]) === null || _b === void 0 ? void 0 : _b.custom.length) > 0;
|
||||
const hasPacks = (((_c = config.packs[language]) === null || _c === void 0 ? void 0 : _c.length) || 0) > 0;
|
||||
if (!hasPacks && !hasBuiltinQueries && !hasCustomQueries) {
|
||||
throw new Error(`Did not detect any queries to run for ${language}. ` +
|
||||
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
||||
}
|
||||
}
|
||||
// When using the codescanning config in the CLI, pack downloads
|
||||
// happen in the CLI during the `database init` command, so no need
|
||||
// to download them here.
|
||||
await (0, util_1.logCodeScanningConfigInCli)(codeQL, featureEnablement, logger);
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
|
||||
// The list of queries should not be empty for any language. If it is then
|
||||
// it is a user configuration error.
|
||||
// This check occurs in the CLI when it parses the config file.
|
||||
for (const language of config.languages) {
|
||||
const hasBuiltinQueries = ((_a = config.queries[language]) === null || _a === void 0 ? void 0 : _a.builtin.length) > 0;
|
||||
const hasCustomQueries = ((_b = config.queries[language]) === null || _b === void 0 ? void 0 : _b.custom.length) > 0;
|
||||
const hasPacks = (((_c = config.packs[language]) === null || _c === void 0 ? void 0 : _c.length) || 0) > 0;
|
||||
if (!hasPacks && !hasBuiltinQueries && !hasCustomQueries) {
|
||||
throw new Error(`Did not detect any queries to run for ${language}. ` +
|
||||
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
||||
}
|
||||
}
|
||||
const registries = parseRegistries(registriesInput);
|
||||
await downloadPacks(codeQL, config.languages, config.packs, registries, apiDetails, config.tempDir, logger);
|
||||
}
|
||||
File diff suppressed because one or more lines are too long

lib/config-utils.test.js (generated): 89 changed lines
@@ -34,6 +34,7 @@ const configUtils = __importStar(require("./config-utils"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
@@ -933,11 +934,7 @@ function parseInputAndConfigMacro(t, packsFromConfig, packsFromInput, languages,
|
||||
languages, "/a/b", mockLogger), expected);
|
||||
}
|
||||
parseInputAndConfigMacro.title = (providedTitle) => `Parse Packs input and config: ${providedTitle}`;
|
||||
const mockLogger = {
|
||||
info: (message) => {
|
||||
console.log(message);
|
||||
},
|
||||
};
|
||||
const mockLogger = (0, logging_1.getRunnerLogger)(true);
|
||||
function parseInputAndConfigErrorMacro(t, packsFromConfig, packsFromInput, languages, packsFromInputOverride, expected) {
|
||||
t.throws(() => {
|
||||
configUtils.parsePacks(packsFromConfig, packsFromInput, packsFromInputOverride, languages, "/a/b", mockLogger);
|
||||
@@ -1237,4 +1234,86 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
}, { instanceOf: Error }, "Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
|
||||
});
|
||||
});
|
||||
// getLanguages
|
||||
const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||
// eslint-disable-next-line github/array-foreach
|
||||
[
|
||||
{
|
||||
name: "languages from input",
|
||||
codeqlResolvedLanguages: ["javascript", "java", "python"],
|
||||
languagesInput: "jAvAscript, \n jaVa",
|
||||
languagesInRepository: ["SwiFt", "other"],
|
||||
expectedLanguages: ["javascript", "java"],
|
||||
expectedApiCall: false,
|
||||
},
|
||||
{
|
||||
name: "languages from github api",
|
||||
codeqlResolvedLanguages: ["javascript", "java", "python"],
|
||||
languagesInput: "",
|
||||
languagesInRepository: [" jAvAscript\n \t", " jaVa", "SwiFt", "other"],
|
||||
expectedLanguages: ["javascript", "java"],
|
||||
expectedApiCall: true,
|
||||
},
|
||||
{
|
||||
name: "aliases from input",
|
||||
codeqlResolvedLanguages: ["javascript", "csharp", "cpp", "java", "python"],
|
||||
languagesInput: " typEscript\n \t, C#, c , KoTlin",
|
||||
languagesInRepository: ["SwiFt", "other"],
|
||||
expectedLanguages: ["javascript", "csharp", "cpp", "java"],
|
||||
expectedApiCall: false,
|
||||
},
|
||||
{
|
||||
name: "duplicate languages from input",
|
||||
codeqlResolvedLanguages: ["javascript", "java", "python"],
|
||||
languagesInput: "jAvAscript, \n jaVa, kotlin, typescript",
|
||||
languagesInRepository: ["SwiFt", "other"],
|
||||
expectedLanguages: ["javascript", "java"],
|
||||
expectedApiCall: false,
|
||||
},
|
||||
{
|
||||
name: "aliases from github api",
|
||||
codeqlResolvedLanguages: ["javascript", "csharp", "cpp", "java", "python"],
|
||||
languagesInput: "",
|
||||
languagesInRepository: [" typEscript\n \t", " C#", "c", "other"],
|
||||
expectedLanguages: ["javascript", "csharp", "cpp"],
|
||||
expectedApiCall: true,
|
||||
},
|
||||
{
|
||||
name: "no languages",
|
||||
codeqlResolvedLanguages: ["javascript", "java", "python"],
|
||||
languagesInput: "",
|
||||
languagesInRepository: [],
|
||||
expectedApiCall: true,
|
||||
expectedError: configUtils.getNoLanguagesError(),
|
||||
},
|
||||
{
|
||||
name: "unrecognized languages from input",
|
||||
codeqlResolvedLanguages: ["javascript", "java", "python"],
|
||||
languagesInput: "a, b, c, javascript",
|
||||
languagesInRepository: [],
|
||||
expectedApiCall: false,
|
||||
expectedError: configUtils.getUnknownLanguagesError(["a", "b"]),
|
||||
},
|
||||
].forEach((args) => {
|
||||
(0, ava_1.default)(`getLanguages: ${args.name}`, async (t) => {
|
||||
const mockRequest = (0, testing_utils_1.mockLanguagesInRepo)(args.languagesInRepository);
|
||||
const languages = args.codeqlResolvedLanguages.reduce((acc, lang) => ({
|
||||
...acc,
|
||||
[lang]: true,
|
||||
}), {});
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
resolveLanguages: () => Promise.resolve(languages),
|
||||
});
|
||||
if (args.expectedLanguages) {
|
||||
// happy path
|
||||
const actualLanguages = await configUtils.getLanguages(codeQL, args.languagesInput, mockRepositoryNwo, mockLogger);
|
||||
t.deepEqual(actualLanguages.sort(), args.expectedLanguages.sort());
|
||||
}
|
||||
else {
|
||||
// there is an error
|
||||
await t.throwsAsync(async () => await configUtils.getLanguages(codeQL, args.languagesInput, mockRepositoryNwo, mockLogger), { message: args.expectedError });
|
||||
}
|
||||
t.deepEqual(mockRequest.called, args.expectedApiCall);
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=config-utils.test.js.map
|
||||
File diff suppressed because one or more lines are too long

lib/database-upload.js (generated): 42 changed lines
@@ -44,24 +44,32 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
||||
const client = (0, api_client_1.getApiClient)();
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
for (const language of config.languages) {
|
||||
// Upload the database bundle.
|
||||
// Although we are uploading arbitrary file contents to the API, it's worth
|
||||
// noting that it's the API's job to validate that the contents is acceptable.
|
||||
// This API method is available to anyone with write access to the repo.
|
||||
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql, language));
|
||||
try {
|
||||
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
language,
|
||||
name: `${language}-database`,
|
||||
data: payload,
|
||||
headers: {
|
||||
authorization: `token ${apiDetails.auth}`,
|
||||
"Content-Type": "application/zip",
|
||||
},
|
||||
});
|
||||
logger.debug(`Successfully uploaded database for ${language}`);
|
||||
// Upload the database bundle.
|
||||
// Although we are uploading arbitrary file contents to the API, it's worth
|
||||
// noting that it's the API's job to validate that the contents is acceptable.
|
||||
// This API method is available to anyone with write access to the repo.
|
||||
const bundledDb = await (0, util_1.bundleDb)(config, language, codeql, language);
|
||||
const bundledDbSize = fs.statSync(bundledDb).size;
|
||||
const bundledDbReadStream = fs.createReadStream(bundledDb);
|
||||
try {
|
||||
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
language,
|
||||
name: `${language}-database`,
|
||||
data: bundledDbReadStream,
|
||||
headers: {
|
||||
authorization: `token ${apiDetails.auth}`,
|
||||
"Content-Type": "application/zip",
|
||||
"Content-Length": bundledDbSize,
|
||||
},
|
||||
});
|
||||
logger.debug(`Successfully uploaded database for ${language}`);
|
||||
}
|
||||
finally {
|
||||
bundledDbReadStream.close();
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
console.log(e);
|
||||
|
||||
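The hunk above replaces the in-memory fs.readFileSync payload with a read stream plus an explicit Content-Length header. A minimal sketch of that pattern, assuming an Octokit-style client.request as used in the diff (the function name, URL and parameters here are placeholders):

const fs = require("fs");

// Stream a bundled CodeQL database to an upload endpoint instead of buffering
// the whole file, sending Content-Length explicitly because the body is a stream.
async function uploadBundledDbSketch(client, bundledDbPath, requestUrl, token) {
  const bundledDbSize = fs.statSync(bundledDbPath).size;
  const bundledDbReadStream = fs.createReadStream(bundledDbPath);
  try {
    await client.request(`POST ${requestUrl}`, {
      data: bundledDbReadStream,
      headers: {
        authorization: `token ${token}`,
        "Content-Type": "application/zip",
        "Content-Length": bundledDbSize,
      },
    });
  } finally {
    // Release the file handle even if the request throws.
    bundledDbReadStream.close();
  }
}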
@@ -1 +1 @@
|
||||
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAC9B,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,8BAA8B;QAC9B,2EAA2E;QAC3E,8EAA8E;QAC9E,wEAAwE;QACxE,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAC7B,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CACnD,CAAC;QACF,IAAI;YACF,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;gBACE,KAAK,EAAE,aAAa,CAAC,KAAK;gBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;gBACxB,QAAQ;gBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;gBAC5B,IAAI,EAAE,OAAO;gBACb,OAAO,EAAE;oBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;oBACzC,cAAc,EAAE,iBAAiB;iBAClC;aACF,CACF,CAAC;YACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAxDD,0CAwDC"}
|
||||
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAC9B,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI;YACF,8BAA8B;YAC9B,2EAA2E;YAC3E,8EAA8E;YAC9E,wEAAwE;YACxE,MAAM,SAAS,GAAG,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;YACrE,MAAM,aAAa,GAAG,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC;YAClD,MAAM,mBAAmB,GAAG,EAAE,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAC;YAC3D,IAAI;gBACF,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;oBACE,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,IAAI,EAAE,mBAAmB;oBACzB,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;wBACzC,cAAc,EAAE,iBAAiB;wBACjC,gBAAgB,EAAE,aAAa;qBAChC;iBACF,CACF,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;aAChE;oBAAS;gBACR,mBAAmB,CAAC,KAAK,EAAE,CAAC;aAC7B;SACF;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AA7DD,0CA6DC"}
|
||||
@@ -1,3 +1,6 @@
|
||||
{
|
||||
"bundleVersion": "codeql-bundle-20221105"
|
||||
"bundleVersion": "codeql-bundle-20230105",
|
||||
"cliVersion": "2.12.0",
|
||||
"priorBundleVersion": "codeql-bundle-20221211",
|
||||
"priorCliVersion": "2.11.6"
|
||||
}
|
||||
lib/error-matcher.js (generated): 4 changed lines
@@ -12,6 +12,10 @@ exports.namedMatchersForTesting = {
|
||||
message: "No code found during the build. Please see:\n" +
|
||||
"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
|
||||
},
|
||||
fatalError: {
|
||||
outputRegex: new RegExp("A fatal error occurred"),
|
||||
message: "A fatal error occurred.",
|
||||
},
|
||||
};
|
||||
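A quick, illustrative check that the new fatalError matcher fires on a representative log line (the sample string is the one used in the error-matcher test further down; this snippet is not part of the diff):

// The matcher's regex, as added above, against a sample fatal-error log line.
const fatalErrorRegex = new RegExp("A fatal error occurred");
console.log(
  fatalErrorRegex.test(
    "A fatal error occurred: Could not process query metadata for test-query.ql"
  )
); // => true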
// we collapse the matches into an array for use in execErrorCatcher
|
||||
exports.errorMatchers = Object.values(exports.namedMatchersForTesting);
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
|
||||
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;IACD,UAAU,EAAE;QACV,WAAW,EAAE,IAAI,MAAM,CAAC,wBAAwB,CAAC;QACjD,OAAO,EAAE,yBAAyB;KACnC;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
|
||||
lib/error-matcher.test.js (generated): 3 changed lines
@@ -16,6 +16,9 @@ NB We test the regexes for all the matchers against example log output snippets.
|
||||
2020-09-07T17:39:53.9251124Z [2020-09-07 17:39:53] [ERROR] Spawned process exited abnormally (code 255; tried to run: [/opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/autobuild.sh])
|
||||
`));
|
||||
});
|
||||
(0, ava_1.default)("fatalError matches against example log output", async (t) => {
|
||||
t.assert(testErrorMatcher("fatalError", "A fatal error occurred: Could not process query metadata for test-query.ql"));
|
||||
});
|
||||
function testErrorMatcher(matcherName, logSample) {
|
||||
if (!(matcherName in error_matcher_1.namedMatchersForTesting)) {
|
||||
throw new Error(`Unknown matcher ${matcherName}`);
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,IAAA,aAAI,EAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}
|
||||
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,IAAA,aAAI,EAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,+CAA+C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChE,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,YAAY,EACZ,4EAA4E,CAC7E,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}
|
||||
lib/feature-flags.js (generated): 77 changed lines
@@ -22,20 +22,34 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Features = exports.FEATURE_FLAGS_FILE_NAME = exports.featureConfig = exports.Feature = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const api_client_1 = require("./api-client");
|
||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||
const util = __importStar(require("./util"));
|
||||
const DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_";
|
||||
const DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
|
||||
const MINIMUM_ENABLED_CODEQL_VERSION = "2.11.6";
|
||||
var Feature;
|
||||
(function (Feature) {
|
||||
Feature["BypassToolcacheEnabled"] = "bypass_toolcache_enabled";
|
||||
Feature["BypassToolcacheKotlinSwiftEnabled"] = "bypass_toolcache_kotlin_swift_enabled";
|
||||
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
|
||||
Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
|
||||
Feature["FileBaselineInformationEnabled"] = "file_baseline_information_enabled";
|
||||
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
||||
Feature["TrapCachingEnabled"] = "trap_caching_enabled";
|
||||
Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
|
||||
})(Feature = exports.Feature || (exports.Feature = {}));
|
||||
exports.featureConfig = {
|
||||
[Feature.BypassToolcacheEnabled]: {
|
||||
envVar: "CODEQL_BYPASS_TOOLCACHE",
|
||||
// Cannot specify a minimum version because this flag is checked before we have
|
||||
// access to the CodeQL instance.
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.BypassToolcacheKotlinSwiftEnabled]: {
|
||||
envVar: "CODEQL_BYPASS_TOOLCACHE_KOTLIN_SWIFT",
|
||||
// Cannot specify a minimum version because this flag is checked before we have
|
||||
// access to the CodeQL instance.
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.DisableKotlinAnalysisEnabled]: {
|
||||
@@ -44,11 +58,7 @@ exports.featureConfig = {
|
||||
},
|
||||
[Feature.CliConfigFileEnabled]: {
|
||||
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
|
||||
minimumVersion: "2.11.1",
|
||||
},
|
||||
[Feature.FileBaselineInformationEnabled]: {
|
||||
envVar: "CODEQL_FILE_BASELINE_INFORMATION",
|
||||
minimumVersion: "2.11.3",
|
||||
minimumVersion: "2.11.6",
|
||||
},
|
||||
[Feature.MlPoweredQueriesEnabled]: {
|
||||
envVar: "CODEQL_ML_POWERED_QUERIES",
|
||||
@@ -58,6 +68,10 @@ exports.featureConfig = {
|
||||
envVar: "CODEQL_TRAP_CACHING",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.UploadFailedSarifEnabled]: {
|
||||
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
|
||||
minimumVersion: "2.11.3",
|
||||
},
|
||||
};
|
||||
exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
|
||||
/**
|
||||
@@ -69,6 +83,9 @@ class Features {
|
||||
constructor(gitHubVersion, repositoryNwo, tempDir, logger) {
|
||||
this.gitHubFeatureFlags = new GitHubFeatureFlags(gitHubVersion, repositoryNwo, path.join(tempDir, exports.FEATURE_FLAGS_FILE_NAME), logger);
|
||||
}
|
||||
async getDefaultCliVersion(variant) {
|
||||
return await this.gitHubFeatureFlags.getDefaultCliVersion(variant);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @param feature The feature to check.
|
||||
@@ -118,6 +135,48 @@ class GitHubFeatureFlags {
|
||||
this.logger = logger;
|
||||
/**/
|
||||
}
|
||||
getCliVersionFromFeatureFlag(f) {
|
||||
if (!f.startsWith(DEFAULT_VERSION_FEATURE_FLAG_PREFIX) ||
|
||||
!f.endsWith(DEFAULT_VERSION_FEATURE_FLAG_SUFFIX)) {
|
||||
return undefined;
|
||||
}
|
||||
const version = f
|
||||
.substring(DEFAULT_VERSION_FEATURE_FLAG_PREFIX.length, f.length - DEFAULT_VERSION_FEATURE_FLAG_SUFFIX.length)
|
||||
.replace(/_/g, ".");
|
||||
if (!semver.valid(version)) {
|
||||
this.logger.warning(`Ignoring feature flag ${f} as it does not specify a valid CodeQL version.`);
|
||||
return undefined;
|
||||
}
|
||||
return version;
|
||||
}
|
||||
async getDefaultCliVersion(variant) {
|
||||
if (variant === util.GitHubVariant.DOTCOM) {
|
||||
return {
|
||||
cliVersion: await this.getDefaultDotcomCliVersion(),
|
||||
variant,
|
||||
};
|
||||
}
|
||||
return {
|
||||
cliVersion: defaults.cliVersion,
|
||||
tagName: defaults.bundleVersion,
|
||||
variant,
|
||||
};
|
||||
}
|
||||
async getDefaultDotcomCliVersion() {
|
||||
const response = await this.getAllFeatures();
|
||||
const enabledFeatureFlagCliVersions = Object.entries(response)
|
||||
.map(([f, isEnabled]) => isEnabled ? this.getCliVersionFromFeatureFlag(f) : undefined)
|
||||
.filter((f) => f !== undefined)
|
||||
.map((f) => f);
|
||||
if (enabledFeatureFlagCliVersions.length === 0) {
|
||||
this.logger.debug("Feature flags do not specify a default CLI version. Falling back to CLI version " +
|
||||
`${MINIMUM_ENABLED_CODEQL_VERSION}.`);
|
||||
return MINIMUM_ENABLED_CODEQL_VERSION;
|
||||
}
|
||||
const maxCliVersion = enabledFeatureFlagCliVersions.reduce((maxVersion, currentVersion) => currentVersion > maxVersion ? currentVersion : maxVersion, enabledFeatureFlagCliVersions[0]);
|
||||
this.logger.debug(`Derived default CLI version of ${maxCliVersion} from feature flags.`);
|
||||
return maxCliVersion;
|
||||
}
|
||||
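A standalone sketch of the default-version derivation added above: flag names like default_codeql_version_2_12_1_enabled are parsed back into CLI versions and the highest enabled one wins (function names here are made up; the real logic is getCliVersionFromFeatureFlag and getDefaultDotcomCliVersion):

const semver = require("semver");

// Extract a CLI version from a flag named like "default_codeql_version_2_12_1_enabled".
function cliVersionFromFlagName(flagName) {
  const match = /^default_codeql_version_(.+)_enabled$/.exec(flagName);
  if (!match) {
    return undefined;
  }
  const version = match[1].replace(/_/g, ".");
  return semver.valid(version) ? version : undefined;
}

// Pick the highest version among the enabled default-version flags.
function defaultCliVersionSketch(flags) {
  const versions = Object.entries(flags)
    .filter(([, enabled]) => enabled)
    .map(([name]) => cliVersionFromFlagName(name))
    .filter((v) => v !== undefined);
  return versions.length === 0 ? undefined : versions.sort(semver.compare).pop();
}

// defaultCliVersionSketch({
//   default_codeql_version_2_12_0_enabled: true,
//   default_codeql_version_2_12_1_enabled: true,
//   default_codeql_version_2_12_2_enabled: false,
// }) => "2.12.1"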
async getValue(feature) {
|
||||
const response = await this.getAllFeatures();
|
||||
if (response === undefined) {
|
||||
@@ -185,7 +244,10 @@ class GitHubFeatureFlags {
|
||||
owner: this.repositoryNwo.owner,
|
||||
repo: this.repositoryNwo.repo,
|
||||
});
|
||||
return response.data;
|
||||
const remoteFlags = response.data;
|
||||
this.logger.debug("Loaded the following default values for the feature flags from the Code Scanning API: " +
|
||||
`${JSON.stringify(remoteFlags)}`);
|
||||
return remoteFlags;
|
||||
}
|
||||
catch (e) {
|
||||
if (util.isHTTPError(e) && e.status === 403) {
|
||||
@@ -193,6 +255,7 @@ class GitHubFeatureFlags {
|
||||
"As a result, it will not be opted into any experimental features. " +
|
||||
"This could be because the Action is running on a pull request from a fork. If not, " +
|
||||
`please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
|
||||
return {};
|
||||
}
|
||||
else {
|
||||
// Some features, such as `ml_powered_queries_enabled` affect the produced alerts.
|
||||
|
File diff suppressed because one or more lines are too long

lib/feature-flags.test.js (generated): 60 changed lines
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
@@ -208,6 +209,65 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||
t.false(await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be disabled after setting env var");
|
||||
});
|
||||
});
|
||||
for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
||||
(0, ava_1.default)(`selects CLI from defaults.json on ${util_1.GitHubVariant[variant]}`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
t.deepEqual(await features.getDefaultCliVersion(variant), {
|
||||
cliVersion: defaults.cliVersion,
|
||||
tagName: defaults.bundleVersion,
|
||||
variant,
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
(0, ava_1.default)("selects CLI v2.12.1 on Dotcom when feature flags enable v2.12.0 and v2.12.1", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_0_enabled"] = true;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_1_enabled"] = true;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_2_enabled"] = false;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_3_enabled"] = false;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_4_enabled"] = false;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_5_enabled"] = false;
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
t.deepEqual(await featureEnablement.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM), {
|
||||
cliVersion: "2.12.1",
|
||||
variant: util_1.GitHubVariant.DOTCOM,
|
||||
});
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)(`selects CLI v2.11.6 on Dotcom when no default version feature flags are enabled`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
t.deepEqual(await featureEnablement.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM), {
|
||||
cliVersion: "2.11.6",
|
||||
variant: util_1.GitHubVariant.DOTCOM,
|
||||
});
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("ignores invalid version numbers in default version feature flags", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const loggedMessages = [];
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_0_enabled"] = true;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_1_enabled"] = true;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_invalid_enabled"] =
|
||||
true;
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
t.deepEqual(await featureEnablement.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM), {
|
||||
cliVersion: "2.12.1",
|
||||
variant: util_1.GitHubVariant.DOTCOM,
|
||||
});
|
||||
t.assert(loggedMessages.find((v) => v.type === "warning" &&
|
||||
v.message ===
|
||||
"Ignoring feature flag default_codeql_version_2_12_invalid_enabled as it does not specify a valid CodeQL version.") !== undefined);
|
||||
});
|
||||
});
|
||||
function assertAllFeaturesUndefinedInApi(t, loggedMessages) {
|
||||
for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
File diff suppressed because one or more lines are too long

lib/init-action-post-helper.js (generated): 80 changed lines
@@ -19,24 +19,94 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.run = void 0;
|
||||
exports.run = exports.tryUploadSarifIfRunFailed = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const config_utils_1 = require("./config-utils");
|
||||
const logging_1 = require("./logging");
|
||||
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs) {
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const shared_environment_1 = require("./shared-environment");
|
||||
const uploadLib = __importStar(require("./upload-lib"));
|
||||
const util_1 = require("./util");
|
||||
const workflow_1 = require("./workflow");
|
||||
function createFailedUploadFailedSarifResult(error) {
|
||||
return {
|
||||
upload_failed_run_error: error instanceof Error ? error.message : String(error),
|
||||
upload_failed_run_stack_trace: error instanceof Error ? error.stack : undefined,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Upload a failed SARIF file if we can verify that SARIF upload is enabled and determine the SARIF
|
||||
* category for the workflow.
|
||||
*/
|
||||
async function maybeUploadFailedSarif(config, repositoryNwo, featureEnablement, logger) {
|
||||
var _a;
|
||||
if (!config.codeQLCmd) {
|
||||
return { upload_failed_run_skipped_because: "CodeQL command not found" };
|
||||
}
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
if (!(await featureEnablement.getValue(feature_flags_1.Feature.UploadFailedSarifEnabled, codeql))) {
|
||||
return { upload_failed_run_skipped_because: "Feature disabled" };
|
||||
}
|
||||
const workflow = await (0, workflow_1.getWorkflow)();
|
||||
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||
const matrix = (0, util_1.parseMatrixInput)(actionsUtil.getRequiredInput("matrix"));
|
||||
if ((0, workflow_1.getUploadInputOrThrow)(workflow, jobName, matrix) !== "true" ||
|
||||
(0, util_1.isInTestMode)()) {
|
||||
return { upload_failed_run_skipped_because: "SARIF upload is disabled" };
|
||||
}
|
||||
const category = (0, workflow_1.getCategoryInputOrThrow)(workflow, jobName, matrix);
|
||||
const checkoutPath = (0, workflow_1.getCheckoutPathInputOrThrow)(workflow, jobName, matrix);
|
||||
const sarifFile = "../codeql-failed-run.sarif";
|
||||
await codeql.diagnosticsExport(sarifFile, category);
|
||||
core.info(`Uploading failed SARIF file ${sarifFile}`);
|
||||
const uploadResult = await uploadLib.uploadFromActions(sarifFile, checkoutPath, category, logger);
|
||||
await uploadLib.waitForProcessing(repositoryNwo, uploadResult.sarifID, logger, { isUnsuccessfulExecution: true });
|
||||
return (_a = uploadResult === null || uploadResult === void 0 ? void 0 : uploadResult.statusReport) !== null && _a !== void 0 ? _a : {};
|
||||
}
|
||||
async function tryUploadSarifIfRunFailed(config, repositoryNwo, featureEnablement, logger) {
|
||||
if (process.env[shared_environment_1.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY] !== "true") {
|
||||
try {
|
||||
return await maybeUploadFailedSarif(config, repositoryNwo, featureEnablement, logger);
|
||||
}
|
||||
catch (e) {
|
||||
logger.debug(`Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`);
|
||||
return createFailedUploadFailedSarifResult(e);
|
||||
}
|
||||
}
|
||||
else {
|
||||
return {
|
||||
upload_failed_run_skipped_because: "Analyze Action completed successfully",
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.tryUploadSarifIfRunFailed = tryUploadSarifIfRunFailed;
|
||||
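A condensed sketch of the gating in tryUploadSarifIfRunFailed above: the failed-run SARIF upload only happens when the analyze step did not report success, and any upload error is folded into a structured status result (the helper passed in here is a placeholder, not the action's API):

// Not the action's code: the shape of the control flow above.
async function tryUploadIfRunFailedSketch(analyzeSucceeded, uploadFailedSarif, logger) {
  if (analyzeSucceeded) {
    return {
      upload_failed_run_skipped_because: "Analyze Action completed successfully",
    };
  }
  try {
    return await uploadFailedSarif();
  } catch (e) {
    logger.debug(`Failed to upload a SARIF file for this failed run. ${e}`);
    return {
      upload_failed_run_error: e instanceof Error ? e.message : String(e),
      upload_failed_run_stack_trace: e instanceof Error ? e.stack : undefined,
    };
  }
}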
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs, repositoryNwo, featureEnablement, logger) {
|
||||
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
|
||||
if (config === undefined) {
|
||||
logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
|
||||
return;
|
||||
}
|
||||
const uploadFailedSarifResult = await tryUploadSarifIfRunFailed(config, repositoryNwo, featureEnablement, logger);
|
||||
if (uploadFailedSarifResult.upload_failed_run_skipped_because) {
|
||||
logger.debug("Won't upload a failed SARIF file for this CodeQL code scanning run because: " +
|
||||
`${uploadFailedSarifResult.upload_failed_run_skipped_because}.`);
|
||||
}
|
||||
// Throw an error if in integration tests, we expected to upload a SARIF file for a failed run
|
||||
// but we didn't upload anything.
|
||||
if (process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true" &&
|
||||
!uploadFailedSarifResult.raw_upload_size_bytes) {
|
||||
throw new Error("Expected to upload a failed SARIF file for this CodeQL code scanning run, " +
|
||||
`but the result was instead ${uploadFailedSarifResult}.`);
|
||||
}
|
||||
// Upload appropriate Actions artifacts for debugging
|
||||
if (config === null || config === void 0 ? void 0 : config.debugMode) {
|
||||
if (config.debugMode) {
|
||||
core.info("Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts...");
|
||||
await uploadDatabaseBundleDebugArtifact(config, logger);
|
||||
await uploadLogsDebugArtifact(config);
|
||||
await printDebugLogs(config);
|
||||
}
|
||||
return uploadFailedSarifResult;
|
||||
}
|
||||
exports.run = run;
|
||||
//# sourceMappingURL=init-action-post-helper.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,uCAA6C;AAEtC,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB;IAExB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;AACH,CAAC;AAxBD,kBAwBC"}
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAAuF;AACvF,wDAA0C;AAC1C,iCAA6E;AAC7E,yCAKoB;AAWpB,SAAS,mCAAmC,CAC1C,KAAc;IAEd,OAAO;QACL,uBAAuB,EACrB,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;QACxD,6BAA6B,EAC3B,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS;KACnD,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IACE,CAAC,CAAC,MAAM,iBAAiB,CAAC,QAAQ,CAChC,uBAAO,CAAC,wBAAwB,EAChC,MAAM,CACP,CAAC,EACF;QACA,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,CAAC;KAClE;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,GAAE,CAAC;IACrC,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,IACE,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,KAAK,MAAM;QAC3D,IAAA,mBAAY,GAAE,EACd;QACA,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAE5E,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAC/C,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IAEpD,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;IACF,OAAO,MAAA,YAAY,aAAZ,YAAY,uBAAZ,YAAY,CAAE,YAAY,mCAAI,EAAE,CAAC;AAC1C,CAAC;AAEM,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,IAAI,OAAO,CAAC,GAAG,CAAC,oEAA+C,CAAC,KAAK,MAAM,EAAE;QAC3E,IAAI;YACF,OAAO,MAAM,sBAAsB,CACjC,MAAM,EACN,aAAa,EACb,iBAAiB,EACjB,MAAM,CACP,CAAC;SACH;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,KAAK,CACV,2EAA2E,CAAC,EAAE,CAC/E,CAAC;YACF,OAAO,mCAAmC,CAAC,CAAC,CAAC,CAAC;SAC/C;KACF;SAAM;QACL,OAAO;YACL,iCAAiC,EAC/B,uCAAuC;SAC1C,CAAC;KACH;AACH,CAAC;AA1BD,8DA0BC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,MAAM,uBAAuB,GAAG,MAAM,yBAAyB,CAC7D,MAAM,EACN,aAAa,EACb,iBAAiB,EACjB,MAAM,CACP,CAAC;IACF,IAAI,uBAAuB,CAAC,iCAAiC,EAAE;QAC7D,MAAM,CAAC,KAAK,CACV,8EAA8E;YAC5E,GAAG,uBAAuB,CAAC,iCAAiC,GAAG,CAClE,CAAC;KACH;IACD,8FAA8F;IAC9F,iCAAiC;IACjC,IACE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM;QAClE,CAAC,uBAAuB,CAAC,qBAAqB,EAC9C;QACA,MAAM,IAAI,KAAK,CACb,4EAA4E;YAC1E,8BAA8B,uBAAuB,GAAG,CAC3D,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;IAED,OAAO,uBAAuB,CAAC;AACjC,CAAC;AApDD,kBAoDC"}
196
lib/init-action-post-helper.test.js
generated
@@ -24,13 +24,21 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const codeql = __importStar(require("./codeql"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const uploadLib = __importStar(require("./upload-lib"));
|
||||
const util = __importStar(require("./util"));
|
||||
const workflow = __importStar(require("./workflow"));
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
(0, ava_1.default)("post: init action with debug mode off", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["RUNNER_TEMP"] = tmpDir;
|
||||
const gitHubVersion = {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
@@ -44,7 +52,7 @@ const util = __importStar(require("./util"));
|
||||
const uploadDatabaseBundleSpy = sinon.spy();
|
||||
const uploadLogsSpy = sinon.spy();
|
||||
const printDebugLogsSpy = sinon.spy();
|
||||
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy);
|
||||
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.assert(uploadDatabaseBundleSpy.notCalled);
|
||||
t.assert(uploadLogsSpy.notCalled);
|
||||
t.assert(printDebugLogsSpy.notCalled);
|
||||
@@ -52,6 +60,7 @@ const util = __importStar(require("./util"));
|
||||
});
|
||||
(0, ava_1.default)("post: init action with debug mode on", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["RUNNER_TEMP"] = tmpDir;
|
||||
const gitHubVersion = {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
@@ -65,10 +74,193 @@ const util = __importStar(require("./util"));
|
||||
const uploadDatabaseBundleSpy = sinon.spy();
|
||||
const uploadLogsSpy = sinon.spy();
|
||||
const printDebugLogsSpy = sinon.spy();
|
||||
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy);
|
||||
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.assert(uploadDatabaseBundleSpy.called);
|
||||
t.assert(uploadLogsSpy.called);
|
||||
t.assert(printDebugLogsSpy.called);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("uploads failed SARIF run for typical workflow", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
languages: "javascript",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Perform CodeQL Analysis",
|
||||
uses: "github/codeql-action/analyze@v2",
|
||||
with: {
|
||||
category: "my-category",
|
||||
},
|
||||
},
|
||||
]);
|
||||
await testFailedSarifUpload(t, actionsWorkflow, { category: "my-category" });
|
||||
});
|
||||
(0, ava_1.default)("doesn't upload failed SARIF for workflow with upload: false", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
languages: "javascript",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Perform CodeQL Analysis",
|
||||
uses: "github/codeql-action/analyze@v2",
|
||||
with: {
|
||||
category: "my-category",
|
||||
upload: false,
|
||||
},
|
||||
},
|
||||
]);
|
||||
const result = await testFailedSarifUpload(t, actionsWorkflow, {
|
||||
expectUpload: false,
|
||||
});
|
||||
t.is(result.upload_failed_run_skipped_because, "SARIF upload is disabled");
|
||||
});
|
||||
(0, ava_1.default)("uploading failed SARIF run succeeds when workflow uses an input with a matrix var", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
languages: "javascript",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Perform CodeQL Analysis",
|
||||
uses: "github/codeql-action/analyze@v2",
|
||||
with: {
|
||||
category: "/language:${{ matrix.language }}",
|
||||
},
|
||||
},
|
||||
]);
|
||||
await testFailedSarifUpload(t, actionsWorkflow, {
|
||||
category: "/language:csharp",
|
||||
matrix: { language: "csharp" },
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("uploading failed SARIF run fails when workflow uses a complex upload input", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
languages: "javascript",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Perform CodeQL Analysis",
|
||||
uses: "github/codeql-action/analyze@v2",
|
||||
with: {
|
||||
upload: "${{ matrix.language != 'csharp' }}",
|
||||
},
|
||||
},
|
||||
]);
|
||||
const result = await testFailedSarifUpload(t, actionsWorkflow, {
|
||||
expectUpload: false,
|
||||
});
|
||||
t.is(result.upload_failed_run_error, "Could not get upload input to github/codeql-action/analyze since it contained an " +
|
||||
"unrecognized dynamic value.");
|
||||
});
|
||||
(0, ava_1.default)("uploading failed SARIF run fails when workflow does not reference github/codeql-action", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
]);
|
||||
const result = await testFailedSarifUpload(t, actionsWorkflow, {
|
||||
expectUpload: false,
|
||||
});
|
||||
t.is(result.upload_failed_run_error, "Could not get upload input to github/codeql-action/analyze since the analyze job does not " +
|
||||
"call github/codeql-action/analyze.");
|
||||
t.truthy(result.upload_failed_run_stack_trace);
|
||||
});
|
||||
function createTestWorkflow(steps) {
|
||||
return {
|
||||
name: "CodeQL",
|
||||
on: {
|
||||
push: {
|
||||
branches: ["main"],
|
||||
},
|
||||
pull_request: {
|
||||
branches: ["main"],
|
||||
},
|
||||
},
|
||||
jobs: {
|
||||
analyze: {
|
||||
name: "CodeQL Analysis",
|
||||
"runs-on": "ubuntu-latest",
|
||||
steps,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
async function testFailedSarifUpload(t, actionsWorkflow, { category, expectUpload = true, matrix = {}, } = {}) {
|
||||
const config = {
|
||||
codeQLCmd: "codeql",
|
||||
debugMode: true,
|
||||
languages: [],
|
||||
packs: [],
|
||||
};
|
||||
process.env["GITHUB_JOB"] = "analyze";
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["GITHUB_WORKSPACE"] =
|
||||
"/home/runner/work/codeql-action/codeql-action";
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("matrix")
|
||||
.returns(JSON.stringify(matrix));
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeql, "getCodeQL").resolves(codeqlObject);
|
||||
const diagnosticsExportStub = sinon.stub(codeqlObject, "diagnosticsExport");
|
||||
sinon.stub(workflow, "getWorkflow").resolves(actionsWorkflow);
|
||||
const uploadFromActions = sinon.stub(uploadLib, "uploadFromActions");
|
||||
uploadFromActions.resolves({
|
||||
sarifID: "42",
|
||||
statusReport: { raw_upload_size_bytes: 20, zipped_upload_size_bytes: 10 },
|
||||
});
|
||||
const waitForProcessing = sinon.stub(uploadLib, "waitForProcessing");
|
||||
const result = await initActionPostHelper.tryUploadSarifIfRunFailed(config, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.UploadFailedSarifEnabled]), (0, logging_1.getRunnerLogger)(true));
|
||||
if (expectUpload) {
|
||||
t.deepEqual(result, {
|
||||
raw_upload_size_bytes: 20,
|
||||
zipped_upload_size_bytes: 10,
|
||||
});
|
||||
}
|
||||
if (expectUpload) {
|
||||
t.true(diagnosticsExportStub.calledOnceWith(sinon.match.string, category), `Actual args were: ${diagnosticsExportStub.args}`);
|
||||
t.true(uploadFromActions.calledOnceWith(sinon.match.string, sinon.match.string, category, sinon.match.any), `Actual args were: ${uploadFromActions.args}`);
|
||||
t.true(waitForProcessing.calledOnceWith(sinon.match.any, "42", sinon.match.any, {
|
||||
isUnsuccessfulExecution: true,
|
||||
}));
|
||||
}
|
||||
else {
|
||||
t.true(diagnosticsExportStub.notCalled);
|
||||
t.true(uploadFromActions.notCalled);
|
||||
t.true(waitForProcessing.notCalled);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
//# sourceMappingURL=init-action-post-helper.test.js.map
|
||||
File diff suppressed because one or more lines are too long
30
lib/init-action-post.js
generated
@@ -25,17 +25,37 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const debugArtifacts = __importStar(require("./debug-artifacts"));
const feature_flags_1 = require("./feature-flags");
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util_1 = require("./util");
async function runWrapper() {
const startedAt = new Date();
let uploadFailedSarifResult;
try {
await initActionPostHelper.run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, actionsUtil.printDebugLogs);
const logger = (0, logging_1.getActionsLogger)();
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
uploadFailedSarifResult = await initActionPostHelper.run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, actions_util_1.printDebugLogs, repositoryNwo, features, logger);
}
catch (error) {
core.setFailed(`init post-action step failed: ${error}`);
console.log(error);
catch (e) {
core.setFailed(e instanceof Error ? e.message : String(e));
console.log(e);
await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init-post", (0, actions_util_1.getActionsStatus)(e), startedAt, String(e), e instanceof Error ? e.stack : undefined));
return;
}
const statusReportBase = await (0, actions_util_1.createStatusReportBase)("init-post", "success", startedAt);
const statusReport = {
...statusReportBase,
...uploadFailedSarifResult,
};
await (0, actions_util_1.sendStatusReport)(statusReport);
}
void runWrapper();
//# sourceMappingURL=init-action-post.js.map
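Editor's sketch (not part of the diff): the final status report merges the base fields with whatever tryUploadSarifIfRunFailed returned, roughly:
//   { ...createStatusReportBase("init-post", "success", startedAt),
//     raw_upload_size_bytes: 20, zipped_upload_size_bytes: 10 }
// where the size fields are the illustrative stub values used by the tests above.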
@@ -1 +1 @@
|
||||
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,4DAA8C;AAC9C,kEAAoD;AACpD,gFAAkE;AAElE,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,oBAAoB,CAAC,GAAG,CAC5B,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,WAAW,CAAC,cAAc,CAC3B,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,iCAAiC,KAAK,EAAE,CAAC,CAAC;QACzD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,iDAOwB;AACxB,6CAAgD;AAChD,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,iCAAwE;AAMxE,KAAK,UAAU,UAAU;IACvB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,uBAES,CAAC;IACd,IAAI;QACF,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QAEF,uBAAuB,GAAG,MAAM,oBAAoB,CAAC,GAAG,CACtD,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,6BAAc,EACd,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAE3D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,IAAA,+BAAgB,EACpB,MAAM,IAAA,qCAAsB,EAC1B,WAAW,EACX,IAAA,+BAAgB,EAAC,CAAC,CAAC,EACnB,SAAS,EACT,MAAM,CAAC,CAAC,CAAC,EACT,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CACzC,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAAyB;QACzC,GAAG,gBAAgB;QACnB,GAAG,uBAAuB;KAC3B,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
6
lib/init-action.js
generated
@@ -31,6 +31,7 @@ const logging_1 = require("./logging");
const repository_1 = require("./repository");
const trap_caching_1 = require("./trap-caching");
const util_1 = require("./util");
const workflow_1 = require("./workflow");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendSuccessStatusReport(startedAt, config, toolsVersion, logger) {
@@ -90,11 +91,12 @@ async function run() {
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
try {
const workflowErrors = await (0, actions_util_1.validateWorkflow)();
const workflowErrors = await (0, workflow_1.validateWorkflow)();
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
return;
}
const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, features, logger);
const defaultCliVersion = await features.getDefaultCliVersion(gitHubVersion.type);
const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, await (0, util_1.shouldBypassToolcache)(features, (0, actions_util_1.getOptionalInput)("tools"), (0, actions_util_1.getOptionalInput)("languages"), repositoryNwo, logger), defaultCliVersion, logger);
codeql = initCodeQLResult.codeql;
toolsVersion = initCodeQLResult.toolsVersion;
await (0, util_1.enrichEnvironment)(codeql);
File diff suppressed because one or more lines are too long
4
lib/init.js
generated
@@ -30,9 +30,9 @@ const configUtils = __importStar(require("./config-utils"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
const util_1 = require("./util");
async function initCodeQL(codeqlURL, apiDetails, tempDir, variant, featureEnablement, logger) {
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger) {
logger.startGroup("Setup CodeQL tools");
const { codeql, toolsVersion } = await (0, codeql_1.setupCodeQL)(codeqlURL, apiDetails, tempDir, variant, featureEnablement, logger, true);
const { codeql, toolsVersion } = await (0, codeql_1.setupCodeQL)(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger, true);
await codeql.printVersion();
logger.endGroup();
return { codeql, toolsVersion };
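Editor's sketch (not part of the diff) of the new call shape, using the names that appear in the lib/init-action.js hunk above; the featureEnablement argument is replaced by an explicit bypass flag plus a default CLI version.
//   const defaultCliVersion = await features.getDefaultCliVersion(gitHubVersion.type);
//   const initCodeQLResult = await initCodeQL(
//     getOptionalInput("tools"), apiDetails, getTemporaryDirectory(), gitHubVersion.type,
//     await shouldBypassToolcache(features, getOptionalInput("tools"), getOptionalInput("languages"), repositoryNwo, logger),
//     defaultCliVersion, logger);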
@@ -1 +1 @@
|
||||
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,iBAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,OAAO,EACP,iBAAiB,EACjB,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,eAAmC,EACnC,UAA8B,EAC9B,UAA8B,EAC9B,kBAA2B,EAC3B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,iBAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,eAAe,EACf,UAAU,EACV,UAAU,EACV,kBAAkB,EAClB,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,iBAAiB,EACjB,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA5CD,gCA4CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,iBAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,iBAAiB,EACjB,MAAM,CACP,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAlCD,0BAkCC;AAED;;;;;;;;GAQG;AACH,SAAS,YAAY,CAAC,CAAM;;IAC1B,IAAI,CAAC,CAAC,CAAC,YAAY,KAAK,CAAC,EAAE;QACzB,OAAO,CAAC,CAAC;KACV;IAED;IACE,2BAA2B;IAC3B,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC;SACnD,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,uCAAuC,CAAC,CAAA,EAC5D;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CACvB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;KACH;IAED;IACE,+EAA+E;IAC/E,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,wCAAwC,CAAC;;QAC7D,gEAAgE;QAChE,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,qBAAqB,CAAC,CAAA,EAC1C;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;KACtC;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAA
E,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE;gBACpE,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAzCD,8CAyCC"}
|
||||
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,UAA8B,EAC9B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,eAAwB,EACxB,iBAA2C,EAC3C,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,UAAU,EACV,UAAU,EACV,OAAO,EACP,OAAO,EACP,eAAe,EACf,iBAAiB,EACjB,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AAvBD,gCAuBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,eAAmC,EACnC,UAA8B,EAC9B,UAA8B,EAC9B,kBAA2B,EAC3B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,iBAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,eAAe,EACf,UAAU,EACV,UAAU,EACV,kBAAkB,EAClB,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,iBAAiB,EACjB,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA5CD,gCA4CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,iBAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,iBAAiB,EACjB,MAAM,CACP,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAlCD,0BAkCC;AAED;;;;;;;;GAQG;AACH,SAAS,YAAY,CAAC,CAAM;;IAC1B,IAAI,CAAC,CAAC,CAAC,YAAY,KAAK,CAAC,EAAE;QACzB,OAAO,CAAC,CAAC;KACV;IAED;IACE,2BAA2B;IAC3B,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC;SACnD,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,uCAAuC,CAAC,CAAA,EAC5D;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CACvB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;KACH;IAED;IACE,+EAA+E;IAC/E,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,wCAAwC,CAAC;;QAC7D,gEAAgE;QAChE,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,qBAAqB,CAAC,CAAA,EAC1C;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;KACtC;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAA
U,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE;gBACpE,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAzCD,8CAyCC"}
|
||||
29
lib/languages.js
generated
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isScannedLanguage = exports.isTracedLanguage = exports.parseLanguage = exports.Language = void 0;
exports.isScannedLanguage = exports.isTracedLanguage = exports.parseLanguage = exports.resolveAlias = exports.KOTLIN_SWIFT_BYPASS = exports.LANGUAGE_ALIASES = exports.Language = void 0;
// All the languages supported by CodeQL
var Language;
(function (Language) {
@@ -14,24 +14,39 @@ var Language;
Language["swift"] = "swift";
})(Language = exports.Language || (exports.Language = {}));
// Additional names for languages
const LANGUAGE_ALIASES = {
exports.LANGUAGE_ALIASES = {
c: Language.cpp,
"c++": Language.cpp,
"c#": Language.csharp,
kotlin: Language.java,
typescript: Language.javascript,
};
// Translate from user input or GitHub's API names for languages to CodeQL's names for languages
exports.KOTLIN_SWIFT_BYPASS = ["kotlin", "swift"];
function resolveAlias(lang) {
return exports.LANGUAGE_ALIASES[lang] || lang;
}
exports.resolveAlias = resolveAlias;
/**
* Translate from user input or GitHub's API names for languages to CodeQL's
* names for languages. This does not translate a language alias to the actual
* language used by CodeQL.
*
* @param language The language to translate.
* @returns A language supported by CodeQL, an alias for a language, or
* `undefined` if the input language cannot be parsed into a language supported
* by CodeQL.
*/
function parseLanguage(language) {
// Normalise to lower case
language = language.toLowerCase();
language = language.trim().toLowerCase();
// See if it's an exact match
if (language in Language) {
return language;
}
// Check language aliases
if (language in LANGUAGE_ALIASES) {
return LANGUAGE_ALIASES[language];
// Check language aliases, but return the original language name,
// the alias will be resolved later.
if (language in exports.LANGUAGE_ALIASES) {
return language;
}
return undefined;
}
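Editor's illustration of the behavioural change, mirroring the updated tests that follow; the require path is an assumption about the compiled module layout shown in this diff.
const { parseLanguage, resolveAlias, Language } = require("./lib/languages");
parseLanguage("kotlin");          // "kotlin" (the alias is now preserved rather than mapped to Language.java here)
parseLanguage(" \t\nCsHaRp\t\t"); // Language.csharp ("csharp"); input is now trimmed as well as lower-cased
resolveAlias("kotlin");           // Language.java ("java"); aliases are resolved in this separate step
resolveAlias("javascript");       // "javascript"; non-aliases pass through unchanged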
@@ -1 +1 @@
|
||||
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QASX;AATD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EATW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QASnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,MAAM,EAAE,QAAQ,CAAC,IAAI;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO;QACL,QAAQ,CAAC,GAAG;QACZ,QAAQ,CAAC,MAAM;QACf,QAAQ,CAAC,EAAE;QACX,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;KACf,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACvB,CAAC;AARD,4CAQC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
|
||||
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QASX;AATD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EATW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QASnB;AAED,iCAAiC;AACpB,QAAA,gBAAgB,GAAiC;IAC5D,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,MAAM,EAAE,QAAQ,CAAC,IAAI;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAIW,QAAA,mBAAmB,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;AAEvD,SAAgB,YAAY,CAAC,IAAqB;IAChD,OAAO,wBAAgB,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC;AACxC,CAAC;AAFD,oCAEC;AAED;;;;;;;;;GASG;AACH,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;IAEzC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,iEAAiE;IACjE,oCAAoC;IACpC,IAAI,QAAQ,IAAI,wBAAgB,EAAE;QAChC,OAAO,QAAQ,CAAC;KACjB;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAhBD,sCAgBC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO;QACL,QAAQ,CAAC,GAAG;QACZ,QAAQ,CAAC,MAAM;QACf,QAAQ,CAAC,EAAE;QACX,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;KACf,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACvB,CAAC;AARD,4CAQC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
|
||||
13
lib/languages.test.js
generated
@@ -16,11 +16,14 @@ const testing_utils_1 = require("./testing-utils");
t.deepEqual((0, languages_1.parseLanguage)("javascript"), languages_1.Language.javascript);
t.deepEqual((0, languages_1.parseLanguage)("python"), languages_1.Language.python);
// Aliases
t.deepEqual((0, languages_1.parseLanguage)("c"), languages_1.Language.cpp);
t.deepEqual((0, languages_1.parseLanguage)("c++"), languages_1.Language.cpp);
t.deepEqual((0, languages_1.parseLanguage)("c#"), languages_1.Language.csharp);
t.deepEqual((0, languages_1.parseLanguage)("kotlin"), languages_1.Language.java);
t.deepEqual((0, languages_1.parseLanguage)("typescript"), languages_1.Language.javascript);
t.deepEqual((0, languages_1.parseLanguage)("c"), "c");
t.deepEqual((0, languages_1.parseLanguage)("c++"), "c++");
t.deepEqual((0, languages_1.parseLanguage)("c#"), "c#");
t.deepEqual((0, languages_1.parseLanguage)("kotlin"), "kotlin");
t.deepEqual((0, languages_1.parseLanguage)("typescript"), "typescript");
// spaces and case-insensitivity
t.deepEqual((0, languages_1.parseLanguage)(" \t\nCsHaRp\t\t"), languages_1.Language.csharp);
t.deepEqual((0, languages_1.parseLanguage)(" \t\nkOtLin\t\t"), "kotlin");
// Not matches
t.deepEqual((0, languages_1.parseLanguage)("foo"), undefined);
t.deepEqual((0, languages_1.parseLanguage)(" "), undefined);
@@ -1 +1 @@
|
||||
{"version":3,"file":"languages.test.js","sourceRoot":"","sources":["../src/languages.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,2CAKqB;AACrB,mDAA6C;AAE7C,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,gBAAgB;IAChB,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IACtD,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,MAAM,CAAC,EAAE,oBAAQ,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAC9D,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAEtD,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,GAAG,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,IAAI,CAAC,CAAC;IACpD,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAE9D,cAAc;IACd,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC7C,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,GAAG,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;AAC5C,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,CAAC,CAAC,IAAI,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,IAAI,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACtC,CAAC,CAAC,IAAI,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,IAAI,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;IAEzC,CAAC,CAAC,KAAK,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,KAAK,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAC3C,CAAC,CAAC,KAAK,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3C,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,mBAAmB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACpC,CAAC,CAAC,KAAK,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACzC,CAAC,CAAC,KAAK,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAC5C,CAAC,CAAC,KAAK,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,KAAK,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,KAAK,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;IAE3C,CAAC,CAAC,IAAI,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,IAAI,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAC3C,CAAC,CAAC,IAAI,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3C,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"languages.test.js","sourceRoot":"","sources":["../src/languages.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,2CAKqB;AACrB,mDAA6C;AAE7C,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,gBAAgB;IAChB,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IACtD,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,MAAM,CAAC,EAAE,oBAAQ,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAC9D,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAEtD,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,GAAG,CAAC,EAAE,GAAG,CAAC,CAAC;IACrC,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,KAAK,CAAC,EAAE,KAAK,CAAC,CAAC;IACzC,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAC;IACvC,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;IAC/C,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,YAAY,CAAC,EAAE,YAAY,CAAC,CAAC;IAEvD,gCAAgC;IAChC,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,kBAAkB,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAChE,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,kBAAkB,CAAC,EAAE,QAAQ,CAAC,CAAC;IAEzD,cAAc;IACd,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC7C,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,GAAG,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,IAAA,yBAAa,EAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;AAC5C,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,CAAC,CAAC,IAAI,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,IAAI,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACtC,CAAC,CAAC,IAAI,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,IAAI,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;IAEzC,CAAC,CAAC,KAAK,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,KAAK,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAC3C,CAAC,CAAC,KAAK,CAAC,IAAA,4BAAgB,EAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3C,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,mBAAmB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACpC,CAAC,CAAC,KAAK,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACzC,CAAC,CAAC,KAAK,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAC5C,CAAC,CAAC,KAAK,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,KAAK,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,KAAK,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;IAE3C,CAAC,CAAC,IAAI,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,IAAI,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAC3C,CAAC,CAAC,IAAI,CAAC,IAAA,6BAAiB,EAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3C,CAAC,CAAC,CAAC"}
|
||||
382
lib/setup-codeql.js
generated
Normal file
@@ -0,0 +1,382 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.setupCodeQLBundle = exports.getCodeQLURLVersion = exports.downloadCodeQL = exports.getCodeQLSource = exports.convertToSemVer = exports.getBundleTagNameFromUrl = exports.findCodeQLBundleTagDotcomOnly = exports.getCodeQLActionRepository = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const uuid_1 = require("uuid");
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const api = __importStar(require("./api-client"));
|
||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
exports.CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||
function getCodeQLBundleName() {
|
||||
let platform;
|
||||
if (process.platform === "win32") {
|
||||
platform = "win64";
|
||||
}
|
||||
else if (process.platform === "linux") {
|
||||
platform = "linux64";
|
||||
}
|
||||
else if (process.platform === "darwin") {
|
||||
platform = "osx64";
|
||||
}
|
||||
else {
|
||||
return "codeql-bundle.tar.gz";
|
||||
}
|
||||
return `codeql-bundle-${platform}.tar.gz`;
|
||||
}
|
||||
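Editor's note (not part of the diff): the platform-to-asset mapping above resolves as follows, derived directly from getCodeQLBundleName.
//   "win32"       -> "codeql-bundle-win64.tar.gz"
//   "linux"       -> "codeql-bundle-linux64.tar.gz"
//   "darwin"      -> "codeql-bundle-osx64.tar.gz"
//   anything else -> "codeql-bundle.tar.gz" (the platform-agnostic bundle)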
function getCodeQLActionRepository(logger) {
|
||||
if ((0, actions_util_1.isRunningLocalAction)()) {
|
||||
// This handles the case where the Action does not come from an Action repository,
|
||||
// e.g. our integration tests which use the Action code from the current checkout.
|
||||
// In these cases, the GITHUB_ACTION_REPOSITORY environment variable is not set.
|
||||
logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository.");
|
||||
return exports.CODEQL_DEFAULT_ACTION_REPOSITORY;
|
||||
}
|
||||
return util.getRequiredEnvParam("GITHUB_ACTION_REPOSITORY");
|
||||
}
|
||||
exports.getCodeQLActionRepository = getCodeQLActionRepository;
|
||||
async function findCodeQLBundleTagDotcomOnly(cliVersion, logger) {
|
||||
logger.debug(`Trying to find the CodeQL bundle release for CLI version ${cliVersion}.`);
|
||||
const apiClient = api.getApiClient();
|
||||
const codeQLActionRepository = getCodeQLActionRepository(logger);
|
||||
const releases = await apiClient.paginate(apiClient.repos.listReleases, {
|
||||
owner: codeQLActionRepository.split("/")[0],
|
||||
repo: codeQLActionRepository.split("/")[1],
|
||||
});
|
||||
logger.debug(`Found ${releases.length} releases.`);
|
||||
for (const release of releases) {
|
||||
const cliVersionFileVersions = release.assets
|
||||
.map((asset) => { var _a; return (_a = asset.name.match(/cli-version-(.*)\.txt/)) === null || _a === void 0 ? void 0 : _a[1]; })
|
||||
.filter((v) => v)
|
||||
.map((v) => v);
|
||||
if (cliVersionFileVersions.length === 0) {
|
||||
logger.debug(`Ignoring release ${release.tag_name} with no CLI version marker file.`);
|
||||
continue;
|
||||
}
|
||||
if (cliVersionFileVersions.length > 1) {
|
||||
logger.warning(`Ignoring release ${release.tag_name} with multiple CLI version marker files.`);
|
||||
continue;
|
||||
}
|
||||
if (cliVersionFileVersions[0] === cliVersion) {
|
||||
return release.tag_name;
|
||||
}
|
||||
}
|
||||
throw new Error(`Failed to find a CodeQL bundle release for CLI version ${cliVersion}.`);
|
||||
}
|
||||
exports.findCodeQLBundleTagDotcomOnly = findCodeQLBundleTagDotcomOnly;
|
||||
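Editor's illustration of the CLI-version marker matching above; the asset name and version are made up for the example.
// A release whose assets include "cli-version-2.10.3.txt" is selected when cliVersion === "2.10.3":
"cli-version-2.10.3.txt".match(/cli-version-(.*)\.txt/)[1] === "2.10.3"; // true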
async function getCodeQLBundleDownloadURL(tagName, apiDetails, variant, logger) {
|
||||
const codeQLActionRepository = getCodeQLActionRepository(logger);
|
||||
const potentialDownloadSources = [
|
||||
// This GitHub instance, and this Action.
|
||||
[apiDetails.url, codeQLActionRepository],
|
||||
// This GitHub instance, and the canonical Action.
|
||||
[apiDetails.url, exports.CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
// GitHub.com, and the canonical Action.
|
||||
[util.GITHUB_DOTCOM_URL, exports.CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
];
|
||||
// We now filter out any duplicates.
|
||||
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
|
||||
const uniqueDownloadSources = potentialDownloadSources.filter((source, index, self) => {
|
||||
return !self.slice(0, index).some((other) => (0, fast_deep_equal_1.default)(source, other));
|
||||
});
|
||||
const codeQLBundleName = getCodeQLBundleName();
|
||||
if (variant === util.GitHubVariant.GHAE) {
|
||||
try {
|
||||
const release = await api
|
||||
.getApiClient()
|
||||
.request("GET /enterprise/code-scanning/codeql-bundle/find/{tag}", {
|
||||
tag: tagName,
|
||||
});
|
||||
const assetID = release.data.assets[codeQLBundleName];
|
||||
if (assetID !== undefined) {
|
||||
const download = await api
|
||||
.getApiClient()
|
||||
.request("GET /enterprise/code-scanning/codeql-bundle/download/{asset_id}", { asset_id: assetID });
|
||||
const downloadURL = download.data.url;
|
||||
logger.info(`Found CodeQL bundle at GitHub AE endpoint with URL ${downloadURL}.`);
|
||||
return downloadURL;
|
||||
}
|
||||
else {
|
||||
logger.info(`Attempted to fetch bundle from GitHub AE endpoint but the bundle ${codeQLBundleName} was not found in the assets ${JSON.stringify(release.data.assets)}.`);
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
logger.info(`Attempted to fetch bundle from GitHub AE endpoint but got error ${e}.`);
|
||||
}
|
||||
}
|
||||
for (const downloadSource of uniqueDownloadSources) {
|
||||
const [apiURL, repository] = downloadSource;
|
||||
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
|
||||
if (apiURL === util.GITHUB_DOTCOM_URL &&
|
||||
repository === exports.CODEQL_DEFAULT_ACTION_REPOSITORY) {
|
||||
break;
|
||||
}
|
||||
const [repositoryOwner, repositoryName] = repository.split("/");
|
||||
try {
|
||||
const release = await api.getApiClient().repos.getReleaseByTag({
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
tag: tagName,
|
||||
});
|
||||
for (const asset of release.data.assets) {
|
||||
if (asset.name === codeQLBundleName) {
|
||||
logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
|
||||
return asset.url;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
|
||||
}
|
||||
}
|
||||
return `https://github.com/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${tagName}/${codeQLBundleName}`;
|
||||
}
|
||||
function getBundleTagNameFromUrl(url) {
|
||||
const match = url.match(/\/codeql-bundle-(.*)\//);
|
||||
if (match === null || match.length < 2) {
|
||||
throw new Error(`Malformed tools url: ${url}. Tag name could not be inferred`);
|
||||
}
|
||||
return match[1];
|
||||
}
|
||||
exports.getBundleTagNameFromUrl = getBundleTagNameFromUrl;
|
||||
function convertToSemVer(version, logger) {
|
||||
if (!semver.valid(version)) {
|
||||
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
|
||||
version = `0.0.0-${version}`;
|
||||
}
|
||||
const s = semver.clean(version);
|
||||
if (!s) {
|
||||
throw new Error(`Bundle version ${version} is not in SemVer format.`);
|
||||
}
|
||||
return s;
|
||||
}
|
||||
exports.convertToSemVer = convertToSemVer;
|
||||
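Editor's illustration of convertToSemVer, using hypothetical version strings and the runner logger exported by ./lib/logging (an assumed require path).
const { convertToSemVer } = require("./lib/setup-codeql");
const { getRunnerLogger } = require("./lib/logging");
convertToSemVer("20220908", getRunnerLogger(true)); // "0.0.0-20220908" (not valid SemVer, so prefixed as a pre-release)
convertToSemVer("2.10.3", getRunnerLogger(true));   // "2.10.3" (already valid, returned cleaned)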
async function getOrFindBundleTagName(version, logger) {
|
||||
if (version.variant === util.GitHubVariant.DOTCOM) {
|
||||
return await findCodeQLBundleTagDotcomOnly(version.cliVersion, logger);
|
||||
}
|
||||
else {
|
||||
return version.tagName;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Look for a version of the CodeQL tools in the cache which could override the requested CLI version.
|
||||
*/
|
||||
async function findOverridingToolsInCache(requestedCliVersion, logger) {
|
||||
const candidates = toolcache
|
||||
.findAllVersions("CodeQL")
|
||||
.filter(util_1.isGoodVersion)
|
||||
.map((version) => ({
|
||||
folder: toolcache.find("CodeQL", version),
|
||||
version,
|
||||
}))
|
||||
.filter(({ folder }) => fs.existsSync(path.join(folder, "pinned-version")));
|
||||
if (candidates.length === 1) {
|
||||
const candidate = candidates[0];
|
||||
logger.debug(`CodeQL tools version ${candidate.version} in toolcache overriding version ${requestedCliVersion}.`);
|
||||
return {
|
||||
codeqlFolder: candidate.folder,
|
||||
sourceType: "toolcache",
|
||||
toolsVersion: candidate.version,
|
||||
};
|
||||
}
|
||||
else if (candidates.length === 0) {
|
||||
logger.debug("Did not find any candidate pinned versions of the CodeQL tools in the toolcache.");
|
||||
}
|
||||
else {
|
||||
logger.debug("Could not use CodeQL tools from the toolcache since more than one candidate pinned " +
|
||||
"version was found in the toolcache.");
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
async function getCodeQLSource(toolsInput, bypassToolcache, defaultCliVersion, apiDetails, variant, logger) {
|
||||
if (toolsInput && toolsInput !== "latest" && !toolsInput.startsWith("http")) {
|
||||
return {
|
||||
codeqlTarPath: toolsInput,
|
||||
sourceType: "local",
|
||||
toolsVersion: "local",
|
||||
};
|
||||
}
|
||||
const forceLatestReason =
|
||||
// We use the special value of 'latest' to prioritize the version in the
|
||||
// defaults over any pinned cached version.
|
||||
toolsInput === "latest"
|
||||
? '"tools: latest" was requested'
|
||||
: // If the user hasn't requested a particular CodeQL version, then bypass
|
        // the toolcache when the appropriate feature is enabled. This
        // allows us to quickly rollback a broken bundle that has made its way
        // into the toolcache.
        toolsInput === undefined && bypassToolcache
            ? "a specific version of the CodeQL tools was not requested and the bypass toolcache feature is enabled"
            : undefined;
    const forceLatest = forceLatestReason !== undefined;
    if (forceLatest) {
        logger.debug(`Forcing the latest version of the CodeQL tools since ${forceLatestReason}.`);
    }
    /**
     * The requested version is:
     *
     * 1. The one in `defaults.json`, if forceLatest is true.
     * 2. The version specified by the tools input URL, if one was provided.
     * 3. The default CLI version, otherwise.
     *
     * We include a `variant` property to let us verify using the type system that
     * `tagName` is only undefined when the variant is Dotcom. This lets us ensure
     * that we can always compute `tagName`, either by using the existing tag name
     * on enterprise instances, or safely calling `findCodeQLBundleTagDotcomOnly`
     * on Dotcom.
     */
    const requestedVersion = forceLatest
        ? // case 1
            {
                cliVersion: defaults.cliVersion,
                tagName: defaults.bundleVersion,
                variant,
            }
        : toolsInput !== undefined
            ? // case 2
                {
                    cliVersion: convertToSemVer(getBundleTagNameFromUrl(toolsInput), logger),
                    tagName: getBundleTagNameFromUrl(toolsInput),
                    url: toolsInput,
                    variant,
                }
            : // case 3
                defaultCliVersion;
    // If we find the specified version, we always use that.
    let codeqlFolder = toolcache.find("CodeQL", requestedVersion.cliVersion);
    let tagName = requestedVersion["tagName"];
    if (!codeqlFolder && !requestedVersion.cliVersion.startsWith("0.0.0")) {
        // Fall back to accepting a `0.0.0-<tagName>` version if we didn't find the
        // `x.y.z` version. This is to support old versions of the toolcache.
        tagName =
            tagName || (await getOrFindBundleTagName(requestedVersion, logger));
        const fallbackVersion = convertToSemVer(tagName, logger);
        logger.debug(`Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL tools version ${requestedVersion.cliVersion}.`);
        codeqlFolder = toolcache.find("CodeQL", fallbackVersion);
    }
    if (codeqlFolder) {
        return {
            codeqlFolder,
            sourceType: "toolcache",
            toolsVersion: requestedVersion.cliVersion,
        };
    }
    logger.debug(`Did not find CodeQL tools version ${requestedVersion.cliVersion} in the toolcache.`);
    // If we don't find the requested version on Enterprise, we may allow a
    // different version to save download time if the version hasn't been
    // specified explicitly (in which case we always honor it).
    if (variant !== util.GitHubVariant.DOTCOM && !forceLatest && !toolsInput) {
        const result = await findOverridingToolsInCache(requestedVersion.cliVersion, logger);
        if (result !== undefined) {
            return result;
        }
    }
    return {
        codeqlURL: requestedVersion["url"] ||
            (await getCodeQLBundleDownloadURL(tagName || (await getOrFindBundleTagName(requestedVersion, logger)), apiDetails, variant, logger)),
        semanticVersion: requestedVersion.cliVersion,
        sourceType: "download",
        toolsVersion: requestedVersion.cliVersion,
    };
}
exports.getCodeQLSource = getCodeQLSource;
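// Illustrative sketch (not part of the diff above): it mirrors the two-step toolcache lookup
// in getCodeQLSource -- try the exact x.y.z CLI version first, then fall back to the legacy
// `0.0.0-<tagName>` placeholder version used by older toolcache entries. `findInToolcache`
// is a hypothetical stand-in for toolcache.find("CodeQL", version).
function lookupCodeQLFolderSketch(findInToolcache, cliVersion, tagName) {
    // Exact semantic version, e.g. "2.12.0".
    let folder = findInToolcache("CodeQL", cliVersion);
    if (!folder && !cliVersion.startsWith("0.0.0") && tagName) {
        // Legacy placeholder version, e.g. "0.0.0-20230106" for tag "codeql-bundle-20230106".
        const fallbackVersion = `0.0.0-${tagName.replace(/^codeql-bundle-/, "")}`;
        folder = findInToolcache("CodeQL", fallbackVersion);
    }
    return folder; // empty/undefined if the bundle is not cached
}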
async function downloadCodeQL(codeqlURL, semanticVersion, apiDetails, tempDir, logger) {
    const parsedCodeQLURL = new URL(codeqlURL);
    const searchParams = new URLSearchParams(parsedCodeQLURL.search);
    const headers = {
        accept: "application/octet-stream",
    };
    // We only want to provide an authorization header if we are downloading
    // from the same GitHub instance the Action is running on.
    // This avoids leaking Enterprise tokens to dotcom.
    // We also don't want to send an authorization header if there's already a token provided in the URL.
    if (searchParams.has("token")) {
        logger.debug("CodeQL tools URL contains an authorization token.");
    }
    else if (codeqlURL.startsWith(`${apiDetails.url}/`)) {
        logger.debug("Providing an authorization token to download CodeQL tools.");
        headers.authorization = `token ${apiDetails.auth}`;
    }
    else {
        logger.debug("Downloading CodeQL tools without an authorization token.");
    }
    logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
    const dest = path.join(tempDir, (0, uuid_1.v4)());
    const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
    const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, undefined, finalHeaders);
    logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
    const codeqlExtracted = await toolcache.extractTar(codeqlPath);
    return await toolcache.cacheDir(codeqlExtracted, "CodeQL", semanticVersion);
}
exports.downloadCodeQL = downloadCodeQL;
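// Illustrative sketch (not part of the diff above): it isolates the authorization decision made
// in downloadCodeQL -- never attach a token if the URL already carries one as a query parameter,
// and only send our token when downloading from the same GitHub instance (apiDetails.url), so
// Enterprise tokens are not leaked to dotcom. The host names in the usage comments are examples.
function buildDownloadHeadersSketch(codeqlURL, apiDetails) {
    const headers = { accept: "application/octet-stream" };
    const searchParams = new URLSearchParams(new URL(codeqlURL).search);
    if (searchParams.has("token")) {
        return headers; // the URL is already authenticated
    }
    if (codeqlURL.startsWith(`${apiDetails.url}/`)) {
        headers.authorization = `token ${apiDetails.auth}`;
    }
    return headers;
}
// Example usage (hypothetical values): a download from the same GHES instance gets the token,
// a dotcom release download does not.
//   buildDownloadHeadersSketch("https://ghes.example.com/api/v3/...", { url: "https://ghes.example.com/api/v3", auth: "..." })
//   buildDownloadHeadersSketch("https://github.com/github/codeql-action/releases/...", { url: "https://ghes.example.com/api/v3", auth: "..." })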
function getCodeQLURLVersion(url) {
    const match = url.match(/\/codeql-bundle-(.*)\//);
    if (match === null || match.length < 2) {
        throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
    }
    return match[1];
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
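// Illustrative sketch (not part of the diff above): it shows what getCodeQLURLVersion extracts,
// namely the segment between "codeql-bundle-" and the following "/". The URL below is a
// hypothetical example; the "parse codeql bundle url version" test later in this diff exercises
// the same behaviour.
const exampleBundleUrl = "https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz";
const exampleMatch = exampleBundleUrl.match(/\/codeql-bundle-(.*)\//);
console.log(exampleMatch && exampleMatch[1]); // "20200601"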
/**
 * Obtains the CodeQL bundle, installs it in the toolcache if appropriate, and extracts it.
 *
 * @param toolsInput
 * @param apiDetails
 * @param tempDir
 * @param variant
 * @param bypassToolcache
 * @param defaultCliVersion
 * @param logger
 * @param checkVersion Whether to check that CodeQL CLI meets the minimum
 * version requirement. Must be set to true outside tests.
 * @returns the path to the extracted bundle, and the version of the tools
 */
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger) {
    const source = await getCodeQLSource(toolsInput, bypassToolcache, defaultCliVersion, apiDetails, variant, logger);
    let codeqlFolder;
    switch (source.sourceType) {
        case "local":
            codeqlFolder = await toolcache.extractTar(source.codeqlTarPath);
            break;
        case "toolcache":
            codeqlFolder = source.codeqlFolder;
            logger.debug(`CodeQL found in cache ${codeqlFolder}`);
            break;
        case "download":
            codeqlFolder = await downloadCodeQL(source.codeqlURL, source.semanticVersion, apiDetails, tempDir, logger);
            break;
        default:
            util.assertNever(source);
    }
    return { codeqlFolder, toolsVersion: source.toolsVersion };
}
exports.setupCodeQLBundle = setupCodeQLBundle;
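// Illustrative sketch (not part of the diff above): a rough idea of how an init step might call
// setupCodeQLBundle. Every argument value here is hypothetical, and the shapes of `apiDetails`
// and `defaultCliVersion` are assumptions based on how they are accessed in the code above.
async function exampleInitSketch(logger) {
    const apiDetails = { url: "https://github.com", auth: process.env.GITHUB_TOKEN }; // assumed shape
    const { codeqlFolder, toolsVersion } = await setupCodeQLBundle(
        undefined,                                  // toolsInput: no explicit `tools:` input
        apiDetails,
        process.env.RUNNER_TEMP || "/tmp",          // tempDir
        util.GitHubVariant.DOTCOM,                  // variant
        false,                                      // bypassToolcache
        { cliVersion: "2.12.0", variant: util.GitHubVariant.DOTCOM }, // defaultCliVersion (assumed shape)
        logger);
    logger.info(`Using CodeQL ${toolsVersion} from ${codeqlFolder}`);
}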
//# sourceMappingURL=setup-codeql.js.map
1 lib/setup-codeql.js.map Normal file
File diff suppressed because one or more lines are too long
116 lib/setup-codeql.test.js generated Normal file
@@ -0,0 +1,116 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const api = __importStar(require("./api-client"));
const logging_1 = require("./logging");
const setupCodeql = __importStar(require("./setup-codeql"));
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
ava_1.default.beforeEach(() => {
    (0, util_1.initializeEnvironment)("1.2.3");
});
(0, ava_1.default)("parse codeql bundle url version", (t) => {
    t.deepEqual(setupCodeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
});
(0, ava_1.default)("convert to semver", (t) => {
    const tests = {
        "20200601": "0.0.0-20200601",
        "20200601.0": "0.0.0-20200601.0",
        "20200601.0.0": "20200601.0.0",
        "1.2.3": "1.2.3",
        "1.2.3-alpha": "1.2.3-alpha",
        "1.2.3-beta.1": "1.2.3-beta.1",
    };
    for (const [version, expectedVersion] of Object.entries(tests)) {
        try {
            const parsedVersion = setupCodeql.convertToSemVer(version, (0, logging_1.getRunnerLogger)(true));
            t.deepEqual(parsedVersion, expectedVersion);
        }
        catch (e) {
            t.fail(e instanceof Error ? e.message : String(e));
        }
    }
});
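// Illustrative sketch (not part of the diff above): it reproduces the mapping exercised by the
// "convert to semver" test. Tag-style versions such as "20200601" are not valid semver, so they
// are wrapped as "0.0.0-<tag>" prerelease versions, while already-valid versions pass through
// unchanged. The real convertToSemVer lives in setup-codeql.js; this stand-alone version only
// needs a semver-shaped regex and is an approximation.
function convertToSemVerSketch(version) {
    const semverLike = /^\d+\.\d+\.\d+(-[0-9A-Za-z.-]+)?$/;
    return semverLike.test(version) ? version : `0.0.0-${version}`;
}
// convertToSemVerSketch("20200601")      === "0.0.0-20200601"
// convertToSemVerSketch("20200601.0.0")  === "20200601.0.0"
// convertToSemVerSketch("1.2.3-beta.1")  === "1.2.3-beta.1"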
(0, ava_1.default)("getCodeQLActionRepository", (t) => {
    const logger = (0, logging_1.getRunnerLogger)(true);
    (0, util_1.initializeEnvironment)("1.2.3");
    // isRunningLocalAction() === true
    delete process.env["GITHUB_ACTION_REPOSITORY"];
    process.env["RUNNER_TEMP"] = path.dirname(__dirname);
    const repoLocalRunner = setupCodeql.getCodeQLActionRepository(logger);
    t.deepEqual(repoLocalRunner, "github/codeql-action");
    // isRunningLocalAction() === false
    sinon.stub(actionsUtil, "isRunningLocalAction").returns(false);
    process.env["GITHUB_ACTION_REPOSITORY"] = "xxx/yyy";
    const repoEnv = setupCodeql.getCodeQLActionRepository(logger);
    t.deepEqual(repoEnv, "xxx/yyy");
});
(0, ava_1.default)("findCodeQLBundleTagDotcomOnly() matches GitHub Release with marker file", async (t) => {
    // Look for GitHub Releases in github/codeql-action
    sinon.stub(actionsUtil, "isRunningLocalAction").resolves(true);
    sinon.stub(api, "getApiClient").value(() => ({
        repos: {
            listReleases: sinon.stub().resolves(undefined),
        },
        paginate: sinon.stub().resolves([
            {
                assets: [
                    {
                        name: "cli-version-2.12.0.txt",
                    },
                ],
                tag_name: "codeql-bundle-20230106",
            },
        ]),
    }));
    t.is(await setupCodeql.findCodeQLBundleTagDotcomOnly("2.12.0", (0, logging_1.getRunnerLogger)(true)), "codeql-bundle-20230106");
});
(0, ava_1.default)("findCodeQLBundleTagDotcomOnly() errors if no GitHub Release matches marker file", async (t) => {
    // Look for GitHub Releases in github/codeql-action
    sinon.stub(actionsUtil, "isRunningLocalAction").resolves(true);
    sinon.stub(api, "getApiClient").value(() => ({
        repos: {
            listReleases: sinon.stub().resolves(undefined),
        },
        paginate: sinon.stub().resolves([
            {
                assets: [
                    {
                        name: "cli-version-2.12.0.txt",
                    },
                ],
                tag_name: "codeql-bundle-20230106",
            },
        ]),
    }));
    await t.throwsAsync(async () => await setupCodeql.findCodeQLBundleTagDotcomOnly("2.12.1", (0, logging_1.getRunnerLogger)(true)), {
        message: "Failed to find a CodeQL bundle release for CLI version 2.12.1.",
    });
});
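// Illustrative sketch (not part of the diff above): the two tests above pin down the contract of
// findCodeQLBundleTagDotcomOnly -- given the GitHub Releases of github/codeql-action, return the
// tag of the release whose assets include the marker file "cli-version-<cliVersion>.txt", and
// fail if no release carries that marker. A minimal stand-alone version of that matching logic:
function findBundleTagSketch(releases, cliVersion) {
    const markerName = `cli-version-${cliVersion}.txt`;
    const release = releases.find((r) =>
        (r.assets || []).some((asset) => asset.name === markerName));
    if (release === undefined) {
        throw new Error(`Failed to find a CodeQL bundle release for CLI version ${cliVersion}.`);
    }
    return release.tag_name;
}
// findBundleTagSketch([{ assets: [{ name: "cli-version-2.12.0.txt" }], tag_name: "codeql-bundle-20230106" }], "2.12.0")
//   === "codeql-bundle-20230106"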
//# sourceMappingURL=setup-codeql.test.js.map
1 lib/setup-codeql.test.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"setup-codeql.test.js","sourceRoot":"","sources":["../src/setup-codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,kDAAoC;AACpC,uCAA4C;AAC5C,4DAA8C;AAC9C,mDAA6C;AAC7C,iCAA+C;AAE/C,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,OAAO,CAAC,CAAC;AACjC,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC5C,CAAC,CAAC,SAAS,CACT,WAAW,CAAC,mBAAmB,CAC7B,mDAAmD,CACpD,EACD,UAAU,CACX,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,mBAAmB,EAAE,CAAC,CAAC,EAAE,EAAE;IAC9B,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,IAAI;YACF,MAAM,aAAa,GAAG,WAAW,CAAC,eAAe,CAC/C,OAAO,EACP,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;SACpD;KACF;AACH,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,2BAA2B,EAAE,CAAC,CAAC,EAAE,EAAE;IACtC,MAAM,MAAM,GAAG,IAAA,yBAAe,EAAC,IAAI,CAAC,CAAC;IAErC,IAAA,4BAAqB,EAAC,OAAO,CAAC,CAAC;IAE/B,kCAAkC;IAClC,OAAO,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC;IAC/C,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACrD,MAAM,eAAe,GAAG,WAAW,CAAC,yBAAyB,CAAC,MAAM,CAAC,CAAC;IACtE,CAAC,CAAC,SAAS,CAAC,eAAe,EAAE,sBAAsB,CAAC,CAAC;IAErD,mCAAmC;IACnC,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,sBAAsB,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,GAAG,SAAS,CAAC;IACpD,MAAM,OAAO,GAAG,WAAW,CAAC,yBAAyB,CAAC,MAAM,CAAC,CAAC;IAC9D,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;AAClC,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yEAAyE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1F,mDAAmD;IACnD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,sBAAsB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IAC/D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC;QAC3C,KAAK,EAAE;YACL,YAAY,EAAE,KAAK,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC;SAC/C;QACD,QAAQ,EAAE,KAAK,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC;YAC9B;gBACE,MAAM,EAAE;oBACN;wBACE,IAAI,EAAE,wBAAwB;qBAC/B;iBACF;gBACD,QAAQ,EAAE,wBAAwB;aACnC;SACF,CAAC;KACH,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC,EAAE,CACF,MAAM,WAAW,CAAC,6BAA6B,CAC7C,QAAQ,EACR,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,EACD,wBAAwB,CACzB,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iFAAiF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClG,mDAAmD;IACnD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,sBAAsB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IAC/D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC;QAC3C,KAAK,EAAE;YACL,YAAY,EAAE,KAAK,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC;SAC/C;QACD,QAAQ,EAAE,KAAK,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC;YAC9B;gBACE,MAAM,EAAE;oBACN;wBACE,IAAI,EAAE,wBAAwB;qBAC/B;iBACF;gBACD,QAAQ,EAAE,wBAAwB;aACnC;SACF,CAAC;KACH,CAAC,CAAC,CAAC;IACJ,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CACT,MAAM,WAAW,CAAC,6BAA6B,CAC7C,QAAQ,EACR,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,EACH;QACE,OAAO,EAAE,gEAAgE;KAC1E,CACF,CAAC;AACJ,CAAC,CAAC,CAAC"}
23 lib/shared-environment.js generated
@@ -1,14 +1,21 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_ACTION_TEST_MODE = exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_WORKFLOW_STARTED_AT = exports.ODASA_TRACER_CONFIGURATION = void 0;
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather than the init action.
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
exports.ODASA_TRACER_CONFIGURATION = exports.CODEQL_WORKFLOW_STARTED_AT = exports.CODEQL_ACTION_TEST_MODE = exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY = void 0;
/**
 * This environment variable is set to true when the `analyze` Action
 * completes successfully.
 */
exports.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY = "CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY";
exports.CODEQL_ACTION_TESTING_ENVIRONMENT = "CODEQL_ACTION_TESTING_ENVIRONMENT";
/** Used to disable uploading SARIF results or status reports to the GitHub API */
exports.CODEQL_ACTION_TEST_MODE = "CODEQL_ACTION_TEST_MODE";
/**
 * The time at which the first action (normally init) started executing.
 * If a workflow invokes a different action without first invoking the init
 * action (i.e. the upload action is being used by a third-party integrator)
 * then this variable will be assigned the start time of the action invoked
 * rather than the init action.
 */
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
//# sourceMappingURL=shared-environment.js.map
@@ -1 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAE1D,QAAA,iCAAiC,GAC5C,mCAAmC,CAAC;AAEtC,kFAAkF;AACrE,QAAA,uBAAuB,GAAG,yBAAyB,CAAC"}
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;;AAAA;;;GAGG;AACU,QAAA,+CAA+C,GAC1D,iDAAiD,CAAC;AAEvC,QAAA,iCAAiC,GAC5C,mCAAmC,CAAC;AAEtC,kFAAkF;AACrE,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AAEjE;;;;;;GAMG;AACU,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAE1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
27 lib/testing-utils.js generated
@@ -19,8 +19,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFeatures = exports.mockCodeQLVersion = exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
exports.createFeatures = exports.mockCodeQLVersion = exports.mockLanguagesInRepo = exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
const github = __importStar(require("@actions/github"));
const nock = __importStar(require("nock"));
const sinon = __importStar(require("sinon"));
const apiClient = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql"));
@@ -85,6 +86,8 @@ function setupTests(test) {
        if (!t.passed) {
            process.stdout.write(t.context.testOutput);
        }
        // Undo any modifications made by nock
        nock.cleanAll();
        // Undo any modifications made by sinon
        sinon.restore();
        // Undo any modifications to the env
@@ -144,6 +147,25 @@ function mockFeatureFlagApiEndpoint(responseStatusCode, response) {
    sinon.stub(apiClient, "getApiClient").value(() => client);
}
exports.mockFeatureFlagApiEndpoint = mockFeatureFlagApiEndpoint;
function mockLanguagesInRepo(languages) {
    const mockClient = sinon.stub(apiClient, "getApiClient");
    const listLanguages = sinon.stub().resolves({
        status: 200,
        data: languages.reduce((acc, lang) => {
            acc[lang] = 1;
            return acc;
        }, {}),
        headers: {},
        url: "GET /repos/:owner/:repo/languages",
    });
    mockClient.returns({
        repos: {
            listLanguages,
        },
    });
    return listLanguages;
}
exports.mockLanguagesInRepo = mockLanguagesInRepo;
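// Illustrative sketch (not part of the diff above): it shows the payload shape that
// mockLanguagesInRepo's reduce builds for the stubbed languages endpoint. The language list
// below is an arbitrary example.
const exampleLanguages = ["javascript", "python"];
const exampleLanguagesData = exampleLanguages.reduce((acc, lang) => {
    acc[lang] = 1; // the real API reports bytes per language; the mock only needs the keys
    return acc;
}, {});
console.log(exampleLanguagesData); // { javascript: 1, python: 1 }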
function mockCodeQLVersion(version) {
    return {
        async getVersion() {
@@ -159,6 +181,9 @@ exports.mockCodeQLVersion = mockCodeQLVersion;
 */
function createFeatures(enabledFeatures) {
    return {
        getDefaultCliVersion: async () => {
            throw new Error("not implemented");
        },
        getValue: async (feature) => {
            return enabledFeatures.includes(feature);
        },
File diff suppressed because one or more lines are too long
40 lib/toolrunner-error-catcher.js generated
@@ -31,7 +31,7 @@ const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||
* @param args optional arguments for tool. Escaping is handled by the lib.
|
||||
* @param matchers defines specific codes and/or regexes that should lead to return of a custom error
|
||||
* @param options optional exec options. See ExecOptions
|
||||
* @returns Promise<number> exit code
|
||||
* @returns ReturnState exit code and stdout output, if applicable
|
||||
*/
|
||||
async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
|
||||
var _a, _b;
|
||||
@@ -54,40 +54,36 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
|
||||
},
|
||||
};
|
||||
// we capture the original return code or error so that if no match is found we can duplicate the behavior
|
||||
let returnState;
|
||||
let exitCode;
|
||||
try {
|
||||
returnState = await new toolrunner.ToolRunner(await safeWhich.safeWhich(commandLine), args, {
|
||||
exitCode = await new toolrunner.ToolRunner(await safeWhich.safeWhich(commandLine), args, {
|
||||
...options,
|
||||
listeners,
|
||||
ignoreReturnCode: true, // so we can check for specific codes using the matchers
|
||||
}).exec();
|
||||
}
|
||||
catch (e) {
|
||||
returnState = e instanceof Error ? e : new Error(String(e));
|
||||
}
|
||||
// if there is a zero return code then we do not apply the matchers
|
||||
if (returnState === 0)
|
||||
return returnState;
|
||||
if (matchers) {
|
||||
for (const matcher of matchers) {
|
||||
if (matcher.exitCode === returnState ||
|
||||
((_a = matcher.outputRegex) === null || _a === void 0 ? void 0 : _a.test(stderr)) ||
|
||||
((_b = matcher.outputRegex) === null || _b === void 0 ? void 0 : _b.test(stdout))) {
|
||||
throw new Error(matcher.message);
|
||||
// if there is a zero return code then we do not apply the matchers
|
||||
if (exitCode === 0)
|
||||
return { exitCode, stdout };
|
||||
if (matchers) {
|
||||
for (const matcher of matchers) {
|
||||
if (matcher.exitCode === exitCode ||
|
||||
((_a = matcher.outputRegex) === null || _a === void 0 ? void 0 : _a.test(stderr)) ||
|
||||
((_b = matcher.outputRegex) === null || _b === void 0 ? void 0 : _b.test(stdout))) {
|
||||
throw new Error(matcher.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (typeof returnState === "number") {
|
||||
// only if we were instructed to ignore the return code do we ever return it non-zero
|
||||
if (options === null || options === void 0 ? void 0 : options.ignoreReturnCode) {
|
||||
return returnState;
|
||||
return { exitCode, stdout };
|
||||
}
|
||||
else {
|
||||
throw new Error(`The process '${commandLine}' failed with exit code ${returnState}`);
|
||||
throw new Error(`The process '${commandLine}' failed with exit code ${exitCode}`);
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw returnState;
|
||||
catch (e) {
|
||||
const error = e instanceof Error ? e : new Error(String(e));
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
exports.toolrunnerErrorCatcher = toolrunnerErrorCatcher;
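// Illustrative sketch (not part of the diff above): it shows how a caller uses the new return
// shape of toolrunnerErrorCatcher ({ exitCode, stdout } instead of a bare exit code), together
// with a matcher that converts a known failure pattern into a clearer error. The matcher values
// mirror the ones used in the tests later in this diff; the command and arguments are examples.
async function exampleRunSketch() {
    const matchers = [
        { exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" },
    ];
    const { exitCode, stdout } = await toolrunnerErrorCatcher("node", ["--version"], matchers, {
        ignoreReturnCode: true,
    });
    if (exitCode === 0) {
        return stdout.trim();
    }
    throw new Error(`node exited with status ${exitCode}`);
}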
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAnED,wDAmEC"}
|
||||
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AASpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,QAAgB,CAAC;IACrB,IAAI;QACF,QAAQ,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CACxC,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;QAET,mEAAmE;QACnE,IAAI,QAAQ,KAAK,CAAC;YAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;QAEhD,IAAI,QAAQ,EAAE;YACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;gBAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,QAAQ;qBAC7B,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;qBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;oBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;iBAClC;aACF;SACF;QAED,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;SAC7B;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,QAAQ,EAAE,CACjE,CAAC;SACH;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,KAAK,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5D,MAAM,KAAK,CAAC;KACb;AACH,CAAC;AAhED,wDAgEC"}
|
||||
13 lib/toolrunner-error-catcher.test.js generated
@@ -33,7 +33,8 @@ const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" },
|
||||
];
|
||||
t.deepEqual(await exec.exec("node", testArgs), 0);
|
||||
t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers), 0);
|
||||
const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers);
|
||||
t.deepEqual(returnState.exitCode, 0);
|
||||
});
|
||||
(0, ava_1.default)("regex matchers are applied to stdout for non-zero exit code", async (t) => {
|
||||
const testArgs = buildDummyArgs("foo bar\\nblort qux", "", "", 1);
|
||||
@@ -116,9 +117,10 @@ const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||
(0, ava_1.default)("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
|
||||
const testArgs = buildDummyArgs("standard output", "error output", "", 199);
|
||||
await t.throwsAsync((0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], { ignoreReturnCode: false }), { instanceOf: Error });
|
||||
t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
|
||||
const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
|
||||
ignoreReturnCode: true,
|
||||
}), 199);
|
||||
});
|
||||
t.deepEqual(returnState.exitCode, 199);
|
||||
});
|
||||
(0, ava_1.default)("execErrorCatcher preserves behavior of provided listeners", async (t) => {
|
||||
const stdoutExpected = "standard output";
|
||||
@@ -134,9 +136,10 @@ const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||
},
|
||||
};
|
||||
const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
|
||||
t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
|
||||
const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
|
||||
listeners,
|
||||
}), 0);
|
||||
});
|
||||
t.deepEqual(returnState.exitCode, 0);
|
||||
t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
|
||||
t.deepEqual(stderrActual, `${stderrExpected}\n`);
|
||||
});
|
||||
|
||||
File diff suppressed because one or more lines are too long
167 lib/upload-lib.js generated
@@ -30,13 +30,13 @@ const zlib_1 = __importDefault(require("zlib"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const file_url_1 = __importDefault(require("file-url"));
|
||||
const jsonschema = __importStar(require("jsonschema"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const fingerprints = __importStar(require("./fingerprints"));
|
||||
const repository_1 = require("./repository");
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util = __importStar(require("./util"));
|
||||
const workflow = __importStar(require("./workflow"));
|
||||
// Takes a list of paths to sarif files and combines them together,
|
||||
// returning the contents of the combined sarif file.
|
||||
function combineSarifFiles(sarifFiles) {
|
||||
@@ -128,9 +128,8 @@ function findSarifFilesInDir(sarifPath) {
|
||||
exports.findSarifFilesInDir = findSarifFilesInDir;
|
||||
// Uploads a single sarif file or a directory of sarif files
|
||||
// depending on what the path happens to refer to.
|
||||
// Returns true iff the upload occurred and succeeded
|
||||
async function uploadFromActions(sarifPath, gitHubVersion, logger) {
|
||||
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(actionsUtil.getRequiredInput("checkout_path")), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, logger);
|
||||
async function uploadFromActions(sarifPath, checkoutPath, category, logger) {
|
||||
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), category, util.getRequiredEnvParam("GITHUB_WORKFLOW"), workflow.getWorkflowRunID(), checkoutPath, actionsUtil.getRequiredInput("matrix"), logger);
|
||||
}
|
||||
exports.uploadFromActions = uploadFromActions;
|
||||
function getSarifFilePaths(sarifPath) {
|
||||
@@ -193,7 +192,7 @@ function validateSarifFileSchema(sarifFilePath, logger) {
|
||||
exports.validateSarifFileSchema = validateSarifFileSchema;
|
||||
// buildPayload constructs a map ready to be uploaded to the API from the given
|
||||
// parameters, respecting the current mode and target GitHub instance version.
|
||||
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, mergeBaseCommitOid) {
|
||||
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
|
||||
const payloadObj = {
|
||||
commit_oid: commitOid,
|
||||
ref,
|
||||
@@ -208,26 +207,22 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
|
||||
base_ref: undefined,
|
||||
base_sha: undefined,
|
||||
};
|
||||
// This behaviour can be made the default when support for GHES 3.0 is discontinued.
|
||||
if (gitHubVersion.type !== util.GitHubVariant.GHES ||
|
||||
semver.satisfies(gitHubVersion.version, `>=3.1`)) {
|
||||
if (actionsUtil.workflowEventName() === "pull_request") {
|
||||
if (commitOid === util.getRequiredEnvParam("GITHUB_SHA") &&
|
||||
mergeBaseCommitOid) {
|
||||
// We're uploading results for the merge commit
|
||||
// and were able to determine the merge base.
|
||||
// So we use that as the most accurate base.
|
||||
payloadObj.base_ref = `refs/heads/${util.getRequiredEnvParam("GITHUB_BASE_REF")}`;
|
||||
payloadObj.base_sha = mergeBaseCommitOid;
|
||||
}
|
||||
else if (process.env.GITHUB_EVENT_PATH) {
|
||||
// Either we're not uploading results for the merge commit
|
||||
// or we could not determine the merge base.
|
||||
// Using the PR base is the only option here
|
||||
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
|
||||
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
|
||||
payloadObj.base_sha = githubEvent.pull_request.base.sha;
|
||||
}
|
||||
if (actionsUtil.workflowEventName() === "pull_request") {
|
||||
if (commitOid === util.getRequiredEnvParam("GITHUB_SHA") &&
|
||||
mergeBaseCommitOid) {
|
||||
// We're uploading results for the merge commit
|
||||
// and were able to determine the merge base.
|
||||
// So we use that as the most accurate base.
|
||||
payloadObj.base_ref = `refs/heads/${util.getRequiredEnvParam("GITHUB_BASE_REF")}`;
|
||||
payloadObj.base_sha = mergeBaseCommitOid;
|
||||
}
|
||||
else if (process.env.GITHUB_EVENT_PATH) {
|
||||
// Either we're not uploading results for the merge commit
|
||||
// or we could not determine the merge base.
|
||||
// Using the PR base is the only option here
|
||||
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
|
||||
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
|
||||
payloadObj.base_sha = githubEvent.pull_request.base.sha;
|
||||
}
|
||||
}
|
||||
return payloadObj;
|
||||
@@ -235,7 +230,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
|
||||
exports.buildPayload = buildPayload;
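// Illustrative sketch (not part of the diff above): it isolates the pull_request branch of
// buildPayload -- prefer the merge-base commit when the SARIF was produced for the merge commit,
// otherwise fall back to the PR base recorded in the workflow event payload. It reads the
// environment directly instead of using util.getRequiredEnvParam, and assumes `fs` is the Node
// fs module already required at the top of upload-lib.js.
function resolvePullRequestBaseSketch(commitOid, mergeBaseCommitOid) {
    if (commitOid === process.env.GITHUB_SHA && mergeBaseCommitOid) {
        return {
            base_ref: `refs/heads/${process.env.GITHUB_BASE_REF}`,
            base_sha: mergeBaseCommitOid,
        };
    }
    if (process.env.GITHUB_EVENT_PATH) {
        const event = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
        return {
            base_ref: `refs/heads/${event.pull_request.base.ref}`,
            base_sha: event.pull_request.base.sha,
        };
    }
    return { base_ref: undefined, base_sha: undefined };
}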
|
||||
// Uploads the given set of sarif files.
|
||||
// Returns true iff the upload occurred and succeeded
|
||||
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, logger) {
|
||||
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, logger) {
|
||||
logger.startGroup("Uploading results");
|
||||
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
|
||||
// Validate that the files we were asked to upload are all valid SARIF files
|
||||
@@ -252,7 +247,7 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
|
||||
const sarifPayload = JSON.stringify(sarif);
|
||||
const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
|
||||
const checkoutURI = (0, file_url_1.default)(sourceRoot);
|
||||
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, await actionsUtil.determineMergeBaseCommitOid());
|
||||
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, await actionsUtil.determineMergeBaseCommitOid());
|
||||
// Log some useful debug info about the info
|
||||
const rawUploadSizeBytes = sarifPayload.length;
|
||||
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
|
||||
@@ -274,48 +269,92 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
|
||||
}
|
||||
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
|
||||
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
|
||||
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
|
||||
async function waitForProcessing(repositoryNwo, sarifID, logger) {
|
||||
/**
|
||||
* Waits until either the analysis is successfully processed, a processing error
|
||||
* is reported, or `STATUS_CHECK_TIMEOUT_MILLISECONDS` elapses.
|
||||
*
|
||||
* If `isUnsuccessfulExecution` is passed, will throw an error if the analysis
|
||||
* processing does not produce a single error mentioning the unsuccessful
|
||||
* execution.
|
||||
*/
|
||||
async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
|
||||
isUnsuccessfulExecution: false,
|
||||
}) {
|
||||
logger.startGroup("Waiting for processing to finish");
|
||||
const client = api.getApiClient();
|
||||
const statusCheckingStarted = Date.now();
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
if (Date.now() >
|
||||
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
|
||||
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
|
||||
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
|
||||
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
|
||||
break;
|
||||
try {
|
||||
const client = api.getApiClient();
|
||||
const statusCheckingStarted = Date.now();
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
if (Date.now() >
|
||||
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
|
||||
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
|
||||
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
|
||||
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
|
||||
break;
|
||||
}
|
||||
let response = undefined;
|
||||
try {
|
||||
response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
sarif_id: sarifID,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
logger.warning(`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`);
|
||||
break;
|
||||
}
|
||||
const status = response.data.processing_status;
|
||||
logger.info(`Analysis upload status is ${status}.`);
|
||||
if (status === "pending") {
|
||||
logger.debug("Analysis processing is still pending...");
|
||||
}
|
||||
else if (options.isUnsuccessfulExecution) {
|
||||
// We expect a specific processing error for unsuccessful executions, so
|
||||
// handle these separately.
|
||||
handleProcessingResultForUnsuccessfulExecution(response, status, logger);
|
||||
break;
|
||||
}
|
||||
else if (status === "complete") {
|
||||
break;
|
||||
}
|
||||
else if (status === "failed") {
|
||||
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
|
||||
}
|
||||
else {
|
||||
util.assertNever(status);
|
||||
}
|
||||
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
|
||||
}
|
||||
let response = undefined;
|
||||
try {
|
||||
response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
sarif_id: sarifID,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
logger.warning(`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`);
|
||||
break;
|
||||
}
|
||||
const status = response.data.processing_status;
|
||||
logger.info(`Analysis upload status is ${status}.`);
|
||||
if (status === "complete") {
|
||||
break;
|
||||
}
|
||||
else if (status === "pending") {
|
||||
logger.debug("Analysis processing is still pending...");
|
||||
}
|
||||
else if (status === "failed") {
|
||||
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
|
||||
}
|
||||
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
|
||||
}
|
||||
logger.endGroup();
|
||||
finally {
|
||||
logger.endGroup();
|
||||
}
|
||||
}
|
||||
exports.waitForProcessing = waitForProcessing;
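// Illustrative sketch (not part of the diff above): waitForProcessing is an instance of a
// generic poll-until-done loop -- check a status, stop on a terminal state, give up after a
// fixed timeout, and sleep a fixed interval between checks. The defaults below reuse the
// constants from the code above (5 second interval, 2 minute timeout).
async function pollUntilSketch(check, { intervalMs = 5 * 1000, timeoutMs = 2 * 60 * 1000 } = {}) {
    const started = Date.now();
    while (Date.now() - started <= timeoutMs) {
        const result = await check(); // should return undefined while still pending
        if (result !== undefined) {
            return result;
        }
        await new Promise((resolve) => setTimeout(resolve, intervalMs));
    }
    return undefined; // timed out; the caller decides whether that is fatal
}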
|
||||
/**
|
||||
* Checks the processing result for an unsuccessful execution. Throws if the
|
||||
* result is not a failure with a single "unsuccessful execution" error.
|
||||
*/
|
||||
function handleProcessingResultForUnsuccessfulExecution(response, status, logger) {
|
||||
if (status === "failed" &&
|
||||
Array.isArray(response.data.errors) &&
|
||||
response.data.errors.length === 1 &&
|
||||
response.data.errors[0].toString().startsWith("unsuccessful execution")) {
|
||||
logger.debug("Successfully uploaded a SARIF file for the unsuccessful execution. Received expected " +
|
||||
'"unsuccessful execution" error, and no other errors.');
|
||||
}
|
||||
else {
|
||||
const shortMessage = "Failed to upload a SARIF file for the unsuccessful execution. Code scanning status " +
|
||||
"information for the repository may be out of date as a result.";
|
||||
const longMessage = shortMessage + status === "failed"
|
||||
? ` Processing errors: ${response.data.errors}`
|
||||
: ' Encountered no processing errors, but expected to receive an "unsuccessful execution" error.';
|
||||
logger.debug(longMessage);
|
||||
throw new Error(shortMessage);
|
||||
}
|
||||
}
|
||||
function validateUniqueCategory(sarif) {
|
||||
var _a, _b, _c;
|
||||
// duplicate categories are allowed in the same sarif file
|
||||
|
||||
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff.