Mirror of https://github.com/github/codeql-action.git, synced 2025-12-07 16:28:15 +08:00
Compare commits: mbg/fix/pr...mbg/api/re (48 commits)

Commits included: 456086d251, d71accc5fa, 7f44048739, 8a84c17a9d, 3837f2e205, b8806eca8c, 33da5f0b36, 8a9ef89a8a, 6b6d1ddcf9, 2b07444ab3, 5ab5aef079, 4901f549de, e2e36b17af, bd516303e1, 6117099fe1, 79e0afb999, a25c57cebe, 48017e960d, 39be66afb0, 67427c612a, 9e8cbee7cb, 0f4529ee05, 0c4919df84, 2d8d6395ef, 6fcf631e73, a067418f51, 0337c4c06e, c22ae04dd3, 80273e2bc1, dc1166cacb, ddc6d540f0, 6222edff53, 3305d21389, db37d924ee, 6249793233, e33b0ab3ac, 7bea0e2e12, d378195403, 12dda79905, a2ce099060, 696b467654, c8e017d3e7, 8185897cad, a6161a8092, 35454d39b2, b73659a4ff, 2f35a47982, 242ca1c0a1.
.github/actions/prepare-test/action.yml (vendored), 26 changed lines

@@ -2,7 +2,7 @@ name: "Prepare test"
 description: Performs some preparation to run tests
 inputs:
   version:
-    description: "The version of the CodeQL CLI to use. Can be 'linked', 'default', 'nightly-latest', 'nightly-YYYYMMDD', or 'stable-vX.Y.Z"
+    description: "The version of the CodeQL CLI to use. Can be 'linked', 'default', 'nightly', 'nightly-latest', 'nightly-YYYYMMDD', or 'stable-vX.Y.Z"
     required: true
   use-all-platform-bundle:
     description: "If true, we output a tools URL with codeql-bundle.tar.gz file rather than platform-specific URL"
@@ -35,7 +35,10 @@ runs:
       run: |
         set -e # Fail this Action if `gh release list` fails.

-        if [[ "$VERSION" == "linked" ]]; then
+        if [[ "$VERSION" == "nightly" || "$VERSION" == "nightly-latest" ]]; then
+          echo "tools-url=nightly" >> "$GITHUB_OUTPUT"
+          exit 0
+        elif [[ "$VERSION" == "linked" ]]; then
           echo "tools-url=linked" >> "$GITHUB_OUTPUT"
           exit 0
         elif [[ "$VERSION" == "default" ]]; then
@@ -43,29 +46,20 @@ runs:
           exit 0
         fi

-        if [[ "$VERSION" == "nightly-latest" && "$RUNNER_OS" != "Windows" ]]; then
-          extension="tar.zst"
-        else
-          extension="tar.gz"
-        fi
-
         if [[ "$USE_ALL_PLATFORM_BUNDLE" == "true" ]]; then
-          artifact_name="codeql-bundle.$extension"
+          artifact_name="codeql-bundle.tar.gz"
         elif [[ "$RUNNER_OS" == "Linux" ]]; then
-          artifact_name="codeql-bundle-linux64.$extension"
+          artifact_name="codeql-bundle-linux64.tar.gz"
         elif [[ "$RUNNER_OS" == "macOS" ]]; then
-          artifact_name="codeql-bundle-osx64.$extension"
+          artifact_name="codeql-bundle-osx64.tar.gz"
         elif [[ "$RUNNER_OS" == "Windows" ]]; then
-          artifact_name="codeql-bundle-win64.$extension"
+          artifact_name="codeql-bundle-win64.tar.gz"
         else
           echo "::error::Unrecognized OS $RUNNER_OS"
           exit 1
         fi

-        if [[ "$VERSION" == "nightly-latest" ]]; then
-          tag=`gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3`
-          echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$tag/$artifact_name" >> $GITHUB_OUTPUT
-        elif [[ "$VERSION" == *"nightly"* ]]; then
+        if [[ "$VERSION" == *"nightly"* ]]; then
           version=`echo "$VERSION" | sed -e 's/^.*\-//'`
           echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$version/$artifact_name" >> $GITHUB_OUTPUT
         elif [[ "$VERSION" == *"stable"* ]]; then
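For orientation, here is a minimal sketch of how a test job might consume this composite action now that `nightly` is accepted. The job skeleton and the checkout step are illustrative assumptions; the `version` and `use-all-platform-bundle` inputs, the `tools-url` output, and the `tools: ${{ steps.prepare-test.outputs.tools-url }}` hand-off to the init step are taken from the diffs in this comparison.

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4            # illustrative checkout; real test workflows use their own layout
      - id: prepare-test
        uses: ./.github/actions/prepare-test  # conventional local path to the composite action (assumption)
        with:
          version: nightly                    # new alias; the step emits tools-url=nightly and lets init resolve it
          use-all-platform-bundle: "false"
      - uses: ./../action/init                # test workflows pass the prepared URL (or alias) straight through
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
```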
Generated workflow files under .github/workflows (all vendored): each of the following 49 files changes by 3 lines, gaining the shared `concurrency` block shown below between the `defaults` block and `jobs`. The job name in each file matches the workflow name.

 defaults:
   run:
     shell: bash
+concurrency:
+  cancel-in-progress: ${{ github.event_name == 'pull_request' }}
+  group: ${{ github.workflow }}-${{ github.ref }}
 jobs:

Files with the hunk at `@@ -27,6 +27,9 @@ on:`:
__autobuild-action.yml, __build-mode-autobuild.yml, __build-mode-none.yml, __build-mode-rollback.yml, __bundle-toolcache.yml, __bundle-zstd.yml, __cleanup-db-cluster-dir.yml, __config-export.yml, __config-input.yml, __cpp-deptrace-disabled.yml, __cpp-deptrace-enabled-on-macos.yml, __cpp-deptrace-enabled.yml, __diagnostics-export.yml, __extractor-ram-threads.yml, __init-with-registries.yml, __javascript-source-root.yml, __job-run-uuid-sarif.yml, __language-aliases.yml, __overlay-init-fallback.yml, __quality-queries.yml, __resolve-environment-action.yml, __rubocop-multi-language.yml, __ruby.yml, __rust.yml, __start-proxy.yml, __submit-sarif-failure.yml, __swift-autobuild.yml, __test-autobuild-working-dir.yml

Files with the hunk at `@@ -37,6 +37,9 @@ on:`:
__all-platform-bundle.yml, __analyze-ref-input.yml, __autobuild-direct-tracing-with-working-dir.yml, __autobuild-direct-tracing.yml, __build-mode-manual.yml, __export-file-baseline-information.yml, __go-custom-queries.yml, __go-indirect-tracing-workaround-diagnostic.yml, __go-indirect-tracing-workaround-no-file-program.yml, __go-indirect-tracing-workaround.yml, __go-tracing-autobuilder.yml, __go-tracing-custom-build-steps.yml, __go-tracing-legacy-workflow.yml, __multi-language-autodetect.yml, __packaging-codescanning-config-inputs-js.yml, __packaging-config-inputs-js.yml, __packaging-config-js.yml, __packaging-inputs-js.yml, __remote-config.yml, __split-workflow.yml, __swift-custom-build.yml
.github/workflows/__test-local-codeql.yml (generated, vendored), 11 changed lines. The same `concurrency` block as above is added at `@@ -37,6 +37,9 @@ on:` (job `test-local-codeql`); the remaining hunks:

@@ -44,7 +47,7 @@ jobs:
       matrix:
         include:
           - os: ubuntu-latest
-            version: nightly-latest
+            version: linked
     name: Local CodeQL bundle
     permissions:
       contents: read
@@ -66,11 +69,9 @@ jobs:
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
-      - name: Fetch a CodeQL bundle
-        env:
-          CODEQL_URL: ${{ steps.prepare-test.outputs.tools-url }}
+      - name: Fetch latest CodeQL bundle
        run: |
-          wget "$CODEQL_URL"
+          wget https://github.com/github/codeql-action/releases/latest/download/codeql-bundle-linux64.tar.zst
      - id: init
        uses: ./../action/init
        with:
.github/workflows/__test-proxy.yml (generated, vendored), 3 changed lines: the same `concurrency` block, added at `@@ -27,6 +27,9 @@ on:` (job `test-proxy`).
.github/workflows/__unset-environment.yml (generated, vendored), 3 changed lines: the same `concurrency` block, added at `@@ -37,6 +37,9 @@ on:` (job `unset-environment`).
.github/workflows/__upload-quality-sarif.yml (generated, vendored), 13 changed lines. The same `concurrency` block as above is added at `@@ -37,6 +37,9 @@ on:` (job `upload-quality-sarif`); the remaining hunks:

@@ -73,10 +76,8 @@ jobs:
      - uses: ./../action/init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
-          languages: cpp,csharp,java,javascript,python
-          config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
-            github.sha }}
-          analysis-kinds: code-scanning,code-quality
+          languages: csharp,java,javascript,python
+          analysis-kinds: code-quality
      - name: Build code
        run: ./build.sh
      # Generate some SARIF we can upload with the upload-sarif step
@@ -86,8 +87,12 @@ jobs:
          sha: 5e235361806c361d4d3f8859e3c897658025a9a2
          upload: never
      - uses: ./../action/upload-sarif
+        id: upload-sarif
        with:
          ref: refs/heads/main
          sha: 5e235361806c361d4d3f8859e3c897658025a9a2
+      - name: Check output from `upload-sarif` step
+        if: fromJSON(steps.upload-sarif.outputs.sarif-ids)[0].analysis != 'code-quality'
+        run: exit 1
      env:
        CODEQL_ACTION_TEST_MODE: true
.github/workflows/__upload-ref-sha-input.yml (generated, vendored), 3 changed lines: the same `concurrency` block, added at `@@ -37,6 +37,9 @@ on:` (job `upload-ref-sha-input`).
.github/workflows/__with-checkout-path.yml (generated, vendored), 3 changed lines: the same `concurrency` block, added at `@@ -37,6 +37,9 @@ on:` (job `with-checkout-path`).
@@ -1,6 +1,8 @@
 #!/usr/bin/env bash
 # Update the required checks based on the current branch.

+set -euo pipefail
+
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 REPO_DIR="$(dirname "$SCRIPT_DIR")"
 GRANDPARENT_DIR="$(dirname "$REPO_DIR")"
@@ -31,6 +33,12 @@ CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs -

 echo "$CHECKS" | jq

+# Fail if there are no checks
+if [ -z "$CHECKS" ] || [ "$(echo "$CHECKS" | jq '. | length')" -eq 0 ]; then
+  echo "No checks found for $GITHUB_SHA"
+  exit 1
+fi
+
 echo "{\"contexts\": ${CHECKS}}" > checks.json

 echo "Updating main"
@@ -6,6 +6,7 @@ See the [releases page](https://github.com/github/codeql-action/releases) for the

 - We have improved the CodeQL Action's ability to validate that the workflow it is used in does not use different versions of the CodeQL Action for different workflow steps. Mixing different versions of the CodeQL Action in the same workflow is unsupported and can lead to unpredictable results. A warning will now be emitted from the `codeql-action/init` step if different versions of the CodeQL Action are detected in the workflow file. Additionally, an error will now be thrown by the other CodeQL Action steps if they load a configuration file that was generated by a different version of the `codeql-action/init` step. [#3099](https://github.com/github/codeql-action/pull/3099) and [#3100](https://github.com/github/codeql-action/pull/3100)
 - We added support for reducing the size of dependency caches for Java analyses, which will reduce cache usage and speed up workflows. This will be enabled automatically at a later time. [#3107](https://github.com/github/codeql-action/pull/3107)
+- You can now run the latest CodeQL nightly bundle by passing `tools: nightly` to the `init` action. In general, the nightly bundle is unstable and we only recommend running it when directed by GitHub staff. [#3130](https://github.com/github/codeql-action/pull/3130)

 ## 3.30.3 - 10 Sep 2025
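As a usage sketch of the new changelog entry above: only the `tools: nightly` input comes from this diff; the job skeleton, `languages` value, and the action/checkout version tags are illustrative assumptions.

```yaml
# Opt into the latest CodeQL nightly bundle (unstable; intended only when directed by GitHub staff).
jobs:
  analyze:
    runs-on: ubuntu-latest
    permissions:
      security-events: write
      contents: read
    steps:
      - uses: actions/checkout@v4
      - uses: github/codeql-action/init@v3
        with:
          languages: javascript
          tools: nightly        # omit this input to fall back to the bundled ("linked") CodeQL tools
      - uses: github/codeql-action/analyze@v3
```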
@@ -58,7 +58,7 @@ inputs:
     # If changing this, make sure to update workflow.ts accordingly.
     default: ${{ github.workspace }}
   ref:
-    description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is ignored for pull requests from forks."
+    description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is ignored for pull requests from forks. Expected format: refs/heads/<branch name>, refs/tags/<tag>, refs/pull/<number>/merge, or refs/pull/<number>/head."
     required: false
   sha:
     description: "The sha of the HEAD of the ref where results will be uploaded. If not provided, the Action will use the GITHUB_SHA environment variable. If provided, the ref input must be provided as well. This input is ignored for pull requests from forks."
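A hedged sketch of supplying these inputs explicitly to the upload step: the `sarif_file` path and the `@v3` tag are illustrative, while the `ref` format and the full 40-character `sha` follow the description above (the values are borrowed from the test workflow in this comparison).

```yaml
      - uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: results.sarif
          ref: refs/heads/main   # one of refs/heads/<branch>, refs/tags/<tag>, refs/pull/<n>/merge, refs/pull/<n>/head
          sha: 5e235361806c361d4d3f8859e3c897658025a9a2   # must be supplied together with ref
```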
@@ -12,6 +12,9 @@ inputs:
       - The URL of a CodeQL Bundle tarball GitHub release asset, or
       - A special value `linked` which uses the version of the CodeQL tools
         that the Action has been bundled with.
+      - A special value `nightly` which uses the latest nightly version of the
+        CodeQL tools. Note that this is unstable and not recommended for
+        production use.

     If not specified, the Action will check in several places until it finds
     the CodeQL tools.
lib/analyze-action-post.js (generated), 59 changed lines; changed lines by hunk:

@@ -26447,7 +26447,7 @@ var require_package = __commonJS({
-      test: "npm run transpile && ava src/**.test.ts --serial --verbose",
+      test: "npm run transpile && ava src/ --serial --verbose",
@@ -26492,9 +26492,9 @@
-      "@eslint/js": "^9.35.0",
+      "@eslint/js": "^9.36.0",
-      "@octokit/types": "^14.1.0",
+      "@octokit/types": "^15.0.0",
@@ -26503,10 +26503,10 @@
-      "@typescript-eslint/eslint-plugin": "^8.43.0",
+      "@typescript-eslint/eslint-plugin": "^8.44.0",
-      esbuild: "^0.25.9",
+      esbuild: "^0.25.10",
@@ -94796,7 +94796,7 @@ var require_commonjs16 = __commonJS({
-    var normalize3 = (s) => {
+    var normalize2 = (s) => {
@@ -94809,7 +94809,7 @@
-      const n = normalize3(s.toLowerCase());
+      const n = normalize2(s.toLowerCase());
@@ -94978,7 +94978,7 @@
-      this.#matchName = nocase ? normalizeNocase(name) : normalize3(name);
+      this.#matchName = nocase ? normalizeNocase(name) : normalize2(name);
@@ -95071,7 +95071,7 @@
-      const name = this.nocase ? normalizeNocase(pathPart) : normalize3(pathPart);
+      const name = this.nocase ? normalizeNocase(pathPart) : normalize2(pathPart);
@@ -95316,7 +95316,7 @@
-      return !this.nocase ? this.#matchName === normalize3(n) : this.#matchName === normalizeNocase(n);
+      return !this.nocase ? this.#matchName === normalize2(n) : this.#matchName === normalizeNocase(n);
@@ -95455,7 +95455,7 @@
-        const name = this.nocase ? normalizeNocase(e.name) : normalize3(e.name);
+        const name = this.nocase ? normalizeNocase(e.name) : normalize2(e.name);
@@ -103286,7 +103286,7 @@ var require_tr46 = __commonJS({
-    function normalize3(str2) {
+    function normalize2(str2) {
@@ -103366,7 +103366,7 @@
-      if (normalize3(label) !== label || label[3] === "-" && label[4] === "-" || label[0] === "-" || label[label.length - 1] === "-" || label.indexOf(".") !== -1 || label.search(combiningMarksRegex) === 0) {
+      if (normalize2(label) !== label || label[3] === "-" && label[4] === "-" || label[0] === "-" || label[label.length - 1] === "-" || label.indexOf(".") !== -1 || label.search(combiningMarksRegex) === 0) {
@@ -103384,7 +103384,7 @@
-      result.string = normalize3(result.string);
+      result.string = normalize2(result.string);
@@ -117076,6 +117076,9 @@ function getCachedCodeQlVersion() {
+function isInTestMode() {
+  return process.env["CODEQL_ACTION_TEST_MODE" /* TEST_MODE */] === "true";
+}
@@ -117195,6 +117198,9 @@ var githubUtils = __toESM(require_utils4());
+function getRetryConfig() {
+  return isInTestMode() ? { retries: 10, retryAfterBaseValue: 1e4 } : { retries: 3, retryAfterBaseValue: 1e3 };
+}
@@ -117202,7 +117208,8 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {})
-      log: (0, import_console_log_level.default)({ level: "debug" })
+      log: (0, import_console_log_level.default)({ level: "debug" }),
+      retry: getRetryConfig()
@@ -117487,7 +117494,6 @@ function wrapCliConfigurationError(cliError) {
-var semver4 = __toESM(require_semver2());
@@ -117500,8 +117506,17 @@ var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
+// src/config/db-config.ts
+var semver2 = __toESM(require_semver2());
+var PACK_IDENTIFIER_PATTERN = (function() {
+  const alphaNumeric = "[a-z0-9]";
+  const alphaNumericDash = "[a-z0-9-]";
+  const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
+  return new RegExp(`^${component}/${component}$`);
+})();
+
-var semver3 = __toESM(require_semver2());
+var semver4 = __toESM(require_semver2());
@@ -117747,13 +117762,13 @@ function computeChangedFiles(baseFileOids, overlayFileOids) {
-var semver2 = __toESM(require_semver2());
+var semver3 = __toESM(require_semver2());
-  return !codeQlVersion ? true : semver2.gte(codeQlVersion, SafeArtifactUploadVersion);
+  return !codeQlVersion ? true : semver3.gte(codeQlVersion, SafeArtifactUploadVersion);
@@ -117955,12 +117970,6 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
-var PACK_IDENTIFIER_PATTERN = (function() {
-  const alphaNumeric = "[a-z0-9]";
-  const alphaNumericDash = "[a-z0-9-]";
-  const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
-  return new RegExp(`^${component}/${component}$`);
-})();
lib/analyze-action.js (generated), 91 changed lines; changed lines by hunk:

@@ -27722,7 +27722,7 @@ var require_pattern = __commonJS({
-        if (isAbsolute3(pattern)) {
+        if (isAbsolute2(pattern)) {
@@ -27731,10 +27731,10 @@
-    function isAbsolute3(pattern) {
+    function isAbsolute2(pattern) {
-    exports2.isAbsolute = isAbsolute3;
+    exports2.isAbsolute = isAbsolute2;
@@ -32296,7 +32296,7 @@ var require_package = __commonJS({
-      test: "npm run transpile && ava src/**.test.ts --serial --verbose",
+      test: "npm run transpile && ava src/ --serial --verbose",
@@ -32341,9 +32341,9 @@
-      "@eslint/js": "^9.35.0",
+      "@eslint/js": "^9.36.0",
-      "@octokit/types": "^14.1.0",
+      "@octokit/types": "^15.0.0",
@@ -32352,10 +32352,10 @@
-      "@typescript-eslint/eslint-plugin": "^8.43.0",
+      "@typescript-eslint/eslint-plugin": "^8.44.0",
-      esbuild: "^0.25.9",
+      esbuild: "^0.25.10",
@@ -89754,7 +89754,7 @@ async function tryGetFolderBytes(cacheDir, logger, quiet = false) {
-async function withTimeout(timeoutMs, promise, onTimeout) {
+async function waitForResultWithTimeLimit(timeoutMs, promise, onTimeout) {
@@ -90177,6 +90177,9 @@ function parseRepositoryNwo(input) {
+function getRetryConfig() {
+  return isInTestMode() ? { retries: 10, retryAfterBaseValue: 1e4 } : { retries: 3, retryAfterBaseValue: 1e3 };
+}
@@ -90184,7 +90187,8 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {})
-      log: (0, import_console_log_level.default)({ level: "debug" })
+      log: (0, import_console_log_level.default)({ level: "debug" }),
+      retry: getRetryConfig()
@@ -90555,7 +90559,6 @@ function wrapCliConfigurationError(cliError) {
-var semver4 = __toESM(require_semver2());
@@ -90569,6 +90572,15 @@ function shouldStoreCache(kind) {
+// src/config/db-config.ts
+var semver2 = __toESM(require_semver2());
+var PACK_IDENTIFIER_PATTERN = (function() {
+  const alphaNumeric = "[a-z0-9]";
+  const alphaNumericDash = "[a-z0-9-]";
+  const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
+  return new RegExp(`^${component}/${component}$`);
+})();
+
@@ -90576,7 +90588,7 @@ var path9 = __toESM(require("path"));
-var semver3 = __toESM(require_semver2());
+var semver4 = __toESM(require_semver2());
@@ -90872,7 +90884,7 @@ function computeChangedFiles(baseFileOids, overlayFileOids) {
-var MAX_CACHE_OPERATION_MS = 12e4;
+var MAX_CACHE_OPERATION_MS = 6e5;
@@ -90940,7 +90952,7 @@ async function uploadOverlayBaseDatabaseToCache(codeql, config, logger) {
-    const cacheId = await withTimeout(
+    const cacheId = await waitForResultWithTimeLimit(
@@ -90982,7 +90994,7 @@ function createCacheKeyHash(components) {
-var semver2 = __toESM(require_semver2());
+var semver3 = __toESM(require_semver2());
@@ -91266,7 +91278,7 @@ var GitHubFeatureFlags = class {
-      if (!semver3.valid(version)) {
+      if (!semver4.valid(version)) {
@@ -91498,7 +91510,7 @@ async function uploadTrapCaches(codeql, config, logger) {
-  await withTimeout(
+  await waitForResultWithTimeLimit(
@@ -91615,12 +91627,6 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
-var PACK_IDENTIFIER_PATTERN = (function() {
-  const alphaNumeric = "[a-z0-9]";
-  const alphaNumericDash = "[a-z0-9-]";
-  const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
-  return new RegExp(`^${component}/${component}$`);
-})();
@@ -92051,7 +92057,10 @@ function sanitizeUrlForStatusReport(url2) {
+var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing";
+var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";
+var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"];
@@ -92194,7 +92203,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
-  if (toolsInput && !CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput) && !toolsInput.startsWith("http")) {
+  if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
@@ -92223,6 +92232,12 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
+  if (toolsInput !== void 0 && CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput)) {
+    logger.info(
+      `Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.`
+    );
+    toolsInput = await getNightlyToolsUrl(logger);
+  }
@@ -92506,6 +92521,34 @@ async function useZstdBundle(cliVersion2, tarSupportsZstd) {
+async function getNightlyToolsUrl(logger) {
+  const zstdAvailability = await isZstdAvailable(logger);
+  const compressionMethod = await useZstdBundle(
+    CODEQL_VERSION_ZSTD_BUNDLE,
+    zstdAvailability.available
+  ) ? "zstd" : "gzip";
+  try {
+    const release3 = await getApiClient().rest.repos.listReleases({
+      owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
+      repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
+      per_page: 1,
+      page: 1,
+      prerelease: true
+    });
+    const latestRelease = release3.data[0];
+    if (!latestRelease) {
+      throw new Error("Could not find the latest nightly release.");
+    }
+    return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`;
+  } catch (e) {
+    throw new Error(
+      `Failed to retrieve the latest nightly release: ${wrapError(e)}`
+    );
+  }
+}
+function isReservedToolsValue(tools) {
+  return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools);
+}
38
lib/autobuild-action.js
generated
38
lib/autobuild-action.js
generated
@@ -26447,7 +26447,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
test: "npm run transpile && ava src/**.test.ts --serial --verbose",
test: "npm run transpile && ava src/ --serial --verbose",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
},
@@ -26492,9 +26492,9 @@ var require_package = __commonJS({
"@ava/typescript": "6.0.0",
"@eslint/compat": "^1.3.2",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.35.0",
"@eslint/js": "^9.36.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@octokit/types": "^14.1.0",
"@octokit/types": "^15.0.0",
"@types/archiver": "^6.0.3",
"@types/console-log-level": "^1.4.5",
"@types/follow-redirects": "^1.14.4",
@@ -26503,10 +26503,10 @@ var require_package = __commonJS({
"@types/node-forge": "^1.3.14",
"@types/semver": "^7.7.1",
"@types/sinon": "^17.0.4",
"@typescript-eslint/eslint-plugin": "^8.43.0",
"@typescript-eslint/eslint-plugin": "^8.44.0",
"@typescript-eslint/parser": "^8.41.0",
ava: "^6.4.1",
esbuild: "^0.25.9",
esbuild: "^0.25.10",
eslint: "^8.57.1",
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-filenames": "^1.3.2",
@@ -77902,6 +77902,9 @@ function parseRepositoryNwo(input) {

// src/api-client.ts
var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
function getRetryConfig() {
return isInTestMode() ? { retries: 10, retryAfterBaseValue: 1e4 } : { retries: 3, retryAfterBaseValue: 1e3 };
}
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
const auth = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth;
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
@@ -77909,7 +77912,8 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {})
githubUtils.getOctokitOptions(auth, {
baseUrl: apiDetails.apiURL,
userAgent: `CodeQL-Action/${getActionVersion()}`,
log: (0, import_console_log_level.default)({ level: "debug" })
log: (0, import_console_log_level.default)({ level: "debug" }),
retry: getRetryConfig()
})
);
}
@@ -78229,7 +78233,6 @@ function wrapCliConfigurationError(cliError) {
// src/config-utils.ts
var fs4 = __toESM(require("fs"));
var path4 = __toESM(require("path"));
var semver4 = __toESM(require_semver2());

// src/analyses.ts
var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
@@ -78242,10 +78245,19 @@ var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
// src/caching-utils.ts
var core6 = __toESM(require_core());

// src/config/db-config.ts
var semver2 = __toESM(require_semver2());
var PACK_IDENTIFIER_PATTERN = (function() {
const alphaNumeric = "[a-z0-9]";
const alphaNumericDash = "[a-z0-9-]";
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
return new RegExp(`^${component}/${component}$`);
})();

// src/feature-flags.ts
var fs3 = __toESM(require("fs"));
var path3 = __toESM(require("path"));
var semver3 = __toESM(require_semver2());
var semver4 = __toESM(require_semver2());

// src/defaults.json
var bundleVersion = "codeql-bundle-v2.23.0";
@@ -78487,7 +78499,7 @@ function computeChangedFiles(baseFileOids, overlayFileOids) {
}

// src/tools-features.ts
var semver2 = __toESM(require_semver2());
var semver3 = __toESM(require_semver2());
function isSupportedToolsFeature(versionInfo, feature) {
return !!versionInfo.features && versionInfo.features[feature];
}
@@ -78770,7 +78782,7 @@ var GitHubFeatureFlags = class {
DEFAULT_VERSION_FEATURE_FLAG_PREFIX.length,
f.length - DEFAULT_VERSION_FEATURE_FLAG_SUFFIX.length
).replace(/_/g, ".");
if (!semver3.valid(version)) {
if (!semver4.valid(version)) {
this.logger.warning(
`Ignoring feature flag ${f} as it does not specify a valid CodeQL version.`
);
@@ -78957,12 +78969,6 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */,
swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */
};
var PACK_IDENTIFIER_PATTERN = (function() {
const alphaNumeric = "[a-z0-9]";
const alphaNumericDash = "[a-z0-9-]";
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
return new RegExp(`^${component}/${component}$`);
})();
function getPathToParsedConfigFile(tempDir) {
return path4.join(tempDir, "config");
}

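The api-client.ts hunks above add a retry configuration to the Octokit options. A minimal sketch of the same wiring, assuming the bundled githubUtils and retry modules correspond to @actions/github/lib/utils and @octokit/plugin-retry (the bundle does not name the packages, so treat these imports as assumptions):

const { GitHub, getOctokitOptions } = require("@actions/github/lib/utils");
const { retry } = require("@octokit/plugin-retry");

// Same retry profiles as the diff: aggressive retries in test mode, modest otherwise.
function getRetryConfig(testMode) {
  return testMode
    ? { retries: 10, retryAfterBaseValue: 10000 }
    : { retries: 3, retryAfterBaseValue: 1000 };
}

function createApiClient(token, apiURL) {
  const RetryingOctokit = GitHub.plugin(retry);
  return new RetryingOctokit(
    getOctokitOptions(token, {
      baseUrl: apiURL,
      retry: getRetryConfig(process.env.CODEQL_ACTION_TEST_MODE === "true"),
    })
  );
}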
lib/init-action-post.js (generated, 103 lines changed)
@@ -27722,7 +27722,7 @@ var require_pattern = __commonJS({
|
||||
const absolute = [];
|
||||
const relative2 = [];
|
||||
for (const pattern of patterns) {
|
||||
if (isAbsolute3(pattern)) {
|
||||
if (isAbsolute2(pattern)) {
|
||||
absolute.push(pattern);
|
||||
} else {
|
||||
relative2.push(pattern);
|
||||
@@ -27731,10 +27731,10 @@ var require_pattern = __commonJS({
|
||||
return [absolute, relative2];
|
||||
}
|
||||
exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative;
|
||||
function isAbsolute3(pattern) {
|
||||
function isAbsolute2(pattern) {
|
||||
return path19.isAbsolute(pattern);
|
||||
}
|
||||
exports2.isAbsolute = isAbsolute3;
|
||||
exports2.isAbsolute = isAbsolute2;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -32296,7 +32296,7 @@ var require_package = __commonJS({
|
||||
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
|
||||
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
|
||||
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
|
||||
test: "npm run transpile && ava src/**.test.ts --serial --verbose",
|
||||
test: "npm run transpile && ava src/ --serial --verbose",
|
||||
"test-debug": "npm run test -- --timeout=20m",
|
||||
transpile: "tsc --build --verbose"
|
||||
},
|
||||
@@ -32341,9 +32341,9 @@ var require_package = __commonJS({
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.3.2",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.35.0",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^14.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
"@types/console-log-level": "^1.4.5",
|
||||
"@types/follow-redirects": "^1.14.4",
|
||||
@@ -32352,10 +32352,10 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.43.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.9",
|
||||
esbuild: "^0.25.10",
|
||||
eslint: "^8.57.1",
|
||||
"eslint-import-resolver-typescript": "^3.8.7",
|
||||
"eslint-plugin-filenames": "^1.3.2",
|
||||
@@ -100645,7 +100645,7 @@ var require_commonjs16 = __commonJS({
|
||||
var TYPEMASK = 1023;
|
||||
var entToType = (s) => s.isFile() ? IFREG : s.isDirectory() ? IFDIR : s.isSymbolicLink() ? IFLNK : s.isCharacterDevice() ? IFCHR : s.isBlockDevice() ? IFBLK : s.isSocket() ? IFSOCK : s.isFIFO() ? IFIFO : UNKNOWN;
|
||||
var normalizeCache = /* @__PURE__ */ new Map();
|
||||
var normalize4 = (s) => {
|
||||
var normalize3 = (s) => {
|
||||
const c = normalizeCache.get(s);
|
||||
if (c)
|
||||
return c;
|
||||
@@ -100658,7 +100658,7 @@ var require_commonjs16 = __commonJS({
|
||||
const c = normalizeNocaseCache.get(s);
|
||||
if (c)
|
||||
return c;
|
||||
const n = normalize4(s.toLowerCase());
|
||||
const n = normalize3(s.toLowerCase());
|
||||
normalizeNocaseCache.set(s, n);
|
||||
return n;
|
||||
};
|
||||
@@ -100827,7 +100827,7 @@ var require_commonjs16 = __commonJS({
|
||||
*/
|
||||
constructor(name, type2 = UNKNOWN, root, roots, nocase, children, opts) {
|
||||
this.name = name;
|
||||
this.#matchName = nocase ? normalizeNocase(name) : normalize4(name);
|
||||
this.#matchName = nocase ? normalizeNocase(name) : normalize3(name);
|
||||
this.#type = type2 & TYPEMASK;
|
||||
this.nocase = nocase;
|
||||
this.roots = roots;
|
||||
@@ -100920,7 +100920,7 @@ var require_commonjs16 = __commonJS({
|
||||
return this.parent || this;
|
||||
}
|
||||
const children = this.children();
|
||||
const name = this.nocase ? normalizeNocase(pathPart) : normalize4(pathPart);
|
||||
const name = this.nocase ? normalizeNocase(pathPart) : normalize3(pathPart);
|
||||
for (const p of children) {
|
||||
if (p.#matchName === name) {
|
||||
return p;
|
||||
@@ -101165,7 +101165,7 @@ var require_commonjs16 = __commonJS({
|
||||
* directly.
|
||||
*/
|
||||
isNamed(n) {
|
||||
return !this.nocase ? this.#matchName === normalize4(n) : this.#matchName === normalizeNocase(n);
|
||||
return !this.nocase ? this.#matchName === normalize3(n) : this.#matchName === normalizeNocase(n);
|
||||
}
|
||||
/**
|
||||
* Return the Path object corresponding to the target of a symbolic link.
|
||||
@@ -101304,7 +101304,7 @@ var require_commonjs16 = __commonJS({
|
||||
#readdirMaybePromoteChild(e, c) {
|
||||
for (let p = c.provisional; p < c.length; p++) {
|
||||
const pchild = c[p];
|
||||
const name = this.nocase ? normalizeNocase(e.name) : normalize4(e.name);
|
||||
const name = this.nocase ? normalizeNocase(e.name) : normalize3(e.name);
|
||||
if (name !== pchild.#matchName) {
|
||||
continue;
|
||||
}
|
||||
@@ -109135,7 +109135,7 @@ var require_tr46 = __commonJS({
|
||||
TRANSITIONAL: 0,
|
||||
NONTRANSITIONAL: 1
|
||||
};
|
||||
function normalize4(str2) {
|
||||
function normalize3(str2) {
|
||||
return str2.split("\0").map(function(s) {
|
||||
return s.normalize("NFC");
|
||||
}).join("\0");
|
||||
@@ -109215,7 +109215,7 @@ var require_tr46 = __commonJS({
|
||||
processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;
|
||||
}
|
||||
var error2 = false;
|
||||
if (normalize4(label) !== label || label[3] === "-" && label[4] === "-" || label[0] === "-" || label[label.length - 1] === "-" || label.indexOf(".") !== -1 || label.search(combiningMarksRegex) === 0) {
|
||||
if (normalize3(label) !== label || label[3] === "-" && label[4] === "-" || label[0] === "-" || label[label.length - 1] === "-" || label.indexOf(".") !== -1 || label.search(combiningMarksRegex) === 0) {
|
||||
error2 = true;
|
||||
}
|
||||
var len = countSymbols(label);
|
||||
@@ -109233,7 +109233,7 @@ var require_tr46 = __commonJS({
|
||||
}
|
||||
function processing(domain_name, useSTD3, processing_option) {
|
||||
var result = mapChars(domain_name, useSTD3, processing_option);
|
||||
result.string = normalize4(result.string);
|
||||
result.string = normalize3(result.string);
|
||||
var labels = result.string.split(".");
|
||||
for (var i = 0; i < labels.length; ++i) {
|
||||
try {
|
||||
@@ -128396,6 +128396,9 @@ function parseRepositoryNwo(input) {
|
||||
|
||||
// src/api-client.ts
|
||||
var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
|
||||
function getRetryConfig() {
|
||||
return isInTestMode() ? { retries: 10, retryAfterBaseValue: 1e4 } : { retries: 3, retryAfterBaseValue: 1e3 };
|
||||
}
|
||||
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
|
||||
const auth = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth;
|
||||
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
|
||||
@@ -128403,7 +128406,8 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {})
|
||||
githubUtils.getOctokitOptions(auth, {
|
||||
baseUrl: apiDetails.apiURL,
|
||||
userAgent: `CodeQL-Action/${getActionVersion()}`,
|
||||
log: (0, import_console_log_level.default)({ level: "debug" })
|
||||
log: (0, import_console_log_level.default)({ level: "debug" }),
|
||||
retry: getRetryConfig()
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -128746,7 +128750,6 @@ function wrapCliConfigurationError(cliError) {
|
||||
// src/config-utils.ts
|
||||
var fs9 = __toESM(require("fs"));
|
||||
var path10 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/analyses.ts
|
||||
var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
|
||||
@@ -128775,6 +128778,15 @@ var CodeQuality = {
|
||||
// src/caching-utils.ts
|
||||
var core6 = __toESM(require_core());
|
||||
|
||||
// src/config/db-config.ts
|
||||
var semver2 = __toESM(require_semver2());
|
||||
var PACK_IDENTIFIER_PATTERN = (function() {
|
||||
const alphaNumeric = "[a-z0-9]";
|
||||
const alphaNumericDash = "[a-z0-9-]";
|
||||
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
|
||||
// src/diff-informed-analysis-utils.ts
|
||||
var fs8 = __toESM(require("fs"));
|
||||
var path9 = __toESM(require("path"));
|
||||
@@ -128782,7 +128794,7 @@ var path9 = __toESM(require("path"));
|
||||
// src/feature-flags.ts
|
||||
var fs7 = __toESM(require("fs"));
|
||||
var path8 = __toESM(require("path"));
|
||||
var semver3 = __toESM(require_semver2());
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.0";
|
||||
@@ -129077,13 +129089,13 @@ function computeChangedFiles(baseFileOids, overlayFileOids) {
|
||||
}
|
||||
|
||||
// src/tools-features.ts
|
||||
var semver2 = __toESM(require_semver2());
|
||||
var semver3 = __toESM(require_semver2());
|
||||
function isSupportedToolsFeature(versionInfo, feature) {
|
||||
return !!versionInfo.features && versionInfo.features[feature];
|
||||
}
|
||||
var SafeArtifactUploadVersion = "2.20.3";
|
||||
function isSafeArtifactUpload(codeQlVersion) {
|
||||
return !codeQlVersion ? true : semver2.gte(codeQlVersion, SafeArtifactUploadVersion);
|
||||
return !codeQlVersion ? true : semver3.gte(codeQlVersion, SafeArtifactUploadVersion);
|
||||
}
|
||||
|
||||
// src/feature-flags.ts
|
||||
@@ -129365,7 +129377,7 @@ var GitHubFeatureFlags = class {
|
||||
DEFAULT_VERSION_FEATURE_FLAG_PREFIX.length,
|
||||
f.length - DEFAULT_VERSION_FEATURE_FLAG_SUFFIX.length
|
||||
).replace(/_/g, ".");
|
||||
if (!semver3.valid(version)) {
|
||||
if (!semver4.valid(version)) {
|
||||
this.logger.warning(
|
||||
`Ignoring feature flag ${f} as it does not specify a valid CodeQL version.`
|
||||
);
|
||||
@@ -129570,12 +129582,6 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
|
||||
rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */,
|
||||
swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */
|
||||
};
|
||||
var PACK_IDENTIFIER_PATTERN = (function() {
|
||||
const alphaNumeric = "[a-z0-9]";
|
||||
const alphaNumericDash = "[a-z0-9-]";
|
||||
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
function getPathToParsedConfigFile(tempDir) {
|
||||
return path10.join(tempDir, "config");
|
||||
}
|
||||
@@ -129994,7 +130000,10 @@ function sanitizeUrlForStatusReport(url2) {
|
||||
|
||||
// src/setup-codeql.ts
|
||||
var CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||
var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing";
|
||||
var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";
|
||||
var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"];
|
||||
var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"];
|
||||
function getCodeQLBundleExtension(compressionMethod) {
|
||||
switch (compressionMethod) {
|
||||
case "gzip":
|
||||
@@ -130137,7 +130146,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
|
||||
return void 0;
|
||||
}
|
||||
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, logger) {
|
||||
if (toolsInput && !CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput) && !toolsInput.startsWith("http")) {
|
||||
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
|
||||
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
|
||||
const compressionMethod2 = inferCompressionMethod(toolsInput);
|
||||
if (compressionMethod2 === void 0) {
|
||||
@@ -130166,6 +130175,12 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
|
||||
let cliVersion2;
|
||||
let tagName;
|
||||
let url2;
|
||||
if (toolsInput !== void 0 && CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput)) {
|
||||
logger.info(
|
||||
`Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.`
|
||||
);
|
||||
toolsInput = await getNightlyToolsUrl(logger);
|
||||
}
|
||||
if (forceShippedTools) {
|
||||
cliVersion2 = cliVersion;
|
||||
tagName = bundleVersion;
|
||||
@@ -130449,6 +130464,34 @@ async function useZstdBundle(cliVersion2, tarSupportsZstd) {
|
||||
function getTempExtractionDir(tempDir) {
|
||||
return path12.join(tempDir, v4_default());
|
||||
}
|
||||
async function getNightlyToolsUrl(logger) {
|
||||
const zstdAvailability = await isZstdAvailable(logger);
|
||||
const compressionMethod = await useZstdBundle(
|
||||
CODEQL_VERSION_ZSTD_BUNDLE,
|
||||
zstdAvailability.available
|
||||
) ? "zstd" : "gzip";
|
||||
try {
|
||||
const release3 = await getApiClient().rest.repos.listReleases({
|
||||
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
|
||||
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
|
||||
per_page: 1,
|
||||
page: 1,
|
||||
prerelease: true
|
||||
});
|
||||
const latestRelease = release3.data[0];
|
||||
if (!latestRelease) {
|
||||
throw new Error("Could not find the latest nightly release.");
|
||||
}
|
||||
return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`;
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`Failed to retrieve the latest nightly release: ${wrapError(e)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
function isReservedToolsValue(tools) {
|
||||
return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools);
|
||||
}
|
||||
|
||||
// src/tracer-config.ts
|
||||
async function shouldEnableIndirectTracing(codeql, config) {
|
||||
|
||||
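The setup-codeql.ts hunks in this bundle add handling for the new reserved nightly values of the tools input: they are resolved to the newest prerelease of dsp-testing/codeql-cli-nightlies. A condensed sketch of that lookup; octokit and bundleName are placeholders for the authenticated API client and the platform bundle name that the surrounding code supplies:

async function getNightlyToolsUrl(octokit, bundleName) {
  // Mirrors the hunk above: ask for only the single most recent (pre)release.
  const releases = await octokit.rest.repos.listReleases({
    owner: "dsp-testing",
    repo: "codeql-cli-nightlies",
    per_page: 1,
    page: 1,
    prerelease: true,
  });
  const latestRelease = releases.data[0];
  if (!latestRelease) {
    throw new Error("Could not find the latest nightly release.");
  }
  return `https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/${latestRelease.tag_name}/${bundleName}`;
}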
lib/init-action.js (generated, 1080 lines changed)
File diff suppressed because it is too large.
lib/resolve-environment-action.js (generated, 36 lines changed)
@@ -26447,7 +26447,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
test: "npm run transpile && ava src/**.test.ts --serial --verbose",
test: "npm run transpile && ava src/ --serial --verbose",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
},
@@ -26492,9 +26492,9 @@ var require_package = __commonJS({
"@ava/typescript": "6.0.0",
"@eslint/compat": "^1.3.2",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.35.0",
"@eslint/js": "^9.36.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@octokit/types": "^14.1.0",
"@octokit/types": "^15.0.0",
"@types/archiver": "^6.0.3",
"@types/console-log-level": "^1.4.5",
"@types/follow-redirects": "^1.14.4",
@@ -26503,10 +26503,10 @@ var require_package = __commonJS({
"@types/node-forge": "^1.3.14",
"@types/semver": "^7.7.1",
"@types/sinon": "^17.0.4",
"@typescript-eslint/eslint-plugin": "^8.43.0",
"@typescript-eslint/eslint-plugin": "^8.44.0",
"@typescript-eslint/parser": "^8.41.0",
ava: "^6.4.1",
esbuild: "^0.25.9",
esbuild: "^0.25.10",
eslint: "^8.57.1",
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-filenames": "^1.3.2",
@@ -77910,6 +77910,9 @@ function parseRepositoryNwo(input) {

// src/api-client.ts
var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
function getRetryConfig() {
return isInTestMode() ? { retries: 10, retryAfterBaseValue: 1e4 } : { retries: 3, retryAfterBaseValue: 1e3 };
}
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
const auth = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth;
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
@@ -77917,7 +77920,8 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {})
githubUtils.getOctokitOptions(auth, {
baseUrl: apiDetails.apiURL,
userAgent: `CodeQL-Action/${getActionVersion()}`,
log: (0, import_console_log_level.default)({ level: "debug" })
log: (0, import_console_log_level.default)({ level: "debug" }),
retry: getRetryConfig()
})
);
}
@@ -78228,7 +78232,6 @@ function wrapCliConfigurationError(cliError) {
// src/config-utils.ts
var fs3 = __toESM(require("fs"));
var path3 = __toESM(require("path"));
var semver4 = __toESM(require_semver2());

// src/analyses.ts
var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
@@ -78241,8 +78244,17 @@ var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
// src/caching-utils.ts
var core6 = __toESM(require_core());

// src/config/db-config.ts
var semver2 = __toESM(require_semver2());
var PACK_IDENTIFIER_PATTERN = (function() {
const alphaNumeric = "[a-z0-9]";
const alphaNumericDash = "[a-z0-9-]";
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
return new RegExp(`^${component}/${component}$`);
})();

// src/feature-flags.ts
var semver3 = __toESM(require_semver2());
var semver4 = __toESM(require_semver2());

// src/overlay-database-utils.ts
var fs2 = __toESM(require("fs"));
@@ -78480,7 +78492,7 @@ function computeChangedFiles(baseFileOids, overlayFileOids) {
}

// src/tools-features.ts
var semver2 = __toESM(require_semver2());
var semver3 = __toESM(require_semver2());
function isSupportedToolsFeature(versionInfo, feature) {
return !!versionInfo.features && versionInfo.features[feature];
}
@@ -78684,12 +78696,6 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */,
swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */
};
var PACK_IDENTIFIER_PATTERN = (function() {
const alphaNumeric = "[a-z0-9]";
const alphaNumericDash = "[a-z0-9-]";
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
return new RegExp(`^${component}/${component}$`);
})();
function getPathToParsedConfigFile(tempDir) {
return path3.join(tempDir, "config");
}

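As in the other bundles, this diff moves PACK_IDENTIFIER_PATTERN into src/config/db-config.ts. A standalone sketch of what the pattern accepts; the sample identifiers are illustrative only:

const alphaNumeric = "[a-z0-9]";
const alphaNumericDash = "[a-z0-9-]";
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
const PACK_IDENTIFIER_PATTERN = new RegExp(`^${component}/${component}$`);

console.log(PACK_IDENTIFIER_PATTERN.test("codeql/javascript-queries")); // true: lowercase scope/name
console.log(PACK_IDENTIFIER_PATTERN.test("codeql/-bad-name"));          // false: components must start alphanumeric
console.log(PACK_IDENTIFIER_PATTERN.test("no-slash"));                  // false: both components are required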
lib/start-proxy-action-post.js (generated, 57 lines changed)
@@ -26447,7 +26447,7 @@ var require_package = __commonJS({
|
||||
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
|
||||
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
|
||||
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
|
||||
test: "npm run transpile && ava src/**.test.ts --serial --verbose",
|
||||
test: "npm run transpile && ava src/ --serial --verbose",
|
||||
"test-debug": "npm run test -- --timeout=20m",
|
||||
transpile: "tsc --build --verbose"
|
||||
},
|
||||
@@ -26492,9 +26492,9 @@ var require_package = __commonJS({
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.3.2",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.35.0",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^14.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
"@types/console-log-level": "^1.4.5",
|
||||
"@types/follow-redirects": "^1.14.4",
|
||||
@@ -26503,10 +26503,10 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.43.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.9",
|
||||
esbuild: "^0.25.10",
|
||||
eslint: "^8.57.1",
|
||||
"eslint-import-resolver-typescript": "^3.8.7",
|
||||
"eslint-plugin-filenames": "^1.3.2",
|
||||
@@ -93456,7 +93456,7 @@ var require_commonjs16 = __commonJS({
|
||||
var TYPEMASK = 1023;
|
||||
var entToType = (s) => s.isFile() ? IFREG : s.isDirectory() ? IFDIR : s.isSymbolicLink() ? IFLNK : s.isCharacterDevice() ? IFCHR : s.isBlockDevice() ? IFBLK : s.isSocket() ? IFSOCK : s.isFIFO() ? IFIFO : UNKNOWN;
|
||||
var normalizeCache = /* @__PURE__ */ new Map();
|
||||
var normalize2 = (s) => {
|
||||
var normalize = (s) => {
|
||||
const c = normalizeCache.get(s);
|
||||
if (c)
|
||||
return c;
|
||||
@@ -93469,7 +93469,7 @@ var require_commonjs16 = __commonJS({
|
||||
const c = normalizeNocaseCache.get(s);
|
||||
if (c)
|
||||
return c;
|
||||
const n = normalize2(s.toLowerCase());
|
||||
const n = normalize(s.toLowerCase());
|
||||
normalizeNocaseCache.set(s, n);
|
||||
return n;
|
||||
};
|
||||
@@ -93638,7 +93638,7 @@ var require_commonjs16 = __commonJS({
|
||||
*/
|
||||
constructor(name, type2 = UNKNOWN, root, roots, nocase, children, opts) {
|
||||
this.name = name;
|
||||
this.#matchName = nocase ? normalizeNocase(name) : normalize2(name);
|
||||
this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
|
||||
this.#type = type2 & TYPEMASK;
|
||||
this.nocase = nocase;
|
||||
this.roots = roots;
|
||||
@@ -93731,7 +93731,7 @@ var require_commonjs16 = __commonJS({
|
||||
return this.parent || this;
|
||||
}
|
||||
const children = this.children();
|
||||
const name = this.nocase ? normalizeNocase(pathPart) : normalize2(pathPart);
|
||||
const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
|
||||
for (const p of children) {
|
||||
if (p.#matchName === name) {
|
||||
return p;
|
||||
@@ -93976,7 +93976,7 @@ var require_commonjs16 = __commonJS({
|
||||
* directly.
|
||||
*/
|
||||
isNamed(n) {
|
||||
return !this.nocase ? this.#matchName === normalize2(n) : this.#matchName === normalizeNocase(n);
|
||||
return !this.nocase ? this.#matchName === normalize(n) : this.#matchName === normalizeNocase(n);
|
||||
}
|
||||
/**
|
||||
* Return the Path object corresponding to the target of a symbolic link.
|
||||
@@ -94115,7 +94115,7 @@ var require_commonjs16 = __commonJS({
|
||||
#readdirMaybePromoteChild(e, c) {
|
||||
for (let p = c.provisional; p < c.length; p++) {
|
||||
const pchild = c[p];
|
||||
const name = this.nocase ? normalizeNocase(e.name) : normalize2(e.name);
|
||||
const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
|
||||
if (name !== pchild.#matchName) {
|
||||
continue;
|
||||
}
|
||||
@@ -101946,7 +101946,7 @@ var require_tr46 = __commonJS({
|
||||
TRANSITIONAL: 0,
|
||||
NONTRANSITIONAL: 1
|
||||
};
|
||||
function normalize2(str2) {
|
||||
function normalize(str2) {
|
||||
return str2.split("\0").map(function(s) {
|
||||
return s.normalize("NFC");
|
||||
}).join("\0");
|
||||
@@ -102026,7 +102026,7 @@ var require_tr46 = __commonJS({
|
||||
processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;
|
||||
}
|
||||
var error2 = false;
|
||||
if (normalize2(label) !== label || label[3] === "-" && label[4] === "-" || label[0] === "-" || label[label.length - 1] === "-" || label.indexOf(".") !== -1 || label.search(combiningMarksRegex) === 0) {
|
||||
if (normalize(label) !== label || label[3] === "-" && label[4] === "-" || label[0] === "-" || label[label.length - 1] === "-" || label.indexOf(".") !== -1 || label.search(combiningMarksRegex) === 0) {
|
||||
error2 = true;
|
||||
}
|
||||
var len = countSymbols(label);
|
||||
@@ -102044,7 +102044,7 @@ var require_tr46 = __commonJS({
|
||||
}
|
||||
function processing(domain_name, useSTD3, processing_option) {
|
||||
var result = mapChars(domain_name, useSTD3, processing_option);
|
||||
result.string = normalize2(result.string);
|
||||
result.string = normalize(result.string);
|
||||
var labels = result.string.split(".");
|
||||
for (var i = 0; i < labels.length; ++i) {
|
||||
try {
|
||||
@@ -117042,6 +117042,9 @@ var ConfigurationError = class extends Error {
|
||||
super(message);
|
||||
}
|
||||
};
|
||||
function isInTestMode() {
|
||||
return process.env["CODEQL_ACTION_TEST_MODE" /* TEST_MODE */] === "true";
|
||||
}
|
||||
function getErrorMessage(error2) {
|
||||
return error2 instanceof Error ? error2.message : String(error2);
|
||||
}
|
||||
@@ -117078,6 +117081,9 @@ var githubUtils = __toESM(require_utils4());
|
||||
var retry = __toESM(require_dist_node15());
|
||||
var import_console_log_level = __toESM(require_console_log_level());
|
||||
var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
|
||||
function getRetryConfig() {
|
||||
return isInTestMode() ? { retries: 10, retryAfterBaseValue: 1e4 } : { retries: 3, retryAfterBaseValue: 1e3 };
|
||||
}
|
||||
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
|
||||
const auth = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth;
|
||||
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
|
||||
@@ -117085,7 +117091,8 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {})
|
||||
githubUtils.getOctokitOptions(auth, {
|
||||
baseUrl: apiDetails.apiURL,
|
||||
userAgent: `CodeQL-Action/${getActionVersion()}`,
|
||||
log: (0, import_console_log_level.default)({ level: "debug" })
|
||||
log: (0, import_console_log_level.default)({ level: "debug" }),
|
||||
retry: getRetryConfig()
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -117127,7 +117134,6 @@ async function getGitHubVersion() {
|
||||
// src/config-utils.ts
|
||||
var fs = __toESM(require("fs"));
|
||||
var path = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/analyses.ts
|
||||
var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
|
||||
@@ -117140,8 +117146,17 @@ var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
|
||||
// src/caching-utils.ts
|
||||
var core6 = __toESM(require_core());
|
||||
|
||||
// src/config/db-config.ts
|
||||
var semver2 = __toESM(require_semver2());
|
||||
var PACK_IDENTIFIER_PATTERN = (function() {
|
||||
const alphaNumeric = "[a-z0-9]";
|
||||
const alphaNumericDash = "[a-z0-9-]";
|
||||
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
|
||||
// src/feature-flags.ts
|
||||
var semver3 = __toESM(require_semver2());
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/overlay-database-utils.ts
|
||||
var actionsCache = __toESM(require_cache3());
|
||||
@@ -117163,7 +117178,7 @@ var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 15e3;
|
||||
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
|
||||
|
||||
// src/tools-features.ts
|
||||
var semver2 = __toESM(require_semver2());
|
||||
var semver3 = __toESM(require_semver2());
|
||||
|
||||
// src/feature-flags.ts
|
||||
var featureConfig = {
|
||||
@@ -117364,12 +117379,6 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
|
||||
rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */,
|
||||
swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */
|
||||
};
|
||||
var PACK_IDENTIFIER_PATTERN = (function() {
|
||||
const alphaNumeric = "[a-z0-9]";
|
||||
const alphaNumericDash = "[a-z0-9-]";
|
||||
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
function getPathToParsedConfigFile(tempDir) {
|
||||
return path.join(tempDir, "config");
|
||||
}
|
||||
|
||||
lib/start-proxy-action.js (generated, 19 lines changed)
@@ -44975,7 +44975,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
test: "npm run transpile && ava src/**.test.ts --serial --verbose",
test: "npm run transpile && ava src/ --serial --verbose",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
},
@@ -45020,9 +45020,9 @@ var require_package = __commonJS({
"@ava/typescript": "6.0.0",
"@eslint/compat": "^1.3.2",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.35.0",
"@eslint/js": "^9.36.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@octokit/types": "^14.1.0",
"@octokit/types": "^15.0.0",
"@types/archiver": "^6.0.3",
"@types/console-log-level": "^1.4.5",
"@types/follow-redirects": "^1.14.4",
@@ -45031,10 +45031,10 @@ var require_package = __commonJS({
"@types/node-forge": "^1.3.14",
"@types/semver": "^7.7.1",
"@types/sinon": "^17.0.4",
"@typescript-eslint/eslint-plugin": "^8.43.0",
"@typescript-eslint/eslint-plugin": "^8.44.0",
"@typescript-eslint/parser": "^8.41.0",
ava: "^6.4.1",
esbuild: "^0.25.9",
esbuild: "^0.25.10",
eslint: "^8.57.1",
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-filenames": "^1.3.2",
@@ -49321,6 +49321,9 @@ async function delay(milliseconds, opts) {
}
});
}
function isInTestMode() {
return process.env["CODEQL_ACTION_TEST_MODE" /* TEST_MODE */] === "true";
}
function getErrorMessage(error2) {
return error2 instanceof Error ? error2.message : String(error2);
}
@@ -49370,6 +49373,9 @@ var core6 = __toESM(require_core());
var githubUtils = __toESM(require_utils4());
var retry = __toESM(require_dist_node15());
var import_console_log_level = __toESM(require_console_log_level());
function getRetryConfig() {
return isInTestMode() ? { retries: 10, retryAfterBaseValue: 1e4 } : { retries: 3, retryAfterBaseValue: 1e3 };
}
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
const auth = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth;
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
@@ -49377,7 +49383,8 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {})
githubUtils.getOctokitOptions(auth, {
baseUrl: apiDetails.apiURL,
userAgent: `CodeQL-Action/${getActionVersion()}`,
log: (0, import_console_log_level.default)({ level: "debug" })
log: (0, import_console_log_level.default)({ level: "debug" }),
retry: getRetryConfig()
})
);
}

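The hunks above also expose the CODEQL_ACTION_TEST_MODE switch that getRetryConfig consults. A small sketch of the toggle on its own; the environment variable name and retry numbers come from the diff, the demo assignment is illustrative:

function isInTestMode() {
  return process.env.CODEQL_ACTION_TEST_MODE === "true";
}

function getRetryConfig() {
  // 1e4 and 1e3 in the bundle, written out here in milliseconds.
  return isInTestMode()
    ? { retries: 10, retryAfterBaseValue: 10000 }
    : { retries: 3, retryAfterBaseValue: 1000 };
}

process.env.CODEQL_ACTION_TEST_MODE = "true";
console.log(getRetryConfig()); // { retries: 10, retryAfterBaseValue: 10000 }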
lib/upload-lib.js (generated, 81 lines changed)
@@ -29019,7 +29019,7 @@ var require_pattern = __commonJS({
|
||||
const absolute = [];
|
||||
const relative2 = [];
|
||||
for (const pattern of patterns) {
|
||||
if (isAbsolute3(pattern)) {
|
||||
if (isAbsolute2(pattern)) {
|
||||
absolute.push(pattern);
|
||||
} else {
|
||||
relative2.push(pattern);
|
||||
@@ -29028,10 +29028,10 @@ var require_pattern = __commonJS({
|
||||
return [absolute, relative2];
|
||||
}
|
||||
exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative;
|
||||
function isAbsolute3(pattern) {
|
||||
function isAbsolute2(pattern) {
|
||||
return path15.isAbsolute(pattern);
|
||||
}
|
||||
exports2.isAbsolute = isAbsolute3;
|
||||
exports2.isAbsolute = isAbsolute2;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -33593,7 +33593,7 @@ var require_package = __commonJS({
|
||||
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
|
||||
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
|
||||
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
|
||||
test: "npm run transpile && ava src/**.test.ts --serial --verbose",
|
||||
test: "npm run transpile && ava src/ --serial --verbose",
|
||||
"test-debug": "npm run test -- --timeout=20m",
|
||||
transpile: "tsc --build --verbose"
|
||||
},
|
||||
@@ -33638,9 +33638,9 @@ var require_package = __commonJS({
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.3.2",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.35.0",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^14.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
"@types/console-log-level": "^1.4.5",
|
||||
"@types/follow-redirects": "^1.14.4",
|
||||
@@ -33649,10 +33649,10 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.43.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.9",
|
||||
esbuild: "^0.25.10",
|
||||
eslint: "^8.57.1",
|
||||
"eslint-import-resolver-typescript": "^3.8.7",
|
||||
"eslint-plugin-filenames": "^1.3.2",
|
||||
@@ -88523,6 +88523,9 @@ function parseRepositoryNwo(input) {
|
||||
|
||||
// src/api-client.ts
|
||||
var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
|
||||
function getRetryConfig() {
|
||||
return isInTestMode() ? { retries: 10, retryAfterBaseValue: 1e4 } : { retries: 3, retryAfterBaseValue: 1e3 };
|
||||
}
|
||||
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
|
||||
const auth = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth;
|
||||
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
|
||||
@@ -88530,7 +88533,8 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {})
|
||||
githubUtils.getOctokitOptions(auth, {
|
||||
baseUrl: apiDetails.apiURL,
|
||||
userAgent: `CodeQL-Action/${getActionVersion()}`,
|
||||
log: (0, import_console_log_level.default)({ level: "debug" })
|
||||
log: (0, import_console_log_level.default)({ level: "debug" }),
|
||||
retry: getRetryConfig()
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -88873,7 +88877,6 @@ function wrapCliConfigurationError(cliError) {
|
||||
// src/config-utils.ts
|
||||
var fs7 = __toESM(require("fs"));
|
||||
var path9 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/analyses.ts
|
||||
var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
|
||||
@@ -88886,12 +88889,21 @@ var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
|
||||
// src/caching-utils.ts
|
||||
var core6 = __toESM(require_core());
|
||||
|
||||
// src/config/db-config.ts
|
||||
var semver2 = __toESM(require_semver2());
|
||||
var PACK_IDENTIFIER_PATTERN = (function() {
|
||||
const alphaNumeric = "[a-z0-9]";
|
||||
const alphaNumericDash = "[a-z0-9-]";
|
||||
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
|
||||
// src/diff-informed-analysis-utils.ts
|
||||
var fs6 = __toESM(require("fs"));
|
||||
var path8 = __toESM(require("path"));
|
||||
|
||||
// src/feature-flags.ts
|
||||
var semver3 = __toESM(require_semver2());
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.0";
|
||||
@@ -89175,7 +89187,7 @@ function computeChangedFiles(baseFileOids, overlayFileOids) {
|
||||
}
|
||||
|
||||
// src/tools-features.ts
|
||||
var semver2 = __toESM(require_semver2());
|
||||
var semver3 = __toESM(require_semver2());
|
||||
function isSupportedToolsFeature(versionInfo, feature) {
|
||||
return !!versionInfo.features && versionInfo.features[feature];
|
||||
}
|
||||
@@ -89398,12 +89410,6 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
|
||||
rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */,
|
||||
swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */
|
||||
};
|
||||
var PACK_IDENTIFIER_PATTERN = (function() {
|
||||
const alphaNumeric = "[a-z0-9]";
|
||||
const alphaNumericDash = "[a-z0-9-]";
|
||||
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
function getPathToParsedConfigFile(tempDir) {
|
||||
return path9.join(tempDir, "config");
|
||||
}
|
||||
@@ -89822,7 +89828,10 @@ function sanitizeUrlForStatusReport(url2) {
|
||||
|
||||
// src/setup-codeql.ts
|
||||
var CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||
var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing";
|
||||
var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";
|
||||
var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"];
|
||||
var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"];
|
||||
function getCodeQLBundleExtension(compressionMethod) {
|
||||
switch (compressionMethod) {
|
||||
case "gzip":
|
||||
@@ -89965,7 +89974,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
|
||||
return void 0;
|
||||
}
|
||||
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, logger) {
|
||||
if (toolsInput && !CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput) && !toolsInput.startsWith("http")) {
|
||||
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
|
||||
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
|
||||
const compressionMethod2 = inferCompressionMethod(toolsInput);
|
||||
if (compressionMethod2 === void 0) {
|
||||
@@ -89994,6 +90003,12 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
|
||||
let cliVersion2;
|
||||
let tagName;
|
||||
let url2;
|
||||
if (toolsInput !== void 0 && CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput)) {
|
||||
logger.info(
|
||||
`Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.`
|
||||
);
|
||||
toolsInput = await getNightlyToolsUrl(logger);
|
||||
}
|
||||
if (forceShippedTools) {
|
||||
cliVersion2 = cliVersion;
|
||||
tagName = bundleVersion;
|
||||
@@ -90277,6 +90292,34 @@ async function useZstdBundle(cliVersion2, tarSupportsZstd) {
|
||||
function getTempExtractionDir(tempDir) {
|
||||
return path11.join(tempDir, v4_default());
|
||||
}
|
||||
async function getNightlyToolsUrl(logger) {
|
||||
const zstdAvailability = await isZstdAvailable(logger);
|
||||
const compressionMethod = await useZstdBundle(
|
||||
CODEQL_VERSION_ZSTD_BUNDLE,
|
||||
zstdAvailability.available
|
||||
) ? "zstd" : "gzip";
|
||||
try {
|
||||
const release = await getApiClient().rest.repos.listReleases({
|
||||
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
|
||||
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
|
||||
per_page: 1,
|
||||
page: 1,
|
||||
prerelease: true
|
||||
});
|
||||
const latestRelease = release.data[0];
|
||||
if (!latestRelease) {
|
||||
throw new Error("Could not find the latest nightly release.");
|
||||
}
|
||||
return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`;
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`Failed to retrieve the latest nightly release: ${wrapError(e)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
function isReservedToolsValue(tools) {
|
||||
return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools);
|
||||
}
|
||||
|
||||
// src/tracer-config.ts
|
||||
async function shouldEnableIndirectTracing(codeql, config) {
|
||||
|
||||
lib/upload-sarif-action-post.js (generated, 43 lines changed)
@@ -26447,7 +26447,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
test: "npm run transpile && ava src/**.test.ts --serial --verbose",
test: "npm run transpile && ava src/ --serial --verbose",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
},
@@ -26492,9 +26492,9 @@ var require_package = __commonJS({
"@ava/typescript": "6.0.0",
"@eslint/compat": "^1.3.2",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.35.0",
"@eslint/js": "^9.36.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@octokit/types": "^14.1.0",
"@octokit/types": "^15.0.0",
"@types/archiver": "^6.0.3",
"@types/console-log-level": "^1.4.5",
"@types/follow-redirects": "^1.14.4",
@@ -26503,10 +26503,10 @@ var require_package = __commonJS({
"@types/node-forge": "^1.3.14",
"@types/semver": "^7.7.1",
"@types/sinon": "^17.0.4",
"@typescript-eslint/eslint-plugin": "^8.43.0",
"@typescript-eslint/eslint-plugin": "^8.44.0",
"@typescript-eslint/parser": "^8.41.0",
ava: "^6.4.1",
esbuild: "^0.25.9",
esbuild: "^0.25.10",
eslint: "^8.57.1",
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-filenames": "^1.3.2",
@@ -117042,6 +117042,9 @@ var ConfigurationError = class extends Error {
super(message);
}
};
function isInTestMode() {
return process.env["CODEQL_ACTION_TEST_MODE" /* TEST_MODE */] === "true";
}
function getErrorMessage(error2) {
return error2 instanceof Error ? error2.message : String(error2);
}
@@ -117082,6 +117085,9 @@ var githubUtils = __toESM(require_utils4());
var retry = __toESM(require_dist_node15());
var import_console_log_level = __toESM(require_console_log_level());
var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
function getRetryConfig() {
return isInTestMode() ? { retries: 10, retryAfterBaseValue: 1e4 } : { retries: 3, retryAfterBaseValue: 1e3 };
}
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
const auth = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth;
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
@@ -117089,7 +117095,8 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {})
githubUtils.getOctokitOptions(auth, {
baseUrl: apiDetails.apiURL,
userAgent: `CodeQL-Action/${getActionVersion()}`,
log: (0, import_console_log_level.default)({ level: "debug" })
log: (0, import_console_log_level.default)({ level: "debug" }),
retry: getRetryConfig()
})
);
}
@@ -117287,14 +117294,20 @@ var cliErrorsConfig = {
}
};

// src/config-utils.ts
var semver4 = __toESM(require_semver2());

// src/caching-utils.ts
var core6 = __toESM(require_core());

// src/config/db-config.ts
var semver2 = __toESM(require_semver2());
var PACK_IDENTIFIER_PATTERN = (function() {
const alphaNumeric = "[a-z0-9]";
const alphaNumericDash = "[a-z0-9-]";
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
return new RegExp(`^${component}/${component}$`);
})();

// src/feature-flags.ts
var semver3 = __toESM(require_semver2());
var semver4 = __toESM(require_semver2());

// src/overlay-database-utils.ts
var actionsCache = __toESM(require_cache3());
@@ -117324,10 +117337,10 @@ var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 15e3;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;

// src/tools-features.ts
var semver2 = __toESM(require_semver2());
var semver3 = __toESM(require_semver2());
var SafeArtifactUploadVersion = "2.20.3";
function isSafeArtifactUpload(codeQlVersion) {
return !codeQlVersion ? true : semver2.gte(codeQlVersion, SafeArtifactUploadVersion);
return !codeQlVersion ? true : semver3.gte(codeQlVersion, SafeArtifactUploadVersion);
}

// src/feature-flags.ts
@@ -117529,12 +117542,6 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */,
swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */
};
var PACK_IDENTIFIER_PATTERN = (function() {
const alphaNumeric = "[a-z0-9]";
const alphaNumericDash = "[a-z0-9-]";
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
return new RegExp(`^${component}/${component}$`);
})();

// src/setup-codeql.ts
var toolcache3 = __toESM(require_tool_cache());

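The tools-features.ts hunk above renumbers the semver import used by isSafeArtifactUpload. A standalone sketch of the gate itself, assuming the bundled semver module is the npm semver package:

const semver = require("semver");

const SafeArtifactUploadVersion = "2.20.3";
function isSafeArtifactUpload(codeQlVersion) {
  // An unknown CLI version is treated as safe, as in the bundle.
  return !codeQlVersion ? true : semver.gte(codeQlVersion, SafeArtifactUploadVersion);
}

console.log(isSafeArtifactUpload("2.19.0"));  // false
console.log(isSafeArtifactUpload("2.23.0"));  // true
console.log(isSafeArtifactUpload(undefined)); // true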
lib/upload-sarif-action.js (generated, 210 lines changed)
@@ -27722,7 +27722,7 @@ var require_pattern = __commonJS({
|
||||
const absolute = [];
|
||||
const relative2 = [];
|
||||
for (const pattern of patterns) {
|
||||
if (isAbsolute3(pattern)) {
|
||||
if (isAbsolute2(pattern)) {
|
||||
absolute.push(pattern);
|
||||
} else {
|
||||
relative2.push(pattern);
|
||||
@@ -27731,10 +27731,10 @@ var require_pattern = __commonJS({
|
||||
return [absolute, relative2];
|
||||
}
|
||||
exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative;
|
||||
function isAbsolute3(pattern) {
|
||||
function isAbsolute2(pattern) {
|
||||
return path16.isAbsolute(pattern);
|
||||
}
|
||||
exports2.isAbsolute = isAbsolute3;
|
||||
exports2.isAbsolute = isAbsolute2;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -32296,7 +32296,7 @@ var require_package = __commonJS({
|
||||
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
|
||||
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
|
||||
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
|
||||
test: "npm run transpile && ava src/**.test.ts --serial --verbose",
|
||||
test: "npm run transpile && ava src/ --serial --verbose",
|
||||
"test-debug": "npm run test -- --timeout=20m",
|
||||
transpile: "tsc --build --verbose"
|
||||
},
|
||||
@@ -32341,9 +32341,9 @@ var require_package = __commonJS({
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.3.2",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.35.0",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^14.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
"@types/console-log-level": "^1.4.5",
|
||||
"@types/follow-redirects": "^1.14.4",
|
||||
@@ -32352,10 +32352,10 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.43.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.9",
|
||||
esbuild: "^0.25.10",
|
||||
eslint: "^8.57.1",
|
||||
"eslint-import-resolver-typescript": "^3.8.7",
|
||||
"eslint-plugin-filenames": "^1.3.2",
|
||||
@@ -88775,6 +88775,9 @@ function parseRepositoryNwo(input) {
|
||||
|
||||
// src/api-client.ts
|
||||
var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
|
||||
function getRetryConfig() {
|
||||
return isInTestMode() ? { retries: 10, retryAfterBaseValue: 1e4 } : { retries: 3, retryAfterBaseValue: 1e3 };
|
||||
}
|
||||
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
|
||||
const auth = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth;
|
||||
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
|
||||
@@ -88782,7 +88785,8 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {})
|
||||
githubUtils.getOctokitOptions(auth, {
|
||||
baseUrl: apiDetails.apiURL,
|
||||
userAgent: `CodeQL-Action/${getActionVersion()}`,
|
||||
log: (0, import_console_log_level.default)({ level: "debug" })
|
||||
log: (0, import_console_log_level.default)({ level: "debug" }),
|
||||
retry: getRetryConfig()
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -89619,11 +89623,19 @@ var core9 = __toESM(require_core());
|
||||
// src/config-utils.ts
|
||||
var fs8 = __toESM(require("fs"));
|
||||
var path10 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/caching-utils.ts
|
||||
var core8 = __toESM(require_core());
|
||||
|
||||
// src/config/db-config.ts
|
||||
var semver4 = __toESM(require_semver2());
|
||||
var PACK_IDENTIFIER_PATTERN = (function() {
|
||||
const alphaNumeric = "[a-z0-9]";
|
||||
const alphaNumericDash = "[a-z0-9-]";
|
||||
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
|
||||
// src/diff-informed-analysis-utils.ts
|
||||
var fs7 = __toESM(require("fs"));
|
||||
var path9 = __toESM(require("path"));
|
||||
@@ -89672,12 +89684,6 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
|
||||
rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */,
|
||||
swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */
|
||||
};
|
||||
var PACK_IDENTIFIER_PATTERN = (function() {
|
||||
const alphaNumeric = "[a-z0-9]";
|
||||
const alphaNumericDash = "[a-z0-9-]";
|
||||
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
function getPathToParsedConfigFile(tempDir) {
|
||||
return path10.join(tempDir, "config");
|
||||
}
|
||||
@@ -90523,7 +90529,10 @@ function sanitizeUrlForStatusReport(url2) {
|
||||
|
||||
// src/setup-codeql.ts
|
||||
var CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||
var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing";
|
||||
var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";
|
||||
var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"];
|
||||
var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"];
|
||||
function getCodeQLBundleExtension(compressionMethod) {
|
||||
switch (compressionMethod) {
|
||||
case "gzip":
|
||||
@@ -90666,7 +90675,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
|
||||
return void 0;
|
||||
}
|
||||
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, logger) {
|
||||
if (toolsInput && !CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput) && !toolsInput.startsWith("http")) {
|
||||
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
|
||||
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
|
||||
const compressionMethod2 = inferCompressionMethod(toolsInput);
|
||||
if (compressionMethod2 === void 0) {
|
||||
@@ -90695,6 +90704,12 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
|
||||
let cliVersion2;
|
||||
let tagName;
|
||||
let url2;
|
||||
if (toolsInput !== void 0 && CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput)) {
|
||||
logger.info(
|
||||
`Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.`
|
||||
);
|
||||
toolsInput = await getNightlyToolsUrl(logger);
|
||||
}
|
||||
if (forceShippedTools) {
|
||||
cliVersion2 = cliVersion;
|
||||
tagName = bundleVersion;
|
||||
@@ -90978,6 +90993,34 @@ async function useZstdBundle(cliVersion2, tarSupportsZstd) {
|
||||
function getTempExtractionDir(tempDir) {
|
||||
return path12.join(tempDir, v4_default());
|
||||
}
|
||||
async function getNightlyToolsUrl(logger) {
|
||||
const zstdAvailability = await isZstdAvailable(logger);
|
||||
const compressionMethod = await useZstdBundle(
|
||||
CODEQL_VERSION_ZSTD_BUNDLE,
|
||||
zstdAvailability.available
|
||||
) ? "zstd" : "gzip";
|
||||
try {
|
||||
const release3 = await getApiClient().rest.repos.listReleases({
|
||||
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
|
||||
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
|
||||
per_page: 1,
|
||||
page: 1,
|
||||
prerelease: true
|
||||
});
|
||||
const latestRelease = release3.data[0];
|
||||
if (!latestRelease) {
|
||||
throw new Error("Could not find the latest nightly release.");
|
||||
}
|
||||
return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`;
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`Failed to retrieve the latest nightly release: ${wrapError(e)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
function isReservedToolsValue(tools) {
|
||||
return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools);
|
||||
}
|
||||
|
||||
// src/tracer-config.ts
async function shouldEnableIndirectTracing(codeql, config) {
@@ -92985,23 +93028,6 @@ function findSarifFilesInDir(sarifPath, isSarif) {
walkSarifFiles(sarifPath);
return sarifFiles;
}
function getSarifFilePaths(sarifPath, isSarif) {
if (!fs14.existsSync(sarifPath)) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
let sarifFiles;
if (fs14.lstatSync(sarifPath).isDirectory()) {
sarifFiles = findSarifFilesInDir(sarifPath, isSarif);
if (sarifFiles.length === 0) {
throw new ConfigurationError(
`No SARIF files found to upload in "${sarifPath}".`
);
}
} else {
sarifFiles = [sarifPath];
}
return sarifFiles;
}
function countResultsInSarif(sarif) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
@@ -93097,20 +93123,6 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
}
return payloadObj;
}
async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
const sarifPaths = getSarifFilePaths(
inputSarifPath,
uploadTarget.sarifPredicate
);
return uploadSpecifiedFiles(
sarifPaths,
checkoutPath,
category,
features,
logger,
uploadTarget
);
}
async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
@@ -93358,6 +93370,30 @@ function filterAlertsByDiffRange(logger, sarif) {
}

// src/upload-sarif-action.ts
async function findAndUpload(logger, features, sarifPath, pathStats, checkoutPath, analysis, category) {
let sarifFiles;
if (pathStats.isDirectory()) {
sarifFiles = findSarifFilesInDir(
sarifPath,
analysis.sarifPredicate
);
} else if (pathStats.isFile() && analysis.sarifPredicate(sarifPath)) {
sarifFiles = [sarifPath];
} else {
return void 0;
}
if (sarifFiles.length !== 0) {
return await uploadSpecifiedFiles(
sarifFiles,
checkoutPath,
category,
features,
logger,
analysis
);
}
return void 0;
}
async function sendSuccessStatusReport(startedAt, uploadStats, logger) {
const statusReportBase = await createStatusReportBase(
"upload-sarif" /* UploadSarif */,
@@ -93404,41 +93440,59 @@ async function run() {
const sarifPath = getRequiredInput("sarif_file");
const checkoutPath = getRequiredInput("checkout_path");
const category = getOptionalInput("category");
const uploadResult = await uploadFiles(
sarifPath,
checkoutPath,
category,
features,
logger,
CodeScanning
);
core13.setOutput("sarif-id", uploadResult.sarifID);
if (fs15.lstatSync(sarifPath).isDirectory()) {
const qualitySarifFiles = findSarifFilesInDir(
sarifPath,
CodeQuality.sarifPredicate
);
if (qualitySarifFiles.length !== 0) {
await uploadSpecifiedFiles(
qualitySarifFiles,
checkoutPath,
fixCodeQualityCategory(logger, category),
features,
logger,
CodeQuality
);
}
const pathStats = fs15.lstatSync(sarifPath, { throwIfNoEntry: false });
if (pathStats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}.`);
}
const sarifIds = [];
const uploadResult = await findAndUpload(
logger,
features,
sarifPath,
pathStats,
checkoutPath,
CodeScanning,
category
);
if (uploadResult !== void 0) {
core13.setOutput("sarif-id", uploadResult.sarifID);
sarifIds.push({
analysis: "code-scanning" /* CodeScanning */,
id: uploadResult.sarifID
});
}
const qualityUploadResult = await findAndUpload(
logger,
features,
sarifPath,
pathStats,
checkoutPath,
CodeQuality,
fixCodeQualityCategory(logger, category)
);
if (qualityUploadResult !== void 0) {
sarifIds.push({
analysis: "code-quality" /* CodeQuality */,
id: qualityUploadResult.sarifID
});
}
core13.setOutput("sarif-ids", JSON.stringify(sarifIds));
if (isInTestMode()) {
core13.debug("In test mode. Waiting for processing is disabled.");
} else if (getRequiredInput("wait-for-processing") === "true") {
await waitForProcessing(
getRepositoryNwo(),
uploadResult.sarifID,
logger
);
if (uploadResult !== void 0) {
await waitForProcessing(
getRepositoryNwo(),
uploadResult.sarifID,
logger
);
}
}
await sendSuccessStatusReport(startedAt, uploadResult.statusReport, logger);
await sendSuccessStatusReport(
startedAt,
uploadResult?.statusReport || {},
logger
);
} catch (unwrappedError) {
const error2 = isThirdPartyAnalysis("upload-sarif" /* UploadSarif */) && unwrappedError instanceof InvalidSarifUploadError ? new ConfigurationError(unwrappedError.message) : wrapError(unwrappedError);
const message = error2.message;

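// Editor's sketch (not part of the bundle): the shape of the `sarif-ids` output written by
// run() above. The IDs shown are illustrative placeholders, not real values.
type SarifIdEntry = { analysis: "code-scanning" | "code-quality"; id: string };
const exampleSarifIds: SarifIdEntry[] = [
  { analysis: "code-scanning", id: "<code-scanning-sarif-id>" },
  { analysis: "code-quality", id: "<code-quality-sarif-id>" },
];
// core.setOutput("sarif-ids", JSON.stringify(exampleSarifIds));
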
626 package-lock.json (generated)
File diff suppressed because it is too large
10 package.json
@@ -9,7 +9,7 @@
"lint": "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
"test": "npm run transpile && ava src/**.test.ts --serial --verbose",
"test": "npm run transpile && ava src/ --serial --verbose",
"test-debug": "npm run test -- --timeout=20m",
"transpile": "tsc --build --verbose"
},
@@ -54,9 +54,9 @@
"@ava/typescript": "6.0.0",
"@eslint/compat": "^1.3.2",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.35.0",
"@eslint/js": "^9.36.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@octokit/types": "^14.1.0",
"@octokit/types": "^15.0.0",
"@types/archiver": "^6.0.3",
"@types/console-log-level": "^1.4.5",
"@types/follow-redirects": "^1.14.4",
@@ -65,10 +65,10 @@
"@types/node-forge": "^1.3.14",
"@types/semver": "^7.7.1",
"@types/sinon": "^17.0.4",
"@typescript-eslint/eslint-plugin": "^8.43.0",
"@typescript-eslint/eslint-plugin": "^8.44.0",
"@typescript-eslint/parser": "^8.41.0",
"ava": "^6.4.1",
"esbuild": "^0.25.9",
"esbuild": "^0.25.10",
"eslint": "^8.57.1",
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-filenames": "^1.3.2",

@@ -1,14 +1,12 @@
name: "Local CodeQL bundle"
description: "Tests using a CodeQL bundle from a local file rather than a URL"
versions: ["nightly-latest"]
versions: ["linked"]
operatingSystems: ["ubuntu"]
installGo: true
steps:
- name: Fetch a CodeQL bundle
env:
CODEQL_URL: ${{ steps.prepare-test.outputs.tools-url }}
- name: Fetch latest CodeQL bundle
run: |
wget "$CODEQL_URL"
wget https://github.com/github/codeql-action/releases/latest/download/codeql-bundle-linux64.tar.zst
- id: init
uses: ./../action/init
with:

@@ -6,9 +6,8 @@ steps:
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
analysis-kinds: code-scanning,code-quality
languages: csharp,java,javascript,python
analysis-kinds: code-quality
- name: Build code
run: ./build.sh
# Generate some SARIF we can upload with the upload-sarif step
@@ -18,6 +17,10 @@ steps:
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
upload: never
- uses: ./../action/upload-sarif
id: upload-sarif
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
- name: "Check output from `upload-sarif` step"
if: fromJSON(steps.upload-sarif.outputs.sarif-ids)[0].analysis != 'code-quality'
run: exit 1

@@ -269,6 +269,17 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
'shell': 'bash',
},
},
'concurrency': {
# Cancel in-progress workflows in the same 'group' for pull_request events,
# but not other event types. This should have the effect that workflows on PRs
# get cancelled if there is a newer workflow in the same concurrency group.
# For other events, the new workflows should wait until earlier ones have finished.
# This should help reduce the number of concurrent workflows on the repo, and
# consequently the number of concurrent API requests.
'cancel-in-progress': "${{ github.event_name == 'pull_request' }}",
# The group is determined by the workflow name + the ref
'group': "${{ github.workflow }}-${{ github.ref }}"
},
'jobs': {
checkName: checkJob
}

@@ -36,6 +36,7 @@ test("getApiClient", async (t) => {
baseUrl: "http://api.github.localhost",
log: sinon.match.any,
userAgent: `CodeQL-Action/${actionsUtil.getActionVersion()}`,
retry: api.getRetryConfig(),
}),
);
});

@@ -12,6 +12,7 @@ import {
GitHubVariant,
GitHubVersion,
isHTTPError,
isInTestMode,
parseGitHubUrl,
parseMatrixInput,
} from "./util";
@@ -38,6 +39,20 @@ export interface GitHubApiExternalRepoDetails {
apiURL: string | undefined;
}

export function getRetryConfig(): {
retries: number;
retryAfterBaseValue?: number;
} {
// If we are in test mode, increase the allowed number of retries to 10
// and the base backoff from 1s to 10s. `plugin-retry` will wait
// `(failedAttempts ^ 2) * retryAfterBaseValue`-long (in ms) after
// each failed attempt.
// If we are not in test mode, we use the default configuration.
return isInTestMode()
? { retries: 10, retryAfterBaseValue: 10_000 }
: { retries: 3, retryAfterBaseValue: 1_000 };
}

function createApiClientWithDetails(
apiDetails: GitHubApiCombinedDetails,
{ allowExternal = false } = {},
@@ -50,6 +65,7 @@ function createApiClientWithDetails(
baseUrl: apiDetails.apiURL,
userAgent: `CodeQL-Action/${getActionVersion()}`,
log: consoleLogLevel({ level: "debug" }),
retry: getRetryConfig(),
}),
);
}

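// Editor's sketch (not part of api-client.ts): the delays implied by the comment in
// getRetryConfig above, assuming plugin-retry's documented backoff formula.
function expectedBackoffMs(failedAttempts: number, retryAfterBaseValue: number): number {
  return failedAttempts ** 2 * retryAfterBaseValue;
}
// Default config (base 1s): 1s, 4s, 9s across the 3 allowed retries.
// Test-mode config (base 10s): 10s, 40s, 90s, ... across up to 10 retries.
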
@@ -12,6 +12,7 @@ import * as api from "./api-client";
|
||||
import { CachingKind } from "./caching-utils";
|
||||
import { createStubCodeQL } from "./codeql";
|
||||
import * as configUtils from "./config-utils";
|
||||
import * as errorMessages from "./error-messages";
|
||||
import { Feature } from "./feature-flags";
|
||||
import * as gitUtils from "./git-utils";
|
||||
import { KnownLanguage, Language } from "./languages";
|
||||
@@ -32,7 +33,6 @@ import {
|
||||
import {
|
||||
GitHubVariant,
|
||||
GitHubVersion,
|
||||
prettyPrintPack,
|
||||
ConfigurationError,
|
||||
withTmpDir,
|
||||
BuildMode,
|
||||
@@ -341,7 +341,7 @@ test("load input outside of workspace", async (t) => {
|
||||
t.deepEqual(
|
||||
err,
|
||||
new ConfigurationError(
|
||||
configUtils.getConfigFileOutsideWorkspaceErrorMessage(
|
||||
errorMessages.getConfigFileOutsideWorkspaceErrorMessage(
|
||||
path.join(tempDir, "../input"),
|
||||
),
|
||||
),
|
||||
@@ -368,7 +368,7 @@ test("load non-local input with invalid repo syntax", async (t) => {
|
||||
t.deepEqual(
|
||||
err,
|
||||
new ConfigurationError(
|
||||
configUtils.getConfigFileRepoFormatInvalidMessage(
|
||||
errorMessages.getConfigFileRepoFormatInvalidMessage(
|
||||
"octo-org/codeql-config@main",
|
||||
),
|
||||
),
|
||||
@@ -397,7 +397,7 @@ test("load non-existent input", async (t) => {
|
||||
t.deepEqual(
|
||||
err,
|
||||
new ConfigurationError(
|
||||
configUtils.getConfigFileDoesNotExistErrorMessage(
|
||||
errorMessages.getConfigFileDoesNotExistErrorMessage(
|
||||
path.join(tempDir, "input"),
|
||||
),
|
||||
),
|
||||
@@ -604,7 +604,7 @@ test("Remote config handles the case where a directory is provided", async (t) =
|
||||
t.deepEqual(
|
||||
err,
|
||||
new ConfigurationError(
|
||||
configUtils.getConfigFileDirectoryGivenMessage(repoReference),
|
||||
errorMessages.getConfigFileDirectoryGivenMessage(repoReference),
|
||||
),
|
||||
);
|
||||
}
|
||||
@@ -632,7 +632,7 @@ test("Invalid format of remote config handled correctly", async (t) => {
|
||||
t.deepEqual(
|
||||
err,
|
||||
new ConfigurationError(
|
||||
configUtils.getConfigFileFormatInvalidMessage(repoReference),
|
||||
errorMessages.getConfigFileFormatInvalidMessage(repoReference),
|
||||
),
|
||||
);
|
||||
}
|
||||
@@ -660,7 +660,7 @@ test("No detected languages", async (t) => {
|
||||
} catch (err) {
|
||||
t.deepEqual(
|
||||
err,
|
||||
new ConfigurationError(configUtils.getNoLanguagesError()),
|
||||
new ConfigurationError(errorMessages.getNoLanguagesError()),
|
||||
);
|
||||
}
|
||||
});
|
||||
@@ -683,344 +683,15 @@ test("Unknown languages", async (t) => {
|
||||
t.deepEqual(
|
||||
err,
|
||||
new ConfigurationError(
|
||||
configUtils.getUnknownLanguagesError(["rubbish", "english"]),
|
||||
errorMessages.getUnknownLanguagesError(["rubbish", "english"]),
|
||||
),
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* Test macro for ensuring the packs block is valid
|
||||
*/
|
||||
const parsePacksMacro = test.macro({
|
||||
exec: (
|
||||
t: ExecutionContext<unknown>,
|
||||
packsInput: string,
|
||||
languages: Language[],
|
||||
expected: configUtils.Packs | undefined,
|
||||
) =>
|
||||
t.deepEqual(
|
||||
configUtils.parsePacksFromInput(packsInput, languages, false),
|
||||
expected,
|
||||
),
|
||||
|
||||
title: (providedTitle = "") => `Parse Packs: ${providedTitle}`,
|
||||
});
|
||||
|
||||
/**
|
||||
* Test macro for testing when the packs block is invalid
|
||||
*/
|
||||
const parsePacksErrorMacro = test.macro({
|
||||
exec: (
|
||||
t: ExecutionContext<unknown>,
|
||||
packsInput: string,
|
||||
languages: Language[],
|
||||
expected: RegExp,
|
||||
) =>
|
||||
t.throws(
|
||||
() => configUtils.parsePacksFromInput(packsInput, languages, false),
|
||||
{
|
||||
message: expected,
|
||||
},
|
||||
),
|
||||
title: (providedTitle = "") => `Parse Packs Error: ${providedTitle}`,
|
||||
});
|
||||
|
||||
/**
|
||||
* Test macro for testing when the packs block is invalid
|
||||
*/
|
||||
const invalidPackNameMacro = test.macro({
|
||||
exec: (t: ExecutionContext, name: string) =>
|
||||
parsePacksErrorMacro.exec(
|
||||
t,
|
||||
name,
|
||||
[KnownLanguage.cpp],
|
||||
new RegExp(`^"${name}" is not a valid pack$`),
|
||||
),
|
||||
title: (_providedTitle: string | undefined, arg: string | undefined) =>
|
||||
`Invalid pack string: ${arg}`,
|
||||
});
|
||||
|
||||
test("no packs", parsePacksMacro, "", [], undefined);
|
||||
test("two packs", parsePacksMacro, "a/b,c/d@1.2.3", [KnownLanguage.cpp], {
|
||||
[KnownLanguage.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
});
|
||||
test(
|
||||
"two packs with spaces",
|
||||
parsePacksMacro,
|
||||
" a/b , c/d@1.2.3 ",
|
||||
[KnownLanguage.cpp],
|
||||
{
|
||||
[KnownLanguage.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
},
|
||||
);
|
||||
test(
|
||||
"two packs with language",
|
||||
parsePacksErrorMacro,
|
||||
"a/b,c/d@1.2.3",
|
||||
[KnownLanguage.cpp, KnownLanguage.java],
|
||||
new RegExp(
|
||||
"Cannot specify a 'packs' input in a multi-language analysis. " +
|
||||
"Use a codeql-config.yml file instead and specify packs by language.",
|
||||
),
|
||||
);
|
||||
|
||||
test(
|
||||
"packs with other valid names",
|
||||
parsePacksMacro,
|
||||
[
|
||||
// ranges are ok
|
||||
"c/d@1.0",
|
||||
"c/d@~1.0.0",
|
||||
"c/d@~1.0.0:a/b",
|
||||
"c/d@~1.0.0+abc:a/b",
|
||||
"c/d@~1.0.0-abc:a/b",
|
||||
"c/d:a/b",
|
||||
// whitespace is removed
|
||||
" c/d @ ~1.0.0 : b.qls ",
|
||||
// and it is retained within a path
|
||||
" c/d @ ~1.0.0 : b/a path with/spaces.qls ",
|
||||
// this is valid. the path is '@'. It will probably fail when passed to the CLI
|
||||
"c/d@1.2.3:@",
|
||||
// this is valid, too. It will fail if it doesn't match a path
|
||||
// (globbing is not done)
|
||||
"c/d@1.2.3:+*)_(",
|
||||
].join(","),
|
||||
[KnownLanguage.cpp],
|
||||
{
|
||||
[KnownLanguage.cpp]: [
|
||||
"c/d@1.0",
|
||||
"c/d@~1.0.0",
|
||||
"c/d@~1.0.0:a/b",
|
||||
"c/d@~1.0.0+abc:a/b",
|
||||
"c/d@~1.0.0-abc:a/b",
|
||||
"c/d:a/b",
|
||||
"c/d@~1.0.0:b.qls",
|
||||
"c/d@~1.0.0:b/a path with/spaces.qls",
|
||||
"c/d@1.2.3:@",
|
||||
"c/d@1.2.3:+*)_(",
|
||||
],
|
||||
},
|
||||
);
|
||||
|
||||
test(invalidPackNameMacro, "c"); // all packs require at least a scope and a name
|
||||
test(invalidPackNameMacro, "c-/d");
|
||||
test(invalidPackNameMacro, "-c/d");
|
||||
test(invalidPackNameMacro, "c/d_d");
|
||||
test(invalidPackNameMacro, "c/d@@");
|
||||
test(invalidPackNameMacro, "c/d@1.0.0:");
|
||||
test(invalidPackNameMacro, "c/d:");
|
||||
test(invalidPackNameMacro, "c/d:/a");
|
||||
test(invalidPackNameMacro, "@1.0.0:a");
|
||||
test(invalidPackNameMacro, "c/d@../a");
|
||||
test(invalidPackNameMacro, "c/d@b/../a");
|
||||
test(invalidPackNameMacro, "c/d:z@1");
|
||||
|
||||
/**
|
||||
* Test macro for pretty printing pack specs
|
||||
*/
|
||||
const packSpecPrettyPrintingMacro = test.macro({
|
||||
exec: (t: ExecutionContext, packStr: string, packObj: configUtils.Pack) => {
|
||||
const parsed = configUtils.parsePacksSpecification(packStr);
|
||||
t.deepEqual(parsed, packObj, "parsed pack spec is correct");
|
||||
const stringified = prettyPrintPack(packObj);
|
||||
t.deepEqual(
|
||||
stringified,
|
||||
packStr.trim(),
|
||||
"pretty-printed pack spec is correct",
|
||||
);
|
||||
|
||||
t.deepEqual(
|
||||
configUtils.validatePackSpecification(packStr),
|
||||
packStr.trim(),
|
||||
"pack spec is valid",
|
||||
);
|
||||
},
|
||||
title: (
|
||||
_providedTitle: string | undefined,
|
||||
packStr: string,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
_packObj: configUtils.Pack,
|
||||
) => `Prettyprint pack spec: '${packStr}'`,
|
||||
});
|
||||
|
||||
test(packSpecPrettyPrintingMacro, "a/b", {
|
||||
name: "a/b",
|
||||
version: undefined,
|
||||
path: undefined,
|
||||
});
|
||||
test(packSpecPrettyPrintingMacro, "a/b@~1.2.3", {
|
||||
name: "a/b",
|
||||
version: "~1.2.3",
|
||||
path: undefined,
|
||||
});
|
||||
test(packSpecPrettyPrintingMacro, "a/b@~1.2.3:abc/def", {
|
||||
name: "a/b",
|
||||
version: "~1.2.3",
|
||||
path: "abc/def",
|
||||
});
|
||||
test(packSpecPrettyPrintingMacro, "a/b:abc/def", {
|
||||
name: "a/b",
|
||||
version: undefined,
|
||||
path: "abc/def",
|
||||
});
|
||||
test(packSpecPrettyPrintingMacro, " a/b:abc/def ", {
|
||||
name: "a/b",
|
||||
version: undefined,
|
||||
path: "abc/def",
|
||||
});
|
||||
|
||||
const mockLogger = getRunnerLogger(true);
|
||||
|
||||
const calculateAugmentationMacro = test.macro({
|
||||
exec: async (
|
||||
t: ExecutionContext,
|
||||
_title: string,
|
||||
rawPacksInput: string | undefined,
|
||||
rawQueriesInput: string | undefined,
|
||||
languages: Language[],
|
||||
expectedAugmentationProperties: configUtils.AugmentationProperties,
|
||||
) => {
|
||||
const actualAugmentationProperties =
|
||||
await configUtils.calculateAugmentation(
|
||||
rawPacksInput,
|
||||
rawQueriesInput,
|
||||
languages,
|
||||
);
|
||||
t.deepEqual(actualAugmentationProperties, expectedAugmentationProperties);
|
||||
},
|
||||
title: (_, title) => `Calculate Augmentation: ${title}`,
|
||||
});
|
||||
|
||||
test(
|
||||
calculateAugmentationMacro,
|
||||
"All empty",
|
||||
undefined,
|
||||
undefined,
|
||||
[KnownLanguage.javascript],
|
||||
{
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationMacro,
|
||||
"With queries",
|
||||
undefined,
|
||||
" a, b , c, d",
|
||||
[KnownLanguage.javascript],
|
||||
{
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationMacro,
|
||||
"With queries combining",
|
||||
undefined,
|
||||
" + a, b , c, d ",
|
||||
[KnownLanguage.javascript],
|
||||
{
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
queriesInputCombines: true,
|
||||
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationMacro,
|
||||
"With packs",
|
||||
" codeql/a , codeql/b , codeql/c , codeql/d ",
|
||||
undefined,
|
||||
[KnownLanguage.javascript],
|
||||
{
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationMacro,
|
||||
"With packs combining",
|
||||
" + codeql/a, codeql/b, codeql/c, codeql/d",
|
||||
undefined,
|
||||
[KnownLanguage.javascript],
|
||||
{
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
packsInputCombines: true,
|
||||
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
|
||||
},
|
||||
);
|
||||
|
||||
const calculateAugmentationErrorMacro = test.macro({
|
||||
exec: async (
|
||||
t: ExecutionContext,
|
||||
_title: string,
|
||||
rawPacksInput: string | undefined,
|
||||
rawQueriesInput: string | undefined,
|
||||
languages: Language[],
|
||||
expectedError: RegExp | string,
|
||||
) => {
|
||||
await t.throwsAsync(
|
||||
() =>
|
||||
configUtils.calculateAugmentation(
|
||||
rawPacksInput,
|
||||
rawQueriesInput,
|
||||
languages,
|
||||
),
|
||||
{ message: expectedError },
|
||||
);
|
||||
},
|
||||
title: (_, title) => `Calculate Augmentation Error: ${title}`,
|
||||
});
|
||||
|
||||
test(
|
||||
calculateAugmentationErrorMacro,
|
||||
"Plus (+) with nothing else (queries)",
|
||||
undefined,
|
||||
" + ",
|
||||
[KnownLanguage.javascript],
|
||||
/The workflow property "queries" is invalid/,
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationErrorMacro,
|
||||
"Plus (+) with nothing else (packs)",
|
||||
" + ",
|
||||
undefined,
|
||||
[KnownLanguage.javascript],
|
||||
/The workflow property "packs" is invalid/,
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationErrorMacro,
|
||||
"Packs input with multiple languages",
|
||||
" + a/b, c/d ",
|
||||
undefined,
|
||||
[KnownLanguage.javascript, KnownLanguage.java],
|
||||
/Cannot specify a 'packs' input in a multi-language analysis/,
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationErrorMacro,
|
||||
"Packs input with no languages",
|
||||
" + a/b, c/d ",
|
||||
undefined,
|
||||
[],
|
||||
/No languages specified/,
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationErrorMacro,
|
||||
"Invalid packs",
|
||||
" a-pack-without-a-scope ",
|
||||
undefined,
|
||||
[KnownLanguage.javascript],
|
||||
/"a-pack-without-a-scope" is not a valid pack/,
|
||||
);
|
||||
|
||||
test("no generateRegistries when registries is undefined", async (t) => {
|
||||
return await withTmpDir(async (tmpDir) => {
|
||||
const registriesInput = undefined;
|
||||
@@ -1097,28 +768,28 @@ const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
|
||||
languagesInput: "",
|
||||
languagesInRepository: ["html"],
|
||||
expectedApiCall: true,
|
||||
expectedError: configUtils.getNoLanguagesError(),
|
||||
expectedError: errorMessages.getNoLanguagesError(),
|
||||
},
|
||||
{
|
||||
name: "no languages",
|
||||
languagesInput: "",
|
||||
languagesInRepository: [],
|
||||
expectedApiCall: true,
|
||||
expectedError: configUtils.getNoLanguagesError(),
|
||||
expectedError: errorMessages.getNoLanguagesError(),
|
||||
},
|
||||
{
|
||||
name: "unrecognized languages from input",
|
||||
languagesInput: "a, b, c, javascript",
|
||||
languagesInRepository: [],
|
||||
expectedApiCall: false,
|
||||
expectedError: configUtils.getUnknownLanguagesError(["a", "b"]),
|
||||
expectedError: errorMessages.getUnknownLanguagesError(["a", "b"]),
|
||||
},
|
||||
{
|
||||
name: "extractors that aren't languages aren't included (specified)",
|
||||
languagesInput: "html",
|
||||
languagesInRepository: [],
|
||||
expectedApiCall: false,
|
||||
expectedError: configUtils.getUnknownLanguagesError(["html"]),
|
||||
expectedError: errorMessages.getUnknownLanguagesError(["html"]),
|
||||
},
|
||||
{
|
||||
name: "extractors that aren't languages aren't included (autodetected)",
|
||||
|
||||
@@ -3,7 +3,6 @@ import * as path from "path";
|
||||
import { performance } from "perf_hooks";
|
||||
|
||||
import * as yaml from "js-yaml";
|
||||
import * as semver from "semver";
|
||||
|
||||
import { getActionVersion, isAnalyzingPullRequest } from "./actions-util";
|
||||
import {
|
||||
@@ -17,7 +16,14 @@ import {
|
||||
import * as api from "./api-client";
|
||||
import { CachingKind, getCachingKind } from "./caching-utils";
|
||||
import { type CodeQL } from "./codeql";
|
||||
import {
|
||||
calculateAugmentation,
|
||||
ExcludeQueryFilter,
|
||||
generateCodeScanningConfig,
|
||||
UserConfig,
|
||||
} from "./config/db-config";
|
||||
import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
|
||||
import * as errorMessages from "./error-messages";
|
||||
import { Feature, FeatureEnablement } from "./feature-flags";
|
||||
import { getGitRoot, isAnalyzingDefaultBranch } from "./git-utils";
|
||||
import { KnownLanguage, Language } from "./languages";
|
||||
@@ -30,7 +36,6 @@ import { RepositoryNwo } from "./repository";
|
||||
import { downloadTrapCaches } from "./trap-caching";
|
||||
import {
|
||||
GitHubVersion,
|
||||
prettyPrintPack,
|
||||
ConfigurationError,
|
||||
BuildMode,
|
||||
codeQlVersionAtLeast,
|
||||
@@ -38,34 +43,7 @@ import {
|
||||
isDefined,
|
||||
} from "./util";
|
||||
|
||||
// Property names from the user-supplied config file.
|
||||
|
||||
const PACKS_PROPERTY = "packs";
|
||||
|
||||
/**
|
||||
* Format of the config file supplied by the user.
|
||||
*/
|
||||
export interface UserConfig {
|
||||
name?: string;
|
||||
"disable-default-queries"?: boolean;
|
||||
queries?: Array<{
|
||||
name?: string;
|
||||
uses: string;
|
||||
}>;
|
||||
"paths-ignore"?: string[];
|
||||
paths?: string[];
|
||||
|
||||
// If this is a multi-language analysis, then the packages must be split by
|
||||
// language. If this is a single language analysis, then no split by
|
||||
// language is necessary.
|
||||
packs?: Record<string, string[]> | string[];
|
||||
|
||||
// Set of query filters to include and exclude extra queries based on
|
||||
// codeql query suite `include` and `exclude` properties
|
||||
"query-filters"?: QueryFilter[];
|
||||
}
|
||||
|
||||
export type QueryFilter = ExcludeQueryFilter | IncludeQueryFilter;
|
||||
export * from "./config/db-config";
|
||||
|
||||
export type RegistryConfigWithCredentials = RegistryConfigNoCredentials & {
|
||||
// Token to use when downloading packs from this registry.
|
||||
@@ -90,14 +68,6 @@ export interface RegistryConfigNoCredentials {
|
||||
kind?: "github" | "docker";
|
||||
}
|
||||
|
||||
interface ExcludeQueryFilter {
|
||||
exclude: Record<string, string[] | string>;
|
||||
}
|
||||
|
||||
interface IncludeQueryFilter {
|
||||
include: Record<string, string[] | string>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format of the parsed config file.
|
||||
*/
|
||||
@@ -199,121 +169,6 @@ export interface Config {
|
||||
useOverlayDatabaseCaching: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Describes how to augment the user config with inputs from the action.
|
||||
*
|
||||
* When running a CodeQL analysis, the user can supply a config file. When
|
||||
* running a CodeQL analysis from a GitHub action, the user can supply a
|
||||
* config file _and_ a set of inputs.
|
||||
*
|
||||
* The inputs from the action are used to augment the user config before
|
||||
* passing the user config to the CodeQL CLI invocation.
|
||||
*/
|
||||
export interface AugmentationProperties {
|
||||
/**
|
||||
* Whether or not the queries input combines with the queries in the config.
|
||||
*/
|
||||
queriesInputCombines: boolean;
|
||||
|
||||
/**
|
||||
* The queries input from the `with` block of the action declaration
|
||||
*/
|
||||
queriesInput?: Array<{ uses: string }>;
|
||||
|
||||
/**
|
||||
* Whether or not the packs input combines with the packs in the config.
|
||||
*/
|
||||
packsInputCombines: boolean;
|
||||
|
||||
/**
|
||||
* The packs input from the `with` block of the action declaration
|
||||
*/
|
||||
packsInput?: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* The default, empty augmentation properties. This is most useful
|
||||
* for tests.
|
||||
*/
|
||||
export const defaultAugmentationProperties: AugmentationProperties = {
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
packsInput: undefined,
|
||||
queriesInput: undefined,
|
||||
};
|
||||
export type Packs = Partial<Record<Language, string[]>>;
|
||||
|
||||
export interface Pack {
|
||||
name: string;
|
||||
version?: string;
|
||||
path?: string;
|
||||
}
|
||||
|
||||
export function getPacksStrInvalid(
|
||||
packStr: string,
|
||||
configFile?: string,
|
||||
): string {
|
||||
return configFile
|
||||
? getConfigFilePropertyError(
|
||||
configFile,
|
||||
PACKS_PROPERTY,
|
||||
`"${packStr}" is not a valid pack`,
|
||||
)
|
||||
: `"${packStr}" is not a valid pack`;
|
||||
}
|
||||
|
||||
export function getConfigFileOutsideWorkspaceErrorMessage(
|
||||
configFile: string,
|
||||
): string {
|
||||
return `The configuration file "${configFile}" is outside of the workspace`;
|
||||
}
|
||||
|
||||
export function getConfigFileDoesNotExistErrorMessage(
|
||||
configFile: string,
|
||||
): string {
|
||||
return `The configuration file "${configFile}" does not exist`;
|
||||
}
|
||||
|
||||
export function getConfigFileRepoFormatInvalidMessage(
|
||||
configFile: string,
|
||||
): string {
|
||||
let error = `The configuration file "${configFile}" is not a supported remote file reference.`;
|
||||
error += " Expected format <owner>/<repository>/<file-path>@<ref>";
|
||||
|
||||
return error;
|
||||
}
|
||||
|
||||
export function getConfigFileFormatInvalidMessage(configFile: string): string {
|
||||
return `The configuration file "${configFile}" could not be read`;
|
||||
}
|
||||
|
||||
export function getConfigFileDirectoryGivenMessage(configFile: string): string {
|
||||
return `The configuration file "${configFile}" looks like a directory, not a file`;
|
||||
}
|
||||
|
||||
function getConfigFilePropertyError(
|
||||
configFile: string | undefined,
|
||||
property: string,
|
||||
error: string,
|
||||
): string {
|
||||
if (configFile === undefined) {
|
||||
return `The workflow property "${property}" is invalid: ${error}`;
|
||||
} else {
|
||||
return `The configuration file "${configFile}" is invalid: property "${property}" ${error}`;
|
||||
}
|
||||
}
|
||||
|
||||
export function getNoLanguagesError(): string {
|
||||
return (
|
||||
"Did not detect any languages to analyze. " +
|
||||
"Please update input in workflow or check that GitHub detects the correct languages in your repository."
|
||||
);
|
||||
}
|
||||
|
||||
export function getUnknownLanguagesError(languages: string[]): string {
|
||||
return `Did not recognize the following languages: ${languages.join(", ")}`;
|
||||
}
|
||||
|
||||
export async function getSupportedLanguageMap(
|
||||
codeql: CodeQL,
|
||||
features: FeatureEnablement,
|
||||
@@ -450,13 +305,15 @@ export async function getLanguages(
|
||||
const languages = Array.from(languagesSet);
|
||||
|
||||
if (!autodetected && unknownLanguages.length > 0) {
|
||||
throw new ConfigurationError(getUnknownLanguagesError(unknownLanguages));
|
||||
throw new ConfigurationError(
|
||||
errorMessages.getUnknownLanguagesError(unknownLanguages),
|
||||
);
|
||||
}
|
||||
|
||||
// If the languages parameter was not given and no languages were
|
||||
// detected then fail here as this is a workflow configuration error.
|
||||
if (languages.length === 0) {
|
||||
throw new ConfigurationError(getNoLanguagesError());
|
||||
throw new ConfigurationError(errorMessages.getNoLanguagesError());
|
||||
}
|
||||
|
||||
if (autodetected) {
|
||||
@@ -666,7 +523,7 @@ async function loadUserConfig(
|
||||
// Error if the config file is now outside of the workspace
|
||||
if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
|
||||
throw new ConfigurationError(
|
||||
getConfigFileOutsideWorkspaceErrorMessage(configFile),
|
||||
errorMessages.getConfigFileOutsideWorkspaceErrorMessage(configFile),
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -676,73 +533,6 @@ async function loadUserConfig(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates how the codeql config file needs to be augmented before passing
|
||||
* it to the CLI. The reason this is necessary is the codeql-action can be called
|
||||
* with extra inputs from the workflow. These inputs are not part of the config
|
||||
* and the CLI does not know about these inputs so we need to inject them into
|
||||
* the config file sent to the CLI.
|
||||
*
|
||||
* @param rawPacksInput The packs input from the action configuration.
|
||||
* @param rawQueriesInput The queries input from the action configuration.
|
||||
* @param languages The languages that the config file is for. If the packs input
|
||||
* is non-empty, then there must be exactly one language. Otherwise, an
|
||||
* error is thrown.
|
||||
*
|
||||
* @returns The properties that need to be augmented in the config file.
|
||||
*
|
||||
* @throws An error if the packs input is non-empty and the languages input does
|
||||
* not have exactly one language.
|
||||
*/
|
||||
// exported for testing.
|
||||
export async function calculateAugmentation(
|
||||
rawPacksInput: string | undefined,
|
||||
rawQueriesInput: string | undefined,
|
||||
languages: Language[],
|
||||
): Promise<AugmentationProperties> {
|
||||
const packsInputCombines = shouldCombine(rawPacksInput);
|
||||
const packsInput = parsePacksFromInput(
|
||||
rawPacksInput,
|
||||
languages,
|
||||
packsInputCombines,
|
||||
);
|
||||
const queriesInputCombines = shouldCombine(rawQueriesInput);
|
||||
const queriesInput = parseQueriesFromInput(
|
||||
rawQueriesInput,
|
||||
queriesInputCombines,
|
||||
);
|
||||
|
||||
return {
|
||||
packsInputCombines,
|
||||
packsInput: packsInput?.[languages[0]],
|
||||
queriesInput,
|
||||
queriesInputCombines,
|
||||
};
|
||||
}
|
||||
|
||||
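// Editor's sketch (illustrative, not part of config-utils.ts): an expected result of
// calculateAugmentation above for a combining packs input and an overriding queries input.
//   calculateAugmentation("+ codeql/a", " b, c ", [KnownLanguage.javascript])
const expectedAugmentation = {
  packsInputCombines: true,
  packsInput: ["codeql/a"],
  queriesInputCombines: false,
  queriesInput: [{ uses: "b" }, { uses: "c" }],
};
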
function parseQueriesFromInput(
|
||||
rawQueriesInput: string | undefined,
|
||||
queriesInputCombines: boolean,
|
||||
) {
|
||||
if (!rawQueriesInput) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const trimmedInput = queriesInputCombines
|
||||
? rawQueriesInput.trim().slice(1).trim()
|
||||
: (rawQueriesInput?.trim() ?? "");
|
||||
if (queriesInputCombines && trimmedInput.length === 0) {
|
||||
throw new ConfigurationError(
|
||||
getConfigFilePropertyError(
|
||||
undefined,
|
||||
"queries",
|
||||
"A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs.",
|
||||
),
|
||||
);
|
||||
}
|
||||
return trimmedInput.split(",").map((query) => ({ uses: query.trim() }));
|
||||
}
|
||||
|
||||
const OVERLAY_ANALYSIS_FEATURES: Record<Language, Feature> = {
|
||||
actions: Feature.OverlayAnalysisActions,
|
||||
cpp: Feature.OverlayAnalysisCpp,
|
||||
@@ -938,161 +728,6 @@ export async function getOverlayDatabaseMode(
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Pack names must be in the form of `scope/name`, with only alpha-numeric characters,
|
||||
* and `-` allowed as long as not the first or last char.
|
||||
**/
|
||||
const PACK_IDENTIFIER_PATTERN = (function () {
|
||||
const alphaNumeric = "[a-z0-9]";
|
||||
const alphaNumericDash = "[a-z0-9-]";
|
||||
const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
|
||||
// Exported for testing
|
||||
export function parsePacksFromInput(
|
||||
rawPacksInput: string | undefined,
|
||||
languages: Language[],
|
||||
packsInputCombines: boolean,
|
||||
): Packs | undefined {
|
||||
if (!rawPacksInput?.trim()) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (languages.length > 1) {
|
||||
throw new ConfigurationError(
|
||||
"Cannot specify a 'packs' input in a multi-language analysis. Use a codeql-config.yml file instead and specify packs by language.",
|
||||
);
|
||||
} else if (languages.length === 0) {
|
||||
throw new ConfigurationError(
|
||||
"No languages specified. Cannot process the packs input.",
|
||||
);
|
||||
}
|
||||
|
||||
rawPacksInput = rawPacksInput.trim();
|
||||
if (packsInputCombines) {
|
||||
rawPacksInput = rawPacksInput.trim().substring(1).trim();
|
||||
if (!rawPacksInput) {
|
||||
throw new ConfigurationError(
|
||||
getConfigFilePropertyError(
|
||||
undefined,
|
||||
"packs",
|
||||
"A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs.",
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
[languages[0]]: rawPacksInput.split(",").reduce((packs, pack) => {
|
||||
packs.push(validatePackSpecification(pack));
|
||||
return packs;
|
||||
}, [] as string[]),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that this package specification is syntactically correct.
|
||||
* It may not point to any real package, but after this function returns
|
||||
* without throwing, we are guaranteed that the package specification
|
||||
* is roughly correct.
|
||||
*
|
||||
* The CLI itself will do a more thorough validation of the package
|
||||
* specification.
|
||||
*
|
||||
* A package specification looks like this:
|
||||
*
|
||||
* `scope/name@version:path`
|
||||
*
|
||||
* Version and path are optional.
|
||||
*
|
||||
* @param packStr the package specification to verify.
|
||||
* @param configFile Config file to use for error reporting
|
||||
*/
|
||||
export function parsePacksSpecification(packStr: string): Pack {
|
||||
if (typeof packStr !== "string") {
|
||||
throw new ConfigurationError(getPacksStrInvalid(packStr));
|
||||
}
|
||||
|
||||
packStr = packStr.trim();
|
||||
const atIndex = packStr.indexOf("@");
|
||||
const colonIndex = packStr.indexOf(":", atIndex);
|
||||
const packStart = 0;
|
||||
const versionStart = atIndex + 1 || undefined;
|
||||
const pathStart = colonIndex + 1 || undefined;
|
||||
const packEnd = Math.min(
|
||||
atIndex > 0 ? atIndex : Infinity,
|
||||
colonIndex > 0 ? colonIndex : Infinity,
|
||||
packStr.length,
|
||||
);
|
||||
const versionEnd = versionStart
|
||||
? Math.min(colonIndex > 0 ? colonIndex : Infinity, packStr.length)
|
||||
: undefined;
|
||||
const pathEnd = pathStart ? packStr.length : undefined;
|
||||
|
||||
const packName = packStr.slice(packStart, packEnd).trim();
|
||||
const version = versionStart
|
||||
? packStr.slice(versionStart, versionEnd).trim()
|
||||
: undefined;
|
||||
const packPath = pathStart
|
||||
? packStr.slice(pathStart, pathEnd).trim()
|
||||
: undefined;
|
||||
|
||||
if (!PACK_IDENTIFIER_PATTERN.test(packName)) {
|
||||
throw new ConfigurationError(getPacksStrInvalid(packStr));
|
||||
}
|
||||
if (version) {
|
||||
try {
|
||||
new semver.Range(version);
|
||||
} catch {
|
||||
// The range string is invalid. OK to ignore the caught error
|
||||
throw new ConfigurationError(getPacksStrInvalid(packStr));
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
packPath &&
|
||||
(path.isAbsolute(packPath) ||
|
||||
// Permit using "/" instead of "\" on Windows
|
||||
// Use `x.split(y).join(z)` as a polyfill for `x.replaceAll(y, z)` since
|
||||
// if we used a regex we'd need to escape the path separator on Windows
|
||||
// which seems more awkward.
|
||||
path.normalize(packPath).split(path.sep).join("/") !==
|
||||
packPath.split(path.sep).join("/"))
|
||||
) {
|
||||
throw new ConfigurationError(getPacksStrInvalid(packStr));
|
||||
}
|
||||
|
||||
if (!packPath && pathStart) {
|
||||
// 0 length path
|
||||
throw new ConfigurationError(getPacksStrInvalid(packStr));
|
||||
}
|
||||
|
||||
return {
|
||||
name: packName,
|
||||
version,
|
||||
path: packPath,
|
||||
};
|
||||
}
|
||||
|
||||
export function validatePackSpecification(pack: string) {
|
||||
return prettyPrintPack(parsePacksSpecification(pack));
|
||||
}
|
||||
|
||||
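// Editor's sketch: a worked example of the `scope/name@version:path` format parsed by
// parsePacksSpecification above. The pack name is a real CodeQL pack; the suite path is
// illustrative only.
//   parsePacksSpecification("codeql/javascript-queries@~0.9.0:codeql-suites/javascript-security-extended.qls")
// is expected to produce:
const examplePack = {
  name: "codeql/javascript-queries",
  version: "~0.9.0",
  path: "codeql-suites/javascript-security-extended.qls",
};
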
/**
|
||||
* The convention in this action is that an input value that is prefixed with a '+' will
|
||||
* be combined with the corresponding value in the config file.
|
||||
*
|
||||
* Without a '+', an input value will override the corresponding value in the config file.
|
||||
*
|
||||
* @param inputValue The input value to process.
|
||||
* @returns true if the input value should be combined with (appended to) the corresponding
* value in the config file, false if it should replace it.
|
||||
*/
|
||||
function shouldCombine(inputValue?: string): boolean {
|
||||
return !!inputValue?.trim().startsWith("+");
|
||||
}
|
||||
|
||||
function dbLocationOrDefault(
|
||||
dbLocation: string | undefined,
|
||||
tempDir: string,
|
||||
@@ -1245,7 +880,7 @@ function getLocalConfig(configFile: string): UserConfig {
|
||||
// Error if the file does not exist
|
||||
if (!fs.existsSync(configFile)) {
|
||||
throw new ConfigurationError(
|
||||
getConfigFileDoesNotExistErrorMessage(configFile),
|
||||
errorMessages.getConfigFileDoesNotExistErrorMessage(configFile),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1264,7 +899,7 @@ async function getRemoteConfig(
|
||||
// 5 = 4 groups + the whole expression
|
||||
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
|
||||
throw new ConfigurationError(
|
||||
getConfigFileRepoFormatInvalidMessage(configFile),
|
||||
errorMessages.getConfigFileRepoFormatInvalidMessage(configFile),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1282,10 +917,12 @@ async function getRemoteConfig(
|
||||
fileContents = response.data.content;
|
||||
} else if (Array.isArray(response.data)) {
|
||||
throw new ConfigurationError(
|
||||
getConfigFileDirectoryGivenMessage(configFile),
|
||||
errorMessages.getConfigFileDirectoryGivenMessage(configFile),
|
||||
);
|
||||
} else {
|
||||
throw new ConfigurationError(getConfigFileFormatInvalidMessage(configFile));
|
||||
throw new ConfigurationError(
|
||||
errorMessages.getConfigFileFormatInvalidMessage(configFile),
|
||||
);
|
||||
}
|
||||
|
||||
return yaml.load(
|
||||
@@ -1496,56 +1133,6 @@ export async function parseBuildModeInput(
|
||||
return input as BuildMode;
|
||||
}
|
||||
|
||||
export function generateCodeScanningConfig(
|
||||
originalUserInput: UserConfig,
|
||||
augmentationProperties: AugmentationProperties,
|
||||
): UserConfig {
|
||||
// make a copy so we can modify it
|
||||
const augmentedConfig = cloneObject(originalUserInput);
|
||||
|
||||
// Inject the queries from the input
|
||||
if (augmentationProperties.queriesInput) {
|
||||
if (augmentationProperties.queriesInputCombines) {
|
||||
augmentedConfig.queries = (augmentedConfig.queries || []).concat(
|
||||
augmentationProperties.queriesInput,
|
||||
);
|
||||
} else {
|
||||
augmentedConfig.queries = augmentationProperties.queriesInput;
|
||||
}
|
||||
}
|
||||
if (augmentedConfig.queries?.length === 0) {
|
||||
delete augmentedConfig.queries;
|
||||
}
|
||||
|
||||
// Inject the packs from the input
|
||||
if (augmentationProperties.packsInput) {
|
||||
if (augmentationProperties.packsInputCombines) {
|
||||
// At this point, we already know that this is a single-language analysis
|
||||
if (Array.isArray(augmentedConfig.packs)) {
|
||||
augmentedConfig.packs = (augmentedConfig.packs || []).concat(
|
||||
augmentationProperties.packsInput,
|
||||
);
|
||||
} else if (!augmentedConfig.packs) {
|
||||
augmentedConfig.packs = augmentationProperties.packsInput;
|
||||
} else {
|
||||
// At this point, we know there is only one language.
|
||||
// If there were more than one language, an error would already have been thrown.
|
||||
const language = Object.keys(augmentedConfig.packs)[0];
|
||||
augmentedConfig.packs[language] = augmentedConfig.packs[
|
||||
language
|
||||
].concat(augmentationProperties.packsInput);
|
||||
}
|
||||
} else {
|
||||
augmentedConfig.packs = augmentationProperties.packsInput;
|
||||
}
|
||||
}
|
||||
if (Array.isArray(augmentedConfig.packs) && !augmentedConfig.packs.length) {
|
||||
delete augmentedConfig.packs;
|
||||
}
|
||||
|
||||
return augmentedConfig;
|
||||
}
|
||||
|
||||
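// Editor's sketch (illustrative): how generateCodeScanningConfig above merges the queries
// input into a user config, depending on queriesInputCombines.
const userCfg = { queries: [{ uses: "./local-query.ql" }] };
// queriesInputCombines: false => the input replaces the config queries:
const overriddenCfg = { queries: [{ uses: "security-extended" }] };
// queriesInputCombines: true => the input is appended to the config queries:
const combinedCfg = {
  queries: [{ uses: "./local-query.ql" }, { uses: "security-extended" }],
};
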
/**
|
||||
* Appends `extraQueryExclusions` to `cliConfig`'s `query-filters`.
|
||||
*
|
||||
|
||||
331 src/config/db-config.test.ts (new file)
@@ -0,0 +1,331 @@
|
||||
import test, { ExecutionContext } from "ava";
|
||||
|
||||
import { KnownLanguage, Language } from "../languages";
|
||||
import { prettyPrintPack } from "../util";
|
||||
|
||||
import * as dbConfig from "./db-config";
|
||||
|
||||
/**
|
||||
* Test macro for ensuring the packs block is valid
|
||||
*/
|
||||
const parsePacksMacro = test.macro({
|
||||
exec: (
|
||||
t: ExecutionContext<unknown>,
|
||||
packsInput: string,
|
||||
languages: Language[],
|
||||
expected: dbConfig.Packs | undefined,
|
||||
) =>
|
||||
t.deepEqual(
|
||||
dbConfig.parsePacksFromInput(packsInput, languages, false),
|
||||
expected,
|
||||
),
|
||||
|
||||
title: (providedTitle = "") => `Parse Packs: ${providedTitle}`,
|
||||
});
|
||||
|
||||
/**
|
||||
* Test macro for testing when the packs block is invalid
|
||||
*/
|
||||
const parsePacksErrorMacro = test.macro({
|
||||
exec: (
|
||||
t: ExecutionContext<unknown>,
|
||||
packsInput: string,
|
||||
languages: Language[],
|
||||
expected: RegExp,
|
||||
) =>
|
||||
t.throws(() => dbConfig.parsePacksFromInput(packsInput, languages, false), {
|
||||
message: expected,
|
||||
}),
|
||||
title: (providedTitle = "") => `Parse Packs Error: ${providedTitle}`,
|
||||
});
|
||||
|
||||
/**
|
||||
* Test macro for testing when the packs block is invalid
|
||||
*/
|
||||
const invalidPackNameMacro = test.macro({
|
||||
exec: (t: ExecutionContext, name: string) =>
|
||||
parsePacksErrorMacro.exec(
|
||||
t,
|
||||
name,
|
||||
[KnownLanguage.cpp],
|
||||
new RegExp(`^"${name}" is not a valid pack$`),
|
||||
),
|
||||
title: (_providedTitle: string | undefined, arg: string | undefined) =>
|
||||
`Invalid pack string: ${arg}`,
|
||||
});
|
||||
|
||||
test("no packs", parsePacksMacro, "", [], undefined);
|
||||
test("two packs", parsePacksMacro, "a/b,c/d@1.2.3", [KnownLanguage.cpp], {
|
||||
[KnownLanguage.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
});
|
||||
test(
|
||||
"two packs with spaces",
|
||||
parsePacksMacro,
|
||||
" a/b , c/d@1.2.3 ",
|
||||
[KnownLanguage.cpp],
|
||||
{
|
||||
[KnownLanguage.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
},
|
||||
);
|
||||
test(
|
||||
"two packs with language",
|
||||
parsePacksErrorMacro,
|
||||
"a/b,c/d@1.2.3",
|
||||
[KnownLanguage.cpp, KnownLanguage.java],
|
||||
new RegExp(
|
||||
"Cannot specify a 'packs' input in a multi-language analysis. " +
|
||||
"Use a codeql-config.yml file instead and specify packs by language.",
|
||||
),
|
||||
);
|
||||
|
||||
test(
|
||||
"packs with other valid names",
|
||||
parsePacksMacro,
|
||||
[
|
||||
// ranges are ok
|
||||
"c/d@1.0",
|
||||
"c/d@~1.0.0",
|
||||
"c/d@~1.0.0:a/b",
|
||||
"c/d@~1.0.0+abc:a/b",
|
||||
"c/d@~1.0.0-abc:a/b",
|
||||
"c/d:a/b",
|
||||
// whitespace is removed
|
||||
" c/d @ ~1.0.0 : b.qls ",
|
||||
// and it is retained within a path
|
||||
" c/d @ ~1.0.0 : b/a path with/spaces.qls ",
|
||||
// this is valid. the path is '@'. It will probably fail when passed to the CLI
|
||||
"c/d@1.2.3:@",
|
||||
// this is valid, too. It will fail if it doesn't match a path
|
||||
// (globbing is not done)
|
||||
"c/d@1.2.3:+*)_(",
|
||||
].join(","),
|
||||
[KnownLanguage.cpp],
|
||||
{
|
||||
[KnownLanguage.cpp]: [
|
||||
"c/d@1.0",
|
||||
"c/d@~1.0.0",
|
||||
"c/d@~1.0.0:a/b",
|
||||
"c/d@~1.0.0+abc:a/b",
|
||||
"c/d@~1.0.0-abc:a/b",
|
||||
"c/d:a/b",
|
||||
"c/d@~1.0.0:b.qls",
|
||||
"c/d@~1.0.0:b/a path with/spaces.qls",
|
||||
"c/d@1.2.3:@",
|
||||
"c/d@1.2.3:+*)_(",
|
||||
],
|
||||
},
|
||||
);
|
||||
|
||||
test(invalidPackNameMacro, "c"); // all packs require at least a scope and a name
|
||||
test(invalidPackNameMacro, "c-/d");
|
||||
test(invalidPackNameMacro, "-c/d");
|
||||
test(invalidPackNameMacro, "c/d_d");
|
||||
test(invalidPackNameMacro, "c/d@@");
|
||||
test(invalidPackNameMacro, "c/d@1.0.0:");
|
||||
test(invalidPackNameMacro, "c/d:");
|
||||
test(invalidPackNameMacro, "c/d:/a");
|
||||
test(invalidPackNameMacro, "@1.0.0:a");
|
||||
test(invalidPackNameMacro, "c/d@../a");
|
||||
test(invalidPackNameMacro, "c/d@b/../a");
|
||||
test(invalidPackNameMacro, "c/d:z@1");
|
||||
|
||||
/**
|
||||
* Test macro for pretty printing pack specs
|
||||
*/
|
||||
const packSpecPrettyPrintingMacro = test.macro({
|
||||
exec: (t: ExecutionContext, packStr: string, packObj: dbConfig.Pack) => {
|
||||
const parsed = dbConfig.parsePacksSpecification(packStr);
|
||||
t.deepEqual(parsed, packObj, "parsed pack spec is correct");
|
||||
const stringified = prettyPrintPack(packObj);
|
||||
t.deepEqual(
|
||||
stringified,
|
||||
packStr.trim(),
|
||||
"pretty-printed pack spec is correct",
|
||||
);
|
||||
|
||||
t.deepEqual(
|
||||
dbConfig.validatePackSpecification(packStr),
|
||||
packStr.trim(),
|
||||
"pack spec is valid",
|
||||
);
|
||||
},
|
||||
title: (
|
||||
_providedTitle: string | undefined,
|
||||
packStr: string,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
_packObj: dbConfig.Pack,
|
||||
) => `Prettyprint pack spec: '${packStr}'`,
|
||||
});
|
||||
|
||||
test(packSpecPrettyPrintingMacro, "a/b", {
|
||||
name: "a/b",
|
||||
version: undefined,
|
||||
path: undefined,
|
||||
});
|
||||
test(packSpecPrettyPrintingMacro, "a/b@~1.2.3", {
|
||||
name: "a/b",
|
||||
version: "~1.2.3",
|
||||
path: undefined,
|
||||
});
|
||||
test(packSpecPrettyPrintingMacro, "a/b@~1.2.3:abc/def", {
|
||||
name: "a/b",
|
||||
version: "~1.2.3",
|
||||
path: "abc/def",
|
||||
});
|
||||
test(packSpecPrettyPrintingMacro, "a/b:abc/def", {
|
||||
name: "a/b",
|
||||
version: undefined,
|
||||
path: "abc/def",
|
||||
});
|
||||
test(packSpecPrettyPrintingMacro, " a/b:abc/def ", {
|
||||
name: "a/b",
|
||||
version: undefined,
|
||||
path: "abc/def",
|
||||
});
|
||||
|
||||
const calculateAugmentationMacro = test.macro({
|
||||
exec: async (
|
||||
t: ExecutionContext,
|
||||
_title: string,
|
||||
rawPacksInput: string | undefined,
|
||||
rawQueriesInput: string | undefined,
|
||||
languages: Language[],
|
||||
expectedAugmentationProperties: dbConfig.AugmentationProperties,
|
||||
) => {
|
||||
const actualAugmentationProperties = await dbConfig.calculateAugmentation(
|
||||
rawPacksInput,
|
||||
rawQueriesInput,
|
||||
languages,
|
||||
);
|
||||
t.deepEqual(actualAugmentationProperties, expectedAugmentationProperties);
|
||||
},
|
||||
title: (_, title) => `Calculate Augmentation: ${title}`,
|
||||
});
|
||||
|
||||
test(
|
||||
calculateAugmentationMacro,
|
||||
"All empty",
|
||||
undefined,
|
||||
undefined,
|
||||
[KnownLanguage.javascript],
|
||||
{
|
||||
...dbConfig.defaultAugmentationProperties,
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationMacro,
|
||||
"With queries",
|
||||
undefined,
|
||||
" a, b , c, d",
|
||||
[KnownLanguage.javascript],
|
||||
{
|
||||
...dbConfig.defaultAugmentationProperties,
|
||||
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationMacro,
|
||||
"With queries combining",
|
||||
undefined,
|
||||
" + a, b , c, d ",
|
||||
[KnownLanguage.javascript],
|
||||
{
|
||||
...dbConfig.defaultAugmentationProperties,
|
||||
queriesInputCombines: true,
|
||||
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationMacro,
|
||||
"With packs",
|
||||
" codeql/a , codeql/b , codeql/c , codeql/d ",
|
||||
undefined,
|
||||
[KnownLanguage.javascript],
|
||||
{
|
||||
...dbConfig.defaultAugmentationProperties,
|
||||
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationMacro,
|
||||
"With packs combining",
|
||||
" + codeql/a, codeql/b, codeql/c, codeql/d",
|
||||
undefined,
|
||||
[KnownLanguage.javascript],
|
||||
{
|
||||
...dbConfig.defaultAugmentationProperties,
|
||||
packsInputCombines: true,
|
||||
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
|
||||
},
|
||||
);
|
||||
|
||||
const calculateAugmentationErrorMacro = test.macro({
|
||||
exec: async (
|
||||
t: ExecutionContext,
|
||||
_title: string,
|
||||
rawPacksInput: string | undefined,
|
||||
rawQueriesInput: string | undefined,
|
||||
languages: Language[],
|
||||
expectedError: RegExp | string,
|
||||
) => {
|
||||
await t.throwsAsync(
|
||||
() =>
|
||||
dbConfig.calculateAugmentation(
|
||||
rawPacksInput,
|
||||
rawQueriesInput,
|
||||
languages,
|
||||
),
|
||||
{ message: expectedError },
|
||||
);
|
||||
},
|
||||
title: (_, title) => `Calculate Augmentation Error: ${title}`,
|
||||
});
|
||||
|
||||
test(
|
||||
calculateAugmentationErrorMacro,
|
||||
"Plus (+) with nothing else (queries)",
|
||||
undefined,
|
||||
" + ",
|
||||
[KnownLanguage.javascript],
|
||||
/The workflow property "queries" is invalid/,
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationErrorMacro,
|
||||
"Plus (+) with nothing else (packs)",
|
||||
" + ",
|
||||
undefined,
|
||||
[KnownLanguage.javascript],
|
||||
/The workflow property "packs" is invalid/,
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationErrorMacro,
|
||||
"Packs input with multiple languages",
|
||||
" + a/b, c/d ",
|
||||
undefined,
|
||||
[KnownLanguage.javascript, KnownLanguage.java],
|
||||
/Cannot specify a 'packs' input in a multi-language analysis/,
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationErrorMacro,
|
||||
"Packs input with no languages",
|
||||
" + a/b, c/d ",
|
||||
undefined,
|
||||
[],
|
||||
/No languages specified/,
|
||||
);
|
||||
|
||||
test(
|
||||
calculateAugmentationErrorMacro,
|
||||
"Invalid packs",
|
||||
" a-pack-without-a-scope ",
|
||||
undefined,
|
||||
[KnownLanguage.javascript],
|
||||
/"a-pack-without-a-scope" is not a valid pack/,
|
||||
);
|
||||
363 src/config/db-config.ts (new file)
@@ -0,0 +1,363 @@
|
||||
import * as path from "path";

import * as semver from "semver";

import * as errorMessages from "../error-messages";
import { Language } from "../languages";
import { cloneObject, ConfigurationError, prettyPrintPack } from "../util";

export interface ExcludeQueryFilter {
  exclude: Record<string, string[] | string>;
}

export interface IncludeQueryFilter {
  include: Record<string, string[] | string>;
}

export type QueryFilter = ExcludeQueryFilter | IncludeQueryFilter;

/**
 * Format of the config file supplied by the user.
 */
export interface UserConfig {
  name?: string;
  "disable-default-queries"?: boolean;
  queries?: Array<{
    name?: string;
    uses: string;
  }>;
  "paths-ignore"?: string[];
  paths?: string[];

  // If this is a multi-language analysis, then the packages must be split by
  // language. If this is a single language analysis, then no split by
  // language is necessary.
  packs?: Record<string, string[]> | string[];

  // Set of query filters to include and exclude extra queries based on
  // codeql query suite `include` and `exclude` properties
  "query-filters"?: QueryFilter[];
}

/**
 * Describes how to augment the user config with inputs from the action.
 *
 * When running a CodeQL analysis, the user can supply a config file. When
 * running a CodeQL analysis from a GitHub action, the user can supply a
 * config file _and_ a set of inputs.
 *
 * The inputs from the action are used to augment the user config before
 * passing the user config to the CodeQL CLI invocation.
 */
export interface AugmentationProperties {
  /**
   * Whether or not the queries input combines with the queries in the config.
   */
  queriesInputCombines: boolean;

  /**
   * The queries input from the `with` block of the action declaration
   */
  queriesInput?: Array<{ uses: string }>;

  /**
   * Whether or not the packs input combines with the packs in the config.
   */
  packsInputCombines: boolean;

  /**
   * The packs input from the `with` block of the action declaration
   */
  packsInput?: string[];
}

/**
 * The default, empty augmentation properties. This is most useful
 * for tests.
 */
export const defaultAugmentationProperties: AugmentationProperties = {
  queriesInputCombines: false,
  packsInputCombines: false,
  packsInput: undefined,
  queriesInput: undefined,
};

/**
 * The convention in this action is that an input value that is prefixed with a '+' will
 * be combined with the corresponding value in the config file.
 *
 * Without a '+', an input value will override the corresponding value in the config file.
 *
 * @param inputValue The input value to process.
 * @returns true if the input value should be combined with (appended to) the
 *          corresponding value in the config file, or false if it should
 *          replace that value.
 */
function shouldCombine(inputValue?: string): boolean {
  return !!inputValue?.trim().startsWith("+");
}

export type Packs = Partial<Record<Language, string[]>>;

export interface Pack {
  name: string;
  version?: string;
  path?: string;
}

/**
 * Pack names must be in the form of `scope/name`, with only alpha-numeric characters,
 * and `-` allowed as long as not the first or last char.
 **/
const PACK_IDENTIFIER_PATTERN = (function () {
  const alphaNumeric = "[a-z0-9]";
  const alphaNumericDash = "[a-z0-9-]";
  const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
  return new RegExp(`^${component}/${component}$`);
})();

/**
 * Validates that this package specification is syntactically correct.
 * It may not point to any real package, but after this function returns
 * without throwing, we are guaranteed that the package specification
 * is roughly correct.
 *
 * The CLI itself will do a more thorough validation of the package
 * specification.
 *
 * A package specification looks like this:
 *
 * `scope/name@version:path`
 *
 * Version and path are optional.
 *
 * @param packStr the package specification to verify.
 */
export function parsePacksSpecification(packStr: string): Pack {
  if (typeof packStr !== "string") {
    throw new ConfigurationError(errorMessages.getPacksStrInvalid(packStr));
  }

  packStr = packStr.trim();
  const atIndex = packStr.indexOf("@");
  const colonIndex = packStr.indexOf(":", atIndex);
  const packStart = 0;
  const versionStart = atIndex + 1 || undefined;
  const pathStart = colonIndex + 1 || undefined;
  const packEnd = Math.min(
    atIndex > 0 ? atIndex : Infinity,
    colonIndex > 0 ? colonIndex : Infinity,
    packStr.length,
  );
  const versionEnd = versionStart
    ? Math.min(colonIndex > 0 ? colonIndex : Infinity, packStr.length)
    : undefined;
  const pathEnd = pathStart ? packStr.length : undefined;

  const packName = packStr.slice(packStart, packEnd).trim();
  const version = versionStart
    ? packStr.slice(versionStart, versionEnd).trim()
    : undefined;
  const packPath = pathStart
    ? packStr.slice(pathStart, pathEnd).trim()
    : undefined;

  if (!PACK_IDENTIFIER_PATTERN.test(packName)) {
    throw new ConfigurationError(errorMessages.getPacksStrInvalid(packStr));
  }
  if (version) {
    try {
      new semver.Range(version);
    } catch {
      // The range string is invalid. OK to ignore the caught error
      throw new ConfigurationError(errorMessages.getPacksStrInvalid(packStr));
    }
  }

  if (
    packPath &&
    (path.isAbsolute(packPath) ||
      // Permit using "/" instead of "\" on Windows
      // Use `x.split(y).join(z)` as a polyfill for `x.replaceAll(y, z)` since
      // if we used a regex we'd need to escape the path separator on Windows
      // which seems more awkward.
      path.normalize(packPath).split(path.sep).join("/") !==
        packPath.split(path.sep).join("/"))
  ) {
    throw new ConfigurationError(errorMessages.getPacksStrInvalid(packStr));
  }

  if (!packPath && pathStart) {
    // 0 length path
    throw new ConfigurationError(errorMessages.getPacksStrInvalid(packStr));
  }

  return {
    name: packName,
    version,
    path: packPath,
  };
}

export function validatePackSpecification(pack: string) {
  return prettyPrintPack(parsePacksSpecification(pack));
}

// Exported for testing
export function parsePacksFromInput(
  rawPacksInput: string | undefined,
  languages: Language[],
  packsInputCombines: boolean,
): Packs | undefined {
  if (!rawPacksInput?.trim()) {
    return undefined;
  }

  if (languages.length > 1) {
    throw new ConfigurationError(
      "Cannot specify a 'packs' input in a multi-language analysis. Use a codeql-config.yml file instead and specify packs by language.",
    );
  } else if (languages.length === 0) {
    throw new ConfigurationError(
      "No languages specified. Cannot process the packs input.",
    );
  }

  rawPacksInput = rawPacksInput.trim();
  if (packsInputCombines) {
    rawPacksInput = rawPacksInput.trim().substring(1).trim();
    if (!rawPacksInput) {
      throw new ConfigurationError(
        errorMessages.getConfigFilePropertyError(
          undefined,
          "packs",
          "A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs.",
        ),
      );
    }
  }

  return {
    [languages[0]]: rawPacksInput.split(",").reduce((packs, pack) => {
      packs.push(validatePackSpecification(pack));
      return packs;
    }, [] as string[]),
  };
}

/**
 * Calculates how the codeql config file needs to be augmented before passing
 * it to the CLI. The reason this is necessary is the codeql-action can be called
 * with extra inputs from the workflow. These inputs are not part of the config
 * and the CLI does not know about these inputs so we need to inject them into
 * the config file sent to the CLI.
 *
 * @param rawPacksInput The packs input from the action configuration.
 * @param rawQueriesInput The queries input from the action configuration.
 * @param languages The languages that the config file is for. If the packs input
 *    is non-empty, then there must be exactly one language. Otherwise, an
 *    error is thrown.
 *
 * @returns The properties that need to be augmented in the config file.
 *
 * @throws An error if the packs input is non-empty and the languages input does
 *    not have exactly one language.
 */
// exported for testing.
export async function calculateAugmentation(
  rawPacksInput: string | undefined,
  rawQueriesInput: string | undefined,
  languages: Language[],
): Promise<AugmentationProperties> {
  const packsInputCombines = shouldCombine(rawPacksInput);
  const packsInput = parsePacksFromInput(
    rawPacksInput,
    languages,
    packsInputCombines,
  );
  const queriesInputCombines = shouldCombine(rawQueriesInput);
  const queriesInput = parseQueriesFromInput(
    rawQueriesInput,
    queriesInputCombines,
  );

  return {
    packsInputCombines,
    packsInput: packsInput?.[languages[0]],
    queriesInput,
    queriesInputCombines,
  };
}

function parseQueriesFromInput(
  rawQueriesInput: string | undefined,
  queriesInputCombines: boolean,
) {
  if (!rawQueriesInput) {
    return undefined;
  }

  const trimmedInput = queriesInputCombines
    ? rawQueriesInput.trim().slice(1).trim()
    : (rawQueriesInput?.trim() ?? "");
  if (queriesInputCombines && trimmedInput.length === 0) {
    throw new ConfigurationError(
      errorMessages.getConfigFilePropertyError(
        undefined,
        "queries",
"A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs.",
|
||||
      ),
    );
  }
  return trimmedInput.split(",").map((query) => ({ uses: query.trim() }));
}

export function generateCodeScanningConfig(
  originalUserInput: UserConfig,
  augmentationProperties: AugmentationProperties,
): UserConfig {
  // make a copy so we can modify it
  const augmentedConfig = cloneObject(originalUserInput);

  // Inject the queries from the input
  if (augmentationProperties.queriesInput) {
    if (augmentationProperties.queriesInputCombines) {
      augmentedConfig.queries = (augmentedConfig.queries || []).concat(
        augmentationProperties.queriesInput,
      );
    } else {
      augmentedConfig.queries = augmentationProperties.queriesInput;
    }
  }
  if (augmentedConfig.queries?.length === 0) {
    delete augmentedConfig.queries;
  }

  // Inject the packs from the input
  if (augmentationProperties.packsInput) {
    if (augmentationProperties.packsInputCombines) {
      // At this point, we already know that this is a single-language analysis
      if (Array.isArray(augmentedConfig.packs)) {
        augmentedConfig.packs = (augmentedConfig.packs || []).concat(
          augmentationProperties.packsInput,
        );
      } else if (!augmentedConfig.packs) {
        augmentedConfig.packs = augmentationProperties.packsInput;
      } else {
        // At this point, we know there is only one language.
        // If there were more than one language, an error would already have been thrown.
        const language = Object.keys(augmentedConfig.packs)[0];
        augmentedConfig.packs[language] = augmentedConfig.packs[
          language
        ].concat(augmentationProperties.packsInput);
      }
    } else {
      augmentedConfig.packs = augmentationProperties.packsInput;
    }
  }
  if (Array.isArray(augmentedConfig.packs) && !augmentedConfig.packs.length) {
    delete augmentedConfig.packs;
  }

  return augmentedConfig;
}
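Taken together, db-config.ts implements both the `scope/name@version:path` pack syntax and the '+' combining convention described in the comments above. A rough usage sketch (illustrative only; it assumes KnownLanguage is exported from ./languages as in the tests, and that prettyPrintPack returns the canonical pack string):

import * as dbConfig from "./config/db-config";
import { KnownLanguage } from "./languages"; // assumed export, as used in the tests above

async function example() {
  // Pack specification: scope/name with optional @version and :path.
  const pack = dbConfig.parsePacksSpecification(
    "codeql/javascript-queries@~0.0.1:suites",
  );
  // pack.name === "codeql/javascript-queries", pack.version === "~0.0.1", pack.path === "suites"

  // A '+' prefix combines the input with the config file; no prefix overrides it.
  const augmentation = await dbConfig.calculateAugmentation(
    "+codeql/extra-pack", // packs input: combine with the config's packs
    "security-extended", // queries input: override the config's queries
    [KnownLanguage.javascript],
  );

  const merged = dbConfig.generateCodeScanningConfig(
    { packs: ["codeql/base-pack"] }, // user config
    augmentation,
  );
  // merged.packs: ["codeql/base-pack", "codeql/extra-pack"]
  // merged.queries: [{ uses: "security-extended" }]
  return merged;
}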
66 src/error-messages.ts Normal file
@@ -0,0 +1,66 @@
const PACKS_PROPERTY = "packs";

export function getConfigFileOutsideWorkspaceErrorMessage(
  configFile: string,
): string {
  return `The configuration file "${configFile}" is outside of the workspace`;
}

export function getConfigFileDoesNotExistErrorMessage(
  configFile: string,
): string {
  return `The configuration file "${configFile}" does not exist`;
}

export function getConfigFileRepoFormatInvalidMessage(
  configFile: string,
): string {
  let error = `The configuration file "${configFile}" is not a supported remote file reference.`;
  error += " Expected format <owner>/<repository>/<file-path>@<ref>";

  return error;
}

export function getConfigFileFormatInvalidMessage(configFile: string): string {
  return `The configuration file "${configFile}" could not be read`;
}

export function getConfigFileDirectoryGivenMessage(configFile: string): string {
  return `The configuration file "${configFile}" looks like a directory, not a file`;
}

export function getConfigFilePropertyError(
  configFile: string | undefined,
  property: string,
  error: string,
): string {
  if (configFile === undefined) {
    return `The workflow property "${property}" is invalid: ${error}`;
  } else {
    return `The configuration file "${configFile}" is invalid: property "${property}" ${error}`;
  }
}

export function getPacksStrInvalid(
  packStr: string,
  configFile?: string,
): string {
  return configFile
    ? getConfigFilePropertyError(
        configFile,
        PACKS_PROPERTY,
        `"${packStr}" is not a valid pack`,
      )
    : `"${packStr}" is not a valid pack`;
}

export function getNoLanguagesError(): string {
  return (
    "Did not detect any languages to analyze. " +
    "Please update input in workflow or check that GitHub detects the correct languages in your repository."
  );
}

export function getUnknownLanguagesError(languages: string[]): string {
  return `Did not recognize the following languages: ${languages.join(", ")}`;
}
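The two branches of getConfigFilePropertyError produce differently worded messages; a small sketch of both, derived directly from the function above:

import * as errorMessages from "./error-messages";

// No config file: the message points at the workflow property.
console.log(
  errorMessages.getConfigFilePropertyError(undefined, "packs", "no packs were specified"),
);
// The workflow property "packs" is invalid: no packs were specified

// With a config file: the message points at the file and the property.
console.log(
  errorMessages.getConfigFilePropertyError(
    "codeql-config.yml",
    "packs",
    '"a-pack" is not a valid pack',
  ),
);
// The configuration file "codeql-config.yml" is invalid: property "packs" "a-pack" is not a valid pack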
@@ -10,7 +10,11 @@ import { type CodeQL } from "./codeql";
import { type Config } from "./config-utils";
import { getCommitOid, getFileOidsUnderPath } from "./git-utils";
import { Logger, withGroupAsync } from "./logging";
import { isInTestMode, tryGetFolderBytes, withTimeout } from "./util";
import {
  isInTestMode,
  tryGetFolderBytes,
  waitForResultWithTimeLimit,
} from "./util";

export enum OverlayDatabaseMode {
  Overlay = "overlay",
@@ -154,7 +158,12 @@ function computeChangedFiles(
// Constants for database caching
const CACHE_VERSION = 1;
const CACHE_PREFIX = "codeql-overlay-base-database";
const MAX_CACHE_OPERATION_MS = 120_000; // Two minutes

// The purpose of this ten-minute limit is to guard against the possibility
// that the cache service is unresponsive, which would otherwise cause the
// entire action to hang. Normally we expect cache operations to complete
// within two minutes.
const MAX_CACHE_OPERATION_MS = 600_000;

/**
 * Checks that the overlay-base database is valid by checking for the
@@ -268,7 +277,7 @@ export async function uploadOverlayBaseDatabaseToCache(
  );

  try {
    const cacheId = await withTimeout(
    const cacheId = await waitForResultWithTimeLimit(
      MAX_CACHE_OPERATION_MS,
      actionsCache.saveCache([dbLocation], cacheSaveKey),
      () => {},
@@ -346,9 +355,39 @@ export async function downloadOverlayBaseDatabaseFromCache(
  let databaseDownloadDurationMs = 0;
  try {
    const databaseDownloadStart = performance.now();
    const foundKey = await withTimeout(
    const foundKey = await waitForResultWithTimeLimit(
      // This ten-minute limit for the cache restore operation is mainly to
      // guard against the possibility that the cache service is unresponsive
      // and hangs outside the data download.
      //
      // Data download (which is normally the most time-consuming part of the
      // restore operation) should not run long enough to hit this limit. Even
      // for an extremely large 10GB database, at a download speed of 40MB/s
      // (see below), the download should complete within five minutes. If we
      // do hit this limit, there are likely more serious problems other than
      // mere slow download speed.
      //
      // This is important because we don't want any ongoing file operations
      // on the database directory when we do hit this limit. Hitting this
      // time limit takes us to a fallback path where we re-initialize the
      // database from scratch at dbLocation, and having the cache restore
      // operation continue to write into dbLocation in the background would
      // really mess things up. We want to hit this limit only in the case
      // of a hung cache service, not just slow download speed.
      MAX_CACHE_OPERATION_MS,
      actionsCache.restoreCache([dbLocation], cacheRestoreKeyPrefix),
      actionsCache.restoreCache(
        [dbLocation],
        cacheRestoreKeyPrefix,
        undefined,
        {
          // Azure SDK download (which is the default) uses 128MB segments; see
          // https://github.com/actions/toolkit/blob/main/packages/cache/README.md.
          // Setting segmentTimeoutInMs to 3000 translates to segment download
          // speed of about 40 MB/s, which should be achievable unless the
          // download is unreliable (in which case we do want to abort).
          segmentTimeoutInMs: 3000,
        },
      ),
      () => {
        logger.info("Timed out downloading overlay-base database from cache");
      },
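The 40 MB/s figure in the comment above is just the 128 MB segment size divided by the 3-second segment timeout; a quick arithmetic check (illustrative only):

// 128 MB segments with a 3000 ms per-segment timeout require roughly:
const segmentSizeMb = 128;
const segmentTimeoutMs = 3000;
const requiredMbPerSecond = segmentSizeMb / (segmentTimeoutMs / 1000); // ≈ 42.7 MB/s

// At that speed, even a 10 GB overlay-base database downloads in about four
// minutes, comfortably inside the ten-minute MAX_CACHE_OPERATION_MS cap.
const downloadSeconds = (10 * 1024) / requiredMbPerSecond; // ≈ 240 s
console.log(requiredMbPerSecond, downloadSeconds);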
@@ -33,8 +33,11 @@ export enum ToolsSource {
}

export const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
const CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing";
const CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";

const CODEQL_BUNDLE_VERSION_ALIAS: string[] = ["linked", "latest"];
const CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"];

function getCodeQLBundleExtension(
  compressionMethod: tar.CompressionMethod,
@@ -276,7 +279,7 @@ export async function getCodeQLSource(
): Promise<CodeQLToolsSource> {
  if (
    toolsInput &&
    !CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput) &&
    !isReservedToolsValue(toolsInput) &&
    !toolsInput.startsWith("http")
  ) {
    logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
@@ -331,6 +334,16 @@ export async function getCodeQLSource(
   */
  let url: string | undefined;

  if (
    toolsInput !== undefined &&
    CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput)
  ) {
    logger.info(
      `Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.`,
    );
    toolsInput = await getNightlyToolsUrl(logger);
  }

  if (forceShippedTools) {
    cliVersion = defaults.cliVersion;
    tagName = defaults.bundleVersion;
@@ -771,3 +784,46 @@ async function useZstdBundle(
function getTempExtractionDir(tempDir: string) {
  return path.join(tempDir, uuidV4());
}

/**
 * Get the URL of the latest nightly CodeQL bundle.
 */
async function getNightlyToolsUrl(logger: Logger) {
  const zstdAvailability = await tar.isZstdAvailable(logger);
  // The nightly is guaranteed to have a zstd bundle
  const compressionMethod = (await useZstdBundle(
    CODEQL_VERSION_ZSTD_BUNDLE,
    zstdAvailability.available,
  ))
    ? "zstd"
    : "gzip";

  try {
    // Since nightlies are prereleases, we can't just download the latest release
    // on the repository. So instead we need to find the latest pre-release
    // version and construct the download URL from that.
    const release = await api.getApiClient().rest.repos.listReleases({
      owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
      repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
      per_page: 1,
      page: 1,
      prerelease: true,
    });
    const latestRelease = release.data[0];
    if (!latestRelease) {
      throw new Error("Could not find the latest nightly release.");
    }
    return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`;
  } catch (e) {
    throw new Error(
      `Failed to retrieve the latest nightly release: ${util.wrapError(e)}`,
    );
  }
}

function isReservedToolsValue(tools: string): boolean {
  return (
    CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) ||
    CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools)
  );
}
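For orientation, getNightlyToolsUrl ultimately produces a release-asset URL of the following shape. Everything below is a placeholder sketch: the tag and bundle file name are invented, and getCodeQLBundleName is only assumed to return a name like this.

// Hypothetical values, for illustration only.
const owner = "dsp-testing"; // CODEQL_NIGHTLIES_REPOSITORY_OWNER
const repo = "codeql-cli-nightlies"; // CODEQL_NIGHTLIES_REPOSITORY_NAME
const tagName = "codeql-bundle-vX.Y.Z-nightly"; // latestRelease.tag_name (placeholder)
const bundleName = "codeql-bundle-linux64.tar.zst"; // assumed result of getCodeQLBundleName("zstd")

const url = `https://github.com/${owner}/${repo}/releases/download/${tagName}/${bundleName}`;
console.log(url);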
@@ -16,7 +16,7 @@ import {
  getErrorMessage,
  isHTTPError,
  tryGetFolderBytes,
  withTimeout,
  waitForResultWithTimeLimit,
} from "./util";

// This constant should be bumped if we make a breaking change
@@ -96,7 +96,7 @@ export async function downloadTrapCaches(
    logger.info(
      `Looking in Actions cache for TRAP cache with key ${preferredKey}`,
    );
    const found = await withTimeout(
    const found = await waitForResultWithTimeLimit(
      MAX_CACHE_OPERATION_MS,
      actionsCache.restoreCache([cacheDir], preferredKey, [
        // Fall back to any cache with the right key prefix
@@ -156,7 +156,7 @@ export async function uploadTrapCaches(
      process.env.GITHUB_SHA || "unknown",
    );
    logger.info(`Uploading TRAP cache to Actions cache with key ${key}`);
    await withTimeout(
    await waitForResultWithTimeLimit(
      MAX_CACHE_OPERATION_MS,
      actionsCache.saveCache([cacheDir], key),
      () => {
@@ -32,6 +32,55 @@ interface UploadSarifStatusReport
  extends StatusReportBase,
    upload_lib.UploadStatusReport {}

/**
 * Searches for SARIF files for the given `analysis` in the given `sarifPath`.
 * If any are found, then they are uploaded to the appropriate endpoint for the given `analysis`.
 *
 * @param logger The logger to use.
 * @param features Information about the enabled feature flags (FFs).
 * @param sarifPath The path to a SARIF file or directory containing SARIF files.
 * @param pathStats Information about `sarifPath`.
 * @param checkoutPath The checkout path.
 * @param analysis The configuration of the analysis we should upload SARIF files for.
 * @param category The SARIF category to use for the upload.
 * @returns The result of uploading the SARIF file(s) or `undefined` if there are none.
 */
async function findAndUpload(
  logger: Logger,
  features: Features,
  sarifPath: string,
  pathStats: fs.Stats,
  checkoutPath: string,
  analysis: analyses.AnalysisConfig,
  category?: string,
): Promise<upload_lib.UploadResult | undefined> {
  let sarifFiles: string[] | undefined;

  if (pathStats.isDirectory()) {
    sarifFiles = upload_lib.findSarifFilesInDir(
      sarifPath,
      analysis.sarifPredicate,
    );
  } else if (pathStats.isFile() && analysis.sarifPredicate(sarifPath)) {
    sarifFiles = [sarifPath];
  } else {
    return undefined;
  }

  if (sarifFiles.length !== 0) {
    return await upload_lib.uploadSpecifiedFiles(
      sarifFiles,
      checkoutPath,
      category,
      features,
      logger,
      analysis,
    );
  }

  return undefined;
}

async function sendSuccessStatusReport(
  startedAt: Date,
  uploadStats: upload_lib.UploadStatusReport,
@@ -86,54 +135,71 @@ async function run() {
  }

  try {
    // `sarifPath` can either be a path to a single file, or a path to a directory.
    const sarifPath = actionsUtil.getRequiredInput("sarif_file");
    const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
    const category = actionsUtil.getOptionalInput("category");
    const pathStats = fs.lstatSync(sarifPath, { throwIfNoEntry: false });

    const uploadResult = await upload_lib.uploadFiles(
      sarifPath,
      checkoutPath,
      category,
      features,
    if (pathStats === undefined) {
      throw new ConfigurationError(`Path does not exist: ${sarifPath}.`);
    }

    const sarifIds: Array<{ analysis: string; id: string }> = [];
    const uploadResult = await findAndUpload(
      logger,
      features,
      sarifPath,
      pathStats,
      checkoutPath,
      analyses.CodeScanning,
      category,
    );
    core.setOutput("sarif-id", uploadResult.sarifID);
    if (uploadResult !== undefined) {
      core.setOutput("sarif-id", uploadResult.sarifID);
      sarifIds.push({
        analysis: analyses.AnalysisKind.CodeScanning,
        id: uploadResult.sarifID,
      });
    }

    // If there are `.quality.sarif` files in `sarifPath`, then upload those to the code quality service.
    // Code quality can currently only be enabled on top of security, so we'd currently always expect to
    // have a directory for the results here.
    if (fs.lstatSync(sarifPath).isDirectory()) {
      const qualitySarifFiles = upload_lib.findSarifFilesInDir(
        sarifPath,
        analyses.CodeQuality.sarifPredicate,
      );

      if (qualitySarifFiles.length !== 0) {
        await upload_lib.uploadSpecifiedFiles(
          qualitySarifFiles,
          checkoutPath,
          actionsUtil.fixCodeQualityCategory(logger, category),
          features,
          logger,
          analyses.CodeQuality,
        );
      }
    const qualityUploadResult = await findAndUpload(
      logger,
      features,
      sarifPath,
      pathStats,
      checkoutPath,
      analyses.CodeQuality,
      actionsUtil.fixCodeQualityCategory(logger, category),
    );
    if (qualityUploadResult !== undefined) {
      sarifIds.push({
        analysis: analyses.AnalysisKind.CodeQuality,
        id: qualityUploadResult.sarifID,
      });
    }
    core.setOutput("sarif-ids", JSON.stringify(sarifIds));

    // We don't upload results in test mode, so don't wait for processing
    if (isInTestMode()) {
      core.debug("In test mode. Waiting for processing is disabled.");
    } else if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
      await upload_lib.waitForProcessing(
        getRepositoryNwo(),
        uploadResult.sarifID,
        logger,
      );
      if (uploadResult !== undefined) {
        await upload_lib.waitForProcessing(
          getRepositoryNwo(),
          uploadResult.sarifID,
          logger,
        );
      }
      // The code quality service does not currently have an endpoint to wait for SARIF processing,
      // so we can't wait for that here.
    }
    await sendSuccessStatusReport(startedAt, uploadResult.statusReport, logger);
    await sendSuccessStatusReport(
      startedAt,
      uploadResult?.statusReport || {},
      logger,
    );
  } catch (unwrappedError) {
    const error =
      isThirdPartyAnalysis(ActionName.UploadSarif) &&
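Downstream consumers of the new sarif-ids output can recover the individual IDs by parsing the stringified value. A sketch of that follows; it assumes the array-of-objects shape built by run() above, and the "code-scanning" / "code-quality" strings are example values for the analysis kinds.

interface SarifIdEntry {
  analysis: string; // e.g. "code-scanning" or "code-quality"
  id: string;
}

// `output` would be the value of steps.<id>.outputs.sarif-ids in a workflow.
function parseSarifIds(output: string): SarifIdEntry[] {
  return JSON.parse(output) as SarifIdEntry[];
}

const entries = parseSarifIds('[{"analysis":"code-scanning","id":"example-id"}]');
const codeScanningId = entries.find((e) => e.analysis === "code-scanning")?.id;
console.log(codeScanningId); // "example-id"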
@@ -297,7 +297,7 @@ test("listFolder", async (t) => {
const longTime = 999_999;
const shortTime = 10;

test("withTimeout on long task", async (t) => {
test("waitForResultWithTimeLimit on long task", async (t) => {
  let longTaskTimedOut = false;
  const longTask = new Promise((resolve) => {
    const timer = setTimeout(() => {
@@ -305,35 +305,43 @@ test("withTimeout on long task", async (t) => {
    }, longTime);
    t.teardown(() => clearTimeout(timer));
  });
  const result = await util.withTimeout(shortTime, longTask, () => {
    longTaskTimedOut = true;
  });
  const result = await util.waitForResultWithTimeLimit(
    shortTime,
    longTask,
    () => {
      longTaskTimedOut = true;
    },
  );
  t.deepEqual(longTaskTimedOut, true);
  t.deepEqual(result, undefined);
});

test("withTimeout on short task", async (t) => {
test("waitForResultWithTimeLimit on short task", async (t) => {
  let shortTaskTimedOut = false;
  const shortTask = new Promise((resolve) => {
    setTimeout(() => {
      resolve(99);
    }, shortTime);
  });
  const result = await util.withTimeout(longTime, shortTask, () => {
    shortTaskTimedOut = true;
  });
  const result = await util.waitForResultWithTimeLimit(
    longTime,
    shortTask,
    () => {
      shortTaskTimedOut = true;
    },
  );
  t.deepEqual(shortTaskTimedOut, false);
  t.deepEqual(result, 99);
});

test("withTimeout doesn't call callback if promise resolves", async (t) => {
test("waitForResultWithTimeLimit doesn't call callback if promise resolves", async (t) => {
  let shortTaskTimedOut = false;
  const shortTask = new Promise((resolve) => {
    setTimeout(() => {
      resolve(99);
    }, shortTime);
  });
  const result = await util.withTimeout(100, shortTask, () => {
  const result = await util.waitForResultWithTimeLimit(100, shortTask, () => {
    shortTaskTimedOut = true;
  });
  await new Promise((r) => setTimeout(r, 200));
@@ -864,7 +864,7 @@ let hadTimeout = false;
 * @param onTimeout A callback to call if the promise times out.
 * @returns The result of the promise, or undefined if the promise times out.
 */
export async function withTimeout<T>(
export async function waitForResultWithTimeLimit<T>(
  timeoutMs: number,
  promise: Promise<T>,
  onTimeout: () => void,
@@ -894,7 +894,7 @@ export async function withTimeout<T>(
 * Check if the global hadTimeout variable has been set, and if so then
 * exit the process to ensure any background tasks that are still running
 * are killed. This should be called at the end of execution if the
 * `withTimeout` function has been used.
 * `waitForResultWithTimeLimit` function has been used.
 */
export async function checkForTimeout() {
  if (hadTimeout === true) {
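The rename preserves the contract the tests above exercise: the wrapped promise's value is returned if it settles within the limit, otherwise undefined is returned and the callback fires, and checkForTimeout should run at the end of execution. A minimal usage sketch (everything other than the two util exports is illustrative):

import { waitForResultWithTimeLimit, checkForTimeout } from "./util";

async function restoreWithLimit(): Promise<string | undefined> {
  // Resolves to "restored" after 50 ms; the 10 ms limit below wins.
  const slowTask = new Promise<string>((resolve) =>
    setTimeout(() => resolve("restored"), 50),
  );
  return waitForResultWithTimeLimit(10, slowTask, () => {
    console.log("timed out; falling back");
  });
}

async function main() {
  const result = await restoreWithLimit(); // undefined, because the limit was hit
  console.log(result);
  // At the end of execution, exit if anything timed out so that background
  // work left behind by the timed-out promise is killed.
  await checkForTimeout();
}

void main();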
@@ -14,7 +14,7 @@ inputs:
    required: false
    default: ${{ github.workspace }}
  ref:
    description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is ignored for pull requests from forks."
    description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is ignored for pull requests from forks. Expected format: refs/heads/<branch name>, refs/tags/<tag>, refs/pull/<number>/merge, or refs/pull/<number>/head."
    required: false
  sha:
    description: "The sha of the HEAD of the ref where results will be uploaded. If not provided, the Action will use the GITHUB_SHA environment variable. If provided, the ref input must be provided as well. This input is ignored for pull requests from forks."
@@ -34,7 +34,12 @@ inputs:
    default: "true"
outputs:
  sarif-id:
    description: The ID of the uploaded SARIF file.
    description: The ID of the uploaded Code Scanning SARIF file, if any.
  sarif-ids:
    description: |
      A stringified JSON array containing the SARIF ID for each kind of analysis that was uploaded. For example:

      [ { "analysis": "code-scanning", "id": "some-id" }, { "analysis": "code-quality", "id": "some-other-id" } ]
runs:
  using: node20
  main: '../lib/upload-sarif-action.js'