Mirror of https://github.com/github/codeql-action.git (synced 2025-12-17 21:09:40 +08:00)

Compare commits: 75 commits, codeql-bun...codeql-bun
| Author | SHA1 | Date |
|---|---|---|
| | cdcdbb5797 | |
| | 8b0f2cf9da | |
| | a35a881b65 | |
| | d8667207b6 | |
| | 926a4898bc | |
| | 5c63cc5b1c | |
| | 30a3b9a904 | |
| | 83f0fe6c49 | |
| | 5c8f4be0e9 | |
| | 96f2840282 | |
| | dfc31c9995 | |
| | 019a40b91a | |
| | ae005db7f8 | |
| | 89c4c9e65c | |
| | 26f16a5e63 | |
| | 955f8596ae | |
| | e7cff66ce1 | |
| | bf419682de | |
| | afdba76326 | |
| | 07e43a2208 | |
| | 9632771630 | |
| | 9d2dd7cfea | |
| | d427c89ed7 | |
| | 125ff5530c | |
| | 86ead5e019 | |
| | eb1c7a3887 | |
| | 6bd8101752 | |
| | 2408985f4e | |
| | f8b1cb6997 | |
| | 2d031a36d6 | |
| | 1ba7713018 | |
| | 339e0d5afb | |
| | 0225834cc5 | |
| | 15f9b00614 | |
| | ff82fd0736 | |
| | d3314cca22 | |
| | 42add7b4d7 | |
| | 9c5706e1a2 | |
| | 3912995667 | |
| | 8d7f61b8f2 | |
| | 50bc388cfc | |
| | 4a409ace8f | |
| | 41499f5466 | |
| | 1023a086ae | |
| | cc5f2fb439 | |
| | 789f65c9ee | |
| | a5879b7b6e | |
| | 3da4cbfc79 | |
| | 5f061ca665 | |
| | 11ea309db5 | |
| | 1319d54f85 | |
| | 59d27da76b | |
| | f0e3dfb303 | |
| | dba4f66682 | |
| | 8f9b20ba50 | |
| | 0d65621757 | |
| | c3ae9dcd15 | |
| | 570734c55c | |
| | 65920dd33a | |
| | 60f5c59630 | |
| | 0962265901 | |
| | 143b5fb429 | |
| | 8c923c00a3 | |
| | 34e8e09ae4 | |
| | 4f41ff7fc8 | |
| | 636b9eab1d | |
| | 153cab09da | |
| | dddabd0d26 | |
| | 3100e1e354 | |
| | 6e92b190d0 | |
| | 292bb7c0b9 | |
| | eac5e24aee | |
| | 8065746a2a | |
| | abb267d186 | |
| | 9953504776 | |
21  .github/actions/setup-swift/action.yml  (vendored)
@@ -1,5 +1,5 @@
name: "Set up Swift"
description: Sets up an appropriate Swift version if Swift is enabled via CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT.
description: Sets up an appropriate Swift version if supported on this platform.
inputs:
codeql-path:
description: Path to the CodeQL CLI executable.
@@ -9,24 +9,29 @@ runs:
steps:
- name: Get Swift version
id: get_swift_version
if: env.CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT == 'true'
if: runner.os != 'Windows'
shell: bash
env:
CODEQL_PATH: ${{ inputs.codeql-path }}
run: |
if [ $RUNNER_OS = "macOS" ]; then
if [[ $RUNNER_OS = "macOS" ]]; then
PLATFORM="osx64"
else # We do not run this step on Windows.
PLATFORM="linux64"
fi
SWIFT_EXTRACTOR_DIR="$("$CODEQL_PATH" resolve languages --format json | jq -r '.swift[0]')"
VERSION="$("$SWIFT_EXTRACTOR_DIR/tools/$PLATFORM/extractor" --version | awk '/version/ { print $3 }')"
# Specify 5.7.0, otherwise setup Action will default to latest minor version.
if [ $VERSION = "5.7" ]; then
VERSION="5.7.0"
if [ $SWIFT_EXTRACTOR_DIR = "null" ]; then
VERSION="null"
else
VERSION="$("$SWIFT_EXTRACTOR_DIR/tools/$PLATFORM/extractor" --version | awk '/version/ { print $3 }')"
# Specify 5.7.0, otherwise setup Action will default to latest minor version.
if [ $VERSION = "5.7" ]; then
VERSION="5.7.0"
fi
fi
echo "version=$VERSION" | tee -a $GITHUB_OUTPUT

- uses: swift-actions/setup-swift@65540b95f51493d65f5e59e97dcef9629ddf11bf # Please update the corresponding SHA in the CLI's CodeQL Action Integration Test.
if: env.CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT == 'true'
if: runner.os != 'Windows' && steps.get_swift_version.outputs.version != 'null'
with:
swift-version: "${{ steps.get_swift_version.outputs.version }}"
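Taken together, the revised detection step amounts to the shell logic below. This is a sketch, not the literal action step: it assumes `CODEQL_PATH` points at a working CodeQL CLI and that `jq` and `awk` are available (as they are on hosted runners), and it is only meant to show how the new "null" handling skips version detection when the CLI ships no Swift extractor.

```bash
#!/usr/bin/env bash
# Sketch of the revised "Get Swift version" step (assumes CODEQL_PATH is set).
set -euo pipefail

if [[ "$RUNNER_OS" = "macOS" ]]; then
  PLATFORM="osx64"
else # The step never runs on Windows, so anything else is Linux.
  PLATFORM="linux64"
fi

# Ask the CLI where its Swift extractor lives; "null" means Swift is unsupported here.
SWIFT_EXTRACTOR_DIR="$("$CODEQL_PATH" resolve languages --format json | jq -r '.swift[0]')"

if [[ "$SWIFT_EXTRACTOR_DIR" = "null" ]]; then
  VERSION="null"
else
  VERSION="$("$SWIFT_EXTRACTOR_DIR/tools/$PLATFORM/extractor" --version | awk '/version/ { print $3 }')"
  # Pin 5.7 to 5.7.0 so the setup action does not default to the latest 5.7.x.
  if [[ "$VERSION" = "5.7" ]]; then
    VERSION="5.7.0"
  fi
fi

echo "version=$VERSION" | tee -a "$GITHUB_OUTPUT"
```

The downstream `swift-actions/setup-swift` step is then skipped whenever this prints `version=null`.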
10  .github/workflows/__analyze-ref-input.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: "PR Check - Analyze: 'ref' and 'sha' from inputs"
@@ -68,6 +68,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: "Analyze: 'ref' and 'sha' from inputs"
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -82,10 +85,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__autobuild-action.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - autobuild-action
@@ -32,6 +32,9 @@ jobs:
- os: windows-latest
version: latest
name: autobuild-action
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -46,10 +49,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__config-export.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Config export
@@ -38,6 +38,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: Config export
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -52,10 +55,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__diagnostics-export.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Diagnostic export
@@ -44,6 +44,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: Diagnostic export
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -58,10 +61,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__export-file-baseline-information.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Export file baseline information
@@ -32,6 +32,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: Export file baseline information
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -46,10 +49,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__extractor-ram-threads.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Extractor ram and threads options test
@@ -28,6 +28,9 @@ jobs:
- os: ubuntu-latest
version: latest
name: Extractor ram and threads options test
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -42,10 +45,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__go-custom-queries.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Go: Custom queries'
@@ -68,6 +68,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: 'Go: Custom queries'
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -82,10 +85,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__go-tracing-autobuilder.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Go: tracing with autobuilder step'
@@ -54,6 +54,9 @@ jobs:
- os: macos-latest
version: nightly-latest
name: 'Go: tracing with autobuilder step'
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -68,10 +71,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__go-tracing-custom-build-steps.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Go: tracing with custom build steps'
@@ -54,6 +54,9 @@ jobs:
- os: macos-latest
version: nightly-latest
name: 'Go: tracing with custom build steps'
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -68,10 +71,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__go-tracing-legacy-workflow.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Go: tracing with legacy workflow'
@@ -54,6 +54,9 @@ jobs:
- os: macos-latest
version: nightly-latest
name: 'Go: tracing with legacy workflow'
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -68,10 +71,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
15  .github/workflows/__init-with-registries.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Packaging: Download using registries'
@@ -44,6 +44,10 @@ jobs:
- os: windows-latest
version: nightly-latest
name: 'Packaging: Download using registries'
permissions:
contents: read
packages: read

timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -58,10 +62,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
@@ -128,9 +129,5 @@ jobs:
cat $QLCONFIG_PATH
exit 1
fi
permissions:
contents: read
packages: read

env:
CODEQL_ACTION_TEST_MODE: true
10  .github/workflows/__javascript-source-root.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Custom source root
@@ -32,6 +32,9 @@ jobs:
- os: ubuntu-latest
version: nightly-latest
name: Custom source root
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -46,10 +49,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__ml-powered-queries.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - ML-powered queries
@@ -68,6 +68,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: ML-powered queries
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -82,10 +85,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
21  .github/workflows/__multi-language-autodetect.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Multi-language repository
@@ -54,6 +54,9 @@ jobs:
- os: macos-latest
version: nightly-latest
name: Multi-language repository
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -68,10 +71,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
@@ -94,7 +94,7 @@ jobs:
with:
upload-database: false

- name: Check language autodetect for all languages excluding Ruby, Swift
- name: Check language autodetect for all languages excluding Swift
shell: bash
run: |
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
@@ -127,11 +127,6 @@ jobs:
echo "Did not create a database for Python, or created it in the wrong location."
exit 1
fi

- name: Check language autodetect for Ruby
if: env.CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT == 'true'
shell: bash
run: |
RUBY_DB=${{ fromJson(steps.analysis.outputs.db-locations).ruby }}
if [[ ! -d $RUBY_DB ]] || [[ ! $RUBY_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Ruby, or created it in the wrong location."
@@ -139,7 +134,9 @@ jobs:
fi

- name: Check language autodetect for Swift
if: env.CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT == 'true'
if: >-
env.CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT == 'true' ||
(runner.os != 'Windows' && matrix.version == 'nightly-latest')
shell: bash
run: |
SWIFT_DB=${{ fromJson(steps.analysis.outputs.db-locations).swift }}
10  .github/workflows/__packaging-codescanning-config-inputs-js.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Packaging: Config and input passed to the CLI'
@@ -44,6 +44,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: 'Packaging: Config and input passed to the CLI'
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -58,10 +61,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__packaging-config-inputs-js.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Packaging: Config and input'
@@ -44,6 +44,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: 'Packaging: Config and input'
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -58,10 +61,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__packaging-config-js.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Packaging: Config file'
@@ -44,6 +44,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: 'Packaging: Config file'
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -58,10 +61,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__packaging-inputs-js.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Packaging: Action input'
@@ -44,6 +44,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: 'Packaging: Action input'
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -58,10 +61,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__remote-config.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Remote config file
@@ -68,6 +68,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: Remote config file
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -82,10 +85,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__rubocop-multi-language.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - RuboCop multi-language
@@ -28,6 +28,9 @@ jobs:
- os: ubuntu-latest
version: cached
name: RuboCop multi-language
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -42,10 +45,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__ruby.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Ruby analysis
@@ -38,6 +38,9 @@ jobs:
- os: macos-latest
version: nightly-latest
name: Ruby analysis
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -52,10 +55,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__split-workflow.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Split workflow
@@ -38,6 +38,9 @@ jobs:
- os: macos-latest
version: nightly-latest
name: Split workflow
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -52,10 +55,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__submit-sarif-failure.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Submit SARIF after failure
@@ -32,6 +32,9 @@ jobs:
- os: ubuntu-latest
version: nightly-latest
name: Submit SARIF after failure
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -46,10 +49,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__swift-custom-build.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Swift analysis using a custom build command
@@ -38,6 +38,9 @@ jobs:
- os: macos-latest
version: nightly-latest
name: Swift analysis using a custom build command
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -52,10 +55,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__test-autobuild-working-dir.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Autobuild working directory
@@ -28,6 +28,9 @@ jobs:
- os: ubuntu-latest
version: latest
name: Autobuild working directory
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -42,10 +45,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__test-local-codeql.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Local CodeQL bundle
@@ -28,6 +28,9 @@ jobs:
- os: ubuntu-latest
version: nightly-latest
name: Local CodeQL bundle
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -42,10 +45,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__test-proxy.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Proxy test
@@ -28,6 +28,9 @@ jobs:
- os: ubuntu-latest
version: latest
name: Proxy test
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -42,10 +45,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__unset-environment.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Test unsetting environment variables
@@ -40,6 +40,9 @@ jobs:
- os: ubuntu-latest
version: nightly-latest
name: Test unsetting environment variables
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -54,10 +57,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__upload-ref-sha-input.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: "PR Check - Upload-sarif: 'ref' and 'sha' from inputs"
@@ -68,6 +68,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: "Upload-sarif: 'ref' and 'sha' from inputs"
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -82,10 +85,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
10  .github/workflows/__with-checkout-path.yml  (generated, vendored)
@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# to regenerate this file.

name: PR Check - Use a custom `checkout_path`
@@ -68,6 +68,9 @@ jobs:
- os: windows-latest
version: nightly-latest
name: Use a custom `checkout_path`
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -82,10 +85,7 @@ jobs:
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211' ||
matrix.version == 'cached' ||
matrix.version == 'latest' ||
matrix.version == 'nightly-latest'
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
4  .github/workflows/debug-artifacts.yml  (vendored)
@@ -49,11 +49,15 @@ jobs:
with:
go-version: ^1.13.1
- uses: ./../action/init
id: init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: my-debug-artifacts
debug-database-name: my-db
- uses: ./../action/.github/actions/setup-swift
with:
codeql-path: ${{ steps.init.outputs.codeql-path }}
- name: Build code
shell: bash
run: ./build.sh
2  .github/workflows/post-release-mergeback.yml  (vendored)
@@ -40,7 +40,7 @@ jobs:

- name: Update git config
run: |
git config --global user.email "github-actions@github.com"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"

- name: Get version and new branch
6  .github/workflows/python-deps.yml  (vendored)
@@ -72,7 +72,7 @@ jobs:

- name: Verify packages installed
run: |
$GITHUB_WORKSPACE/python-setup/tests/check_requests_2_26_0.sh ${PYTHON_VERSION}
$GITHUB_WORKSPACE/python-setup/tests/check_requests.sh ${PYTHON_VERSION} 2.31.0

# This one shouldn't fail, but also won't install packages
test-setup-python-scripts-non-standard-location:
@@ -170,5 +170,5 @@ jobs:

- name: Verify packages installed
run: |
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\tests\\check_requests_2_26_0.ps1"
powershell -File $cmd $Env:PYTHON_VERSION
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\tests\\check_requests.ps1"
powershell -File $cmd $Env:PYTHON_VERSION 2.31.0
2  .github/workflows/update-bundle.yml  (vendored)
@@ -30,7 +30,7 @@ jobs:

- name: Update git config
run: |
git config --global user.email "github-actions@github.com"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"

- name: Update bundle
2  .github/workflows/update-dependencies.yml  (vendored)
@@ -29,7 +29,7 @@ jobs:
git checkout "origin/$BRANCH"
.github/workflows/script/update-node-modules.sh update
if [ ! -z "$(git status --porcelain)" ]; then
git config --global user.email "github-actions@github.com"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
git add node_modules
git commit -am "Update checked-in dependencies"
2  .github/workflows/update-release-branch.yml  (vendored)
@@ -35,7 +35,7 @@ jobs:

- name: Update git config
run: |
git config --global user.email "github-actions@github.com"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"

- name: Update release branch
@@ -35,14 +35,22 @@ jobs:
npm run build
env:
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
- name: Commit Changes
uses: peter-evans/create-pull-request@284f54f989303d2699d373481a0cfa13ad5a6666 # v5.0.1
with:
commit-message: Update supported GitHub Enterprise Server versions.
title: Update supported GitHub Enterprise Server versions.
body: ""
author: GitHub <noreply@github.com>
branch: update-supported-enterprise-server-versions
draft: true

- name: Update git config
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"

- name: Commit changes and open PR
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
if [[ -z $(git status --porcelain) ]]; then
echo "No changes to commit"
else
git checkout -b update-supported-enterprise-server-versions
git add .
git commit --message "Update supported GitHub Enterprise Server versions"
git push origin update-supported-enterprise-server-versions
gh pr create --fill --draft
fi

@@ -35,7 +35,10 @@ def main():

if oldest_supported_release is None or release_version < oldest_supported_release:
end_of_life_date = datetime.date.fromisoformat(release_data["end"])
if end_of_life_date > datetime.date.today():
# The GHES version is not actually end of life until the end of the day specified by
# `end_of_life_date`. Wait an extra week to be safe.
is_end_of_life = datetime.date.today() > end_of_life_date + datetime.timedelta(weeks=1)
if not is_end_of_life:
oldest_supported_release = release_version

api_compatibility_data = {
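The Python hunk above keeps a GHES release in the supported list until a week past its published end-of-life date. For illustration only, here is the same check expressed in shell (hypothetical, using GNU `date`; the real script does this with Python's `datetime`):

```bash
END_OF_LIFE="2023-06-01"                         # stands in for release_data["end"] in the script
CUTOFF="$(date -d "$END_OF_LIFE + 7 days" +%s)"  # one-week grace period after end of life
if [ "$(date +%s)" -gt "$CUTOFF" ]; then
  echo "end of life: drop this release from the supported list"
else
  echo "still supported: it can remain the oldest supported release"
fi
```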
17  CHANGELOG.md
@@ -2,8 +2,22 @@

## [UNRELEASED]

No user facing changes.

## 2.3.6 - 01 Jun 2023

- Update default CodeQL bundle version to 2.13.3. [#1698](https://github.com/github/codeql-action/pull/1698)

## 2.3.5 - 25 May 2023

- Allow invalid URIs to be used as values to `artifactLocation.uri` properties. This reverses a change from [#1668](https://github.com/github/codeql-action/pull/1668) that inadvertently led to stricter validation of some URI values. [#1705](https://github.com/github/codeql-action/pull/1705)
- Gracefully handle invalid URIs when fingerprinting. [#1694](https://github.com/github/codeql-action/pull/1694)

## 2.3.4 - 24 May 2023

- Updated the SARIF 2.1.0 JSON schema file to the latest from [oasis-tcs/sarif-spec](https://github.com/oasis-tcs/sarif-spec/blob/123e95847b13fbdd4cbe2120fa5e33355d4a042b/Schemata/sarif-schema-2.1.0.json). [#1668](https://github.com/github/codeql-action/pull/1668)
- We are rolling out a feature in May 2023 that will disable Python dependency installation for new users of the CodeQL Action. This improves the speed of analysis while having only a very minor impact on results. [#1676](https://github.com/github/codeql-action/pull/1676)
- We are improving the way that [CodeQL bundles](https://github.com/github/codeql-action/releases) are tagged to make it possible to easily identify bundles by their CodeQL semantic version.
- We are improving the way that [CodeQL bundles](https://github.com/github/codeql-action/releases) are tagged to make it possible to easily identify bundles by their CodeQL semantic version. [#1682](https://github.com/github/codeql-action/pull/1682)
- As of CodeQL CLI 2.13.4, CodeQL bundles will be tagged using semantic versions, for example `codeql-bundle-v2.13.4`, instead of timestamps, like `codeql-bundle-20230615`.
- This change does not affect the majority of workflows, and we will not be changing tags for existing bundle releases.
- Some workflows with custom logic that depends on the specific format of the CodeQL bundle tag may need to be updated. For example, if your workflow matches CodeQL bundle tag names against a `codeql-bundle-yyyymmdd` pattern, you should update it to also recognize `codeql-bundle-vx.y.z` tags.
@@ -13,7 +27,6 @@

- Update default CodeQL bundle version to 2.13.1. [#1664](https://github.com/github/codeql-action/pull/1664)
- You can now configure CodeQL within your code scanning workflow by passing a `config` input to the `init` Action. See [Using a custom configuration file](https://aka.ms/code-scanning-docs/config-file) for more information about configuring code scanning. [#1590](https://github.com/github/codeql-action/pull/1590)
- Updated the SARIF 2.1.0 JSON schema file to the latest from [oasis-tcs/sarif-spec](https://github.com/oasis-tcs/sarif-spec/blob/123e95847b13fbdd4cbe2120fa5e33355d4a042b/Schemata/sarif-schema-2.1.0.json). [#1668](https://github.com/github/codeql-action/pull/1668)

## 2.3.2 - 27 Apr 2023
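The bundle-tagging note in the changelog above says that custom logic matching `codeql-bundle-yyyymmdd` tag names should also accept the new `codeql-bundle-vx.y.z` form. A minimal check might look like the following (a sketch only; the tag value is just an example):

```bash
TAG="codeql-bundle-v2.13.4"   # e.g. taken from a release event or a workflow input

# Accept both the old timestamp tags and the new semantic-version tags.
if [[ "$TAG" =~ ^codeql-bundle-[0-9]{8}$ || "$TAG" =~ ^codeql-bundle-v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
  echo "Recognized CodeQL bundle tag: $TAG"
else
  echo "Not a CodeQL bundle tag: $TAG" >&2
fi
```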
@@ -12,7 +12,7 @@ Please note that this project is released with a [Contributor Code of Conduct][c

## Development and Testing

Before you start, ensure that you have a recent version of node (14 or higher) installed, along with a recent version of npm (7 or higher). You can see which version of node is used by the action in `init/action.yml`.
Before you start, ensure that you have a recent version of node (16 or higher) installed, along with a recent version of npm (9.2 or higher). You can see which version of node is used by the action in `init/action.yml`.

### Common tasks
@@ -170,3 +170,7 @@ You can use Actions or environment variables to share configuration across multi
## Troubleshooting

Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).

## Contributing

This project welcomes contributions. See [CONTRIBUTING.md](CONTRIBUTING.md) for details on how to build, install, and contribute.
71  lib/actions-util.js  (generated)
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getUploadValue = exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionVersion = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.getUploadValue = exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.getWorkflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionVersion = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
@@ -104,7 +104,7 @@ exports.getCommitOid = getCommitOid;
* Returns undefined if run by other triggers or the merge base cannot be determined.
*/
const determineMergeBaseCommitOid = async function () {
if (workflowEventName() !== "pull_request") {
if (getWorkflowEventName() !== "pull_request") {
return undefined;
}
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
@@ -155,7 +155,7 @@ exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
*
* This will combine the workflow path and current job name.
* Computing this the first time requires making requests to
* the github API, but after that the result will be cached.
* the GitHub API, but after that the result will be cached.
*/
async function getAnalysisKey() {
const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
@@ -395,7 +395,8 @@ async function sendStatusReport(statusReport) {
if ((0, util_1.isHTTPError)(e)) {
switch (e.status) {
case 403:
if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
if (getWorkflowEventName() === "push" &&
process.env["GITHUB_ACTOR"] === "dependabot[bot]") {
core.setFailed('Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
"Uploading Code Scanning results requires write access. " +
'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
@@ -428,42 +429,36 @@ async function sendStatusReport(statusReport) {
}
}
exports.sendStatusReport = sendStatusReport;
function workflowEventName() {
// If the original event is dynamic CODESCANNING_EVENT_NAME will contain the right info (push/pull_request)
if (process.env["GITHUB_EVENT_NAME"] === "dynamic") {
const value = process.env["CODESCANNING_EVENT_NAME"];
if (value === undefined || value.length === 0) {
return process.env["GITHUB_EVENT_NAME"];
}
return value;
}
return process.env["GITHUB_EVENT_NAME"];
/**
* Returns the name of the event that triggered this workflow.
*
* This will be "dynamic" for default setup workflow runs.
*/
function getWorkflowEventName() {
return (0, util_1.getRequiredEnvParam)("GITHUB_EVENT_NAME");
}
exports.workflowEventName = workflowEventName;
// Was the workflow run triggered by a `push` event, for example as opposed to a `pull_request` event.
function workflowIsTriggeredByPushEvent() {
return workflowEventName() === "push";
}
// Is dependabot the actor that triggered the current workflow run.
function isDependabotActor() {
return process.env["GITHUB_ACTOR"] === "dependabot[bot]";
}
// Is the current action executing a local copy (i.e. we're running a workflow on the codeql-action repo itself)
// as opposed to running a remote action (i.e. when another repo references us)
exports.getWorkflowEventName = getWorkflowEventName;
/**
* Returns whether the current workflow is executing a local copy of the Action, e.g. we're running
* a workflow on the codeql-action repo itself.
*/
function isRunningLocalAction() {
const relativeScriptPath = getRelativeScriptPath();
return (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath));
}
exports.isRunningLocalAction = isRunningLocalAction;
// Get the location where the action is running from.
// This can be used to get the actions name or tell if we're running a local action.
/**
* Get the location where the Action is running from.
*
* This can be used to get the Action's name or tell if we're running a local Action.
*/
function getRelativeScriptPath() {
const runnerTemp = (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
return path.relative(actionsDirectory, __filename);
}
exports.getRelativeScriptPath = getRelativeScriptPath;
// Reads the contents of GITHUB_EVENT_PATH as a JSON object
/** Returns the contents of `GITHUB_EVENT_PATH` as a JSON object. */
function getWorkflowEvent() {
const eventJsonFile = (0, util_1.getRequiredEnvParam)("GITHUB_EVENT_PATH");
try {
@@ -476,10 +471,13 @@ function getWorkflowEvent() {
function removeRefsHeadsPrefix(ref) {
return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
}
// Returns whether we are analyzing the default branch for the repository.
// For cases where the repository information might not be available (e.g.,
// dynamic workflows), this can be forced by the environment variable
// CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH.
/**
* Returns whether we are analyzing the default branch for the repository.
*
* This first checks the environment variable `CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH`. This
* environment variable can be set in cases where repository information might not be available, for
* example dynamic workflows.
*/
async function isAnalyzingDefaultBranch() {
if (process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH === "true") {
return true;
@@ -489,8 +487,8 @@ async function isAnalyzingDefaultBranch() {
currentRef = removeRefsHeadsPrefix(currentRef);
const event = getWorkflowEvent();
let defaultBranch = event?.repository?.default_branch;
if (process.env.GITHUB_EVENT_NAME === "schedule") {
defaultBranch = removeRefsHeadsPrefix((0, util_1.getRequiredEnvParam)("GITHUB_REF"));
if (getWorkflowEventName() === "schedule") {
defaultBranch = removeRefsHeadsPrefix(getRefFromEnv());
}
return currentRef === defaultBranch;
}
@@ -524,7 +522,10 @@ async function printDebugLogs(config) {
}
}
exports.printDebugLogs = printDebugLogs;
// Parses the `upload` input into an `UploadKind`, converting unspecified and deprecated upload inputs appropriately.
/**
* Parses the `upload` input into an `UploadKind`, converting unspecified and deprecated upload
* inputs appropriately.
*/
function getUploadValue(input) {
switch (input) {
case undefined:
File diff suppressed because one or more lines are too long
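
The hunks above replace the module-private workflowEventName() helper, which translated the synthetic "dynamic" event back to the original trigger via CODESCANNING_EVENT_NAME, with an exported getWorkflowEventName() that reports GITHUB_EVENT_NAME as-is (so default setup runs surface as "dynamic"). The following is a minimal standalone sketch of the two shapes for comparison, not the Action's actual module: getRequiredEnvParam() is approximated with an inline check, and the sample values at the bottom are made up.

// sketch: old vs. new event-name helpers (illustrative only)
// Old shape: translate the "dynamic" event back to the original trigger.
function workflowEventNameOld() {
  if (process.env["GITHUB_EVENT_NAME"] === "dynamic") {
    const value = process.env["CODESCANNING_EVENT_NAME"];
    if (value === undefined || value.length === 0) {
      return process.env["GITHUB_EVENT_NAME"];
    }
    return value;
  }
  return process.env["GITHUB_EVENT_NAME"];
}

// New shape: report the trigger as-is, so default setup runs show up as "dynamic".
function getWorkflowEventName() {
  const name = process.env["GITHUB_EVENT_NAME"];
  if (name === undefined) {
    // Approximation of getRequiredEnvParam("GITHUB_EVENT_NAME").
    throw new Error("GITHUB_EVENT_NAME environment variable must be set");
  }
  return name;
}

process.env["GITHUB_EVENT_NAME"] = "dynamic";
process.env["CODESCANNING_EVENT_NAME"] = "push";
console.log(workflowEventNameOld()); // "push"
console.log(getWorkflowEventName()); // "dynamic"
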
9  lib/actions-util.test.js  generated
@@ -172,6 +172,7 @@ const util_1 = require("./util");
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
});
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
process.env["GITHUB_EVENT_NAME"] = "push";
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "false";
@@ -210,12 +211,4 @@ const util_1 = require("./util");
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("workflowEventName()", async (t) => {
process.env["GITHUB_EVENT_NAME"] = "push";
t.deepEqual(actionsutil.workflowEventName(), "push");
process.env["GITHUB_EVENT_NAME"] = "dynamic";
t.deepEqual(actionsutil.workflowEventName(), "dynamic");
process.env["CODESCANNING_EVENT_NAME"] = "push";
t.deepEqual(actionsutil.workflowEventName(), "push");
});
//# sourceMappingURL=actions-util.test.js.map

File diff suppressed because one or more lines are too long
2  lib/analyze.js  generated
@@ -209,7 +209,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
logger.endGroup();
logger.info(analysisSummary);
}
logger.info(await runPrintLinesOfCode(language));
await runPrintLinesOfCode(language);
}
catch (e) {
logger.info(String(e));

File diff suppressed because one or more lines are too long
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-20230428",
"cliVersion": "2.13.1",
"priorBundleVersion": "codeql-bundle-20230414",
"priorCliVersion": "2.13.0"
"bundleVersion": "codeql-bundle-20230524",
"cliVersion": "2.13.3",
"priorBundleVersion": "codeql-bundle-20230428",
"priorCliVersion": "2.13.1"
}

9  lib/fingerprints.js  generated
@@ -194,7 +194,14 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
logger.debug(`Ignoring location as URI "${location.uri}" is invalid`);
return undefined;
}
let uri = decodeURIComponent(location.uri);
let uri;
try {
uri = decodeURIComponent(location.uri);
}
catch (e) {
logger.debug(`Ignoring location as URI "${location.uri}" is invalid`);
return undefined;
}
// Remove a file scheme, and abort if the scheme is anything else
const fileUriPrefix = "file://";
if (uri.startsWith(fileUriPrefix)) {

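decodeURIComponent() throws a URIError when its input contains a malformed percent-escape, which is why the hunk above wraps the call in try/catch and skips the location rather than letting fingerprinting crash. A minimal standalone illustration of that guard follows; the helper name is invented for the example.

// sketch: guarded URI decoding (illustrative only)
function tryDecodeUri(rawUri) {
  try {
    return decodeURIComponent(rawUri);
  } catch (e) {
    // URIError: "URI malformed", e.g. a stray "%" that is not a valid escape.
    return undefined;
  }
}

console.log(tryDecodeUri("src/a%20b.js")); // "src/a b.js"
console.log(tryDecodeUri("src/100%.js")); // undefined -- previously this input would have thrown
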
File diff suppressed because one or more lines are too long
3  lib/init-action-post-helper.js  generated
@@ -110,8 +110,9 @@ async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, p
// but we didn't upload anything.
if (process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true" &&
!uploadFailedSarifResult.raw_upload_size_bytes) {
const error = JSON.stringify(uploadFailedSarifResult);
throw new Error("Expected to upload a failed SARIF file for this CodeQL code scanning run, " +
`but the result was instead ${uploadFailedSarifResult}.`);
`but the result was instead ${error}.`);
}
// Upload appropriate Actions artifacts for debugging
if (config.debugMode) {

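The one-line change above stringifies the upload result before interpolating it into the error message; interpolating a plain object into a template literal produces the unhelpful "[object Object]". A quick standalone demonstration of the difference (the result object here is a made-up stand-in):

// sketch: why the message is built from JSON.stringify() (illustrative only)
const uploadFailedSarifResult = { reason: "upload skipped", raw_upload_size_bytes: undefined };

console.log(`but the result was instead ${uploadFailedSarifResult}.`);
// -> but the result was instead [object Object].

console.log(`but the result was instead ${JSON.stringify(uploadFailedSarifResult)}.`);
// -> but the result was instead {"reason":"upload skipped"}.
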
@@ -1 +1 @@
|
||||
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAAuF;AACvF,wDAA0C;AAC1C,iCAKgB;AAChB,yCAKoB;AAWpB,SAAS,mCAAmC,CAC1C,KAAc;IAEd,MAAM,YAAY,GAAG,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC;IACtC,OAAO;QACL,uBAAuB,EAAE,YAAY,CAAC,OAAO;QAC7C,6BAA6B,EAAE,YAAY,CAAC,KAAK;KAClD,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EAAE;QACxE,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,CAAC;KAClE;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,EAAC,MAAM,CAAC,CAAC;IAC3C,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,MAAM,YAAY,GAAG,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,IACE,CAAC,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC,QAAQ,CAClC,WAAW,CAAC,cAAc,CAAC,YAAY,CAAC,CACzC;QACD,IAAA,mBAAY,GAAE,EACd;QACA,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAC5E,MAAM,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAEvC,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAE/C,kFAAkF;IAClF,IACE,YAAY,KAAK,SAAS;QAC1B,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EACpE;QACA,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;KACvE;SAAM;QACL,8EAA8E;QAC9E,MAAM,MAAM,CAAC,yBAAyB,CACpC,YAAY,EACZ,SAAS,EACT,QAAQ,EACR,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC;KACH;IAED,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;IACF,OAAO,YAAY,EAAE,YAAY,IAAI,EAAE,CAAC;AAC1C,CAAC;AAEM,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,OAAO,CAAC,GAAG,CAAC,oEAA+C,CAAC,KAAK,MAAM,EAAE;QAC3E,IAAI;YACF,OAAO,MAAM,sBAAsB,CACjC,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;SACH;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,KAAK,CACV,2EAA2E,CAAC,EAAE,CAC/E,CAAC;YACF,OAAO,mCAAmC,CAAC,CAAC,CAAC,CAAC;SAC/C;KACF;SAAM;QACL,OAAO;YACL,iCAAiC,EAC/B,uCAAuC;SAC1C,CAAC;KACH;AACH,CAAC;AA1BD,8DA0BC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,MAAM,uBAAuB,GAAG,MAAM,yBAAyB,CAC7D,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;IAEF,IAAI,uBAAuB,CAAC,iCAAiC,EAAE;QAC7D,MAAM,CAAC,KAAK,CACV,8EAA8E;YAC5E,GAAG,uBAAuB,CAAC,iCAAiC,GAAG,CAClE,CAAC;KACH;IACD,8FAA8F;IAC9F,iCAAiC;IACjC,IACE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM;QAClE,CAAC,uBAAuB,CAAC,qBAAqB,EAC9C;QACA,MAAM,IAAI,KAAK,CACb,4EAA4E;YAC1E,8BAA8B,uBAAuB,GAAG,CAC3D,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;IAED,OAAO,uBAAuB,CAAC;AACjC,CAAC;AArDD,kBAqDC"}
|
||||
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAAuF;AACvF,wDAA0C;AAC1C,iCAKgB;AAChB,yCAKoB;AAWpB,SAAS,mCAAmC,CAC1C,KAAc;IAEd,MAAM,YAAY,GAAG,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC;IACtC,OAAO;QACL,uBAAuB,EAAE,YAAY,CAAC,OAAO;QAC7C,6BAA6B,EAAE,YAAY,CAAC,KAAK;KAClD,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EAAE;QACxE,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,CAAC;KAClE;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,EAAC,MAAM,CAAC,CAAC;IAC3C,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,MAAM,YAAY,GAAG,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,IACE,CAAC,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC,QAAQ,CAClC,WAAW,CAAC,cAAc,CAAC,YAAY,CAAC,CACzC;QACD,IAAA,mBAAY,GAAE,EACd;QACA,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAC5E,MAAM,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAEvC,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAE/C,kFAAkF;IAClF,IACE,YAAY,KAAK,SAAS;QAC1B,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EACpE;QACA,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;KACvE;SAAM;QACL,8EAA8E;QAC9E,MAAM,MAAM,CAAC,yBAAyB,CACpC,YAAY,EACZ,SAAS,EACT,QAAQ,EACR,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC;KACH;IAED,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;IACF,OAAO,YAAY,EAAE,YAAY,IAAI,EAAE,CAAC;AAC1C,CAAC;AAEM,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,OAAO,CAAC,GAAG,CAAC,oEAA+C,CAAC,KAAK,MAAM,EAAE;QAC3E,IAAI;YACF,OAAO,MAAM,sBAAsB,CACjC,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;SACH;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,KAAK,CACV,2EAA2E,CAAC,EAAE,CAC/E,CAAC;YACF,OAAO,mCAAmC,CAAC,CAAC,CAAC,CAAC;SAC/C;KACF;SAAM;QACL,OAAO;YACL,iCAAiC,EAC/B,uCAAuC;SAC1C,CAAC;KACH;AACH,CAAC;AA1BD,8DA0BC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,MAAM,uBAAuB,GAAG,MAAM,yBAAyB,CAC7D,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;IAEF,IAAI,uBAAuB,CAAC,iCAAiC,EAAE;QAC7D,MAAM,CAAC,KAAK,CACV,8EAA8E;YAC5E,GAAG,uBAAuB,CAAC,iCAAiC,GAAG,CAClE,CAAC;KACH;IACD,8FAA8F;IAC9F,iCAAiC;IACjC,IACE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM;QAClE,CAAC,uBAAuB,CAAC,qBAAqB,EAC9C;QACA,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,CAAC;QACtD,MAAM,IAAI,KAAK,CACb,4EAA4E;YAC1E,8BAA8B,KAAK,GAAG,CACzC,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;IAED,OAAO,uBAAuB,CAAC;AACjC,CAAC;AAtDD,kBAsDC"}
|
||||
2  lib/trap-caching.js  generated
@@ -91,7 +91,7 @@ async function downloadTrapCaches(codeql, languages, logger) {
}
let baseSha = "unknown";
const eventPath = process.env.GITHUB_EVENT_PATH;
if (actionsUtil.workflowEventName() === "pull_request" &&
if (actionsUtil.getWorkflowEventName() === "pull_request" &&
eventPath !== undefined) {
const event = JSON.parse(fs.readFileSync(path.resolve(eventPath), "utf-8"));
baseSha = event.pull_request?.base?.sha || baseSha;

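The hunk above only renames the event helper; the surrounding logic still derives the TRAP cache's base commit by parsing the webhook payload at GITHUB_EVENT_PATH and reading pull_request.base.sha, falling back to "unknown". A self-contained sketch of that lookup, with a temporary file standing in for the real payload written by Actions:

// sketch: deriving the PR base SHA from the event payload (illustrative only)
const fs = require("fs");
const os = require("os");
const path = require("path");

// Stand-in for the payload that GitHub Actions writes to GITHUB_EVENT_PATH.
const eventPath = path.join(os.tmpdir(), "example-event.json");
fs.writeFileSync(eventPath, JSON.stringify({ pull_request: { base: { sha: "abc123" } } }));

let baseSha = "unknown";
if (eventPath !== undefined) {
  const event = JSON.parse(fs.readFileSync(path.resolve(eventPath), "utf-8"));
  baseSha = event.pull_request?.base?.sha || baseSha;
}
console.log(baseSha); // "abc123"
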
File diff suppressed because one or more lines are too long
15  lib/upload-lib.js  generated
@@ -181,16 +181,23 @@ function validateSarifFileSchema(sarifFilePath, logger) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
const schema = require("../src/sarif-schema-2.1.0.json");
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
// Filter errors related to invalid URIs in the artifactLocation field as this
// is a breaking change. See https://github.com/github/codeql-action/issues/1703
const errors = (result.errors || []).filter((err) => err.argument !== "uri-reference");
const warnings = (result.errors || []).filter((err) => err.argument === "uri-reference");
for (const warning of warnings) {
logger.info(`Warning: '${warning.instance}' is not a valid URI in '${warning.property}'.`);
}
if (errors.length) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
for (const error of errors) {
logger.startGroup(`Error details: ${error.stack}`);
logger.info(JSON.stringify(error, null, 2));
logger.endGroup();
}
// Set the main error message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
const sarifErrors = result.errors.map((e) => `- ${e.stack}`);
const sarifErrors = errors.map((e) => `- ${e.stack}`);
throw new Error(`Unable to upload "${sarifFilePath}" as it is not valid SARIF:\n${sarifErrors.join("\n")}`);
}
}
@@ -213,7 +220,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
base_ref: undefined,
base_sha: undefined,
};
if (actionsUtil.workflowEventName() === "pull_request") {
if (actionsUtil.getWorkflowEventName() === "pull_request") {
if (commitOid === util.getRequiredEnvParam("GITHUB_SHA") &&
mergeBaseCommitOid) {
// We're uploading results for the merge commit

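The validateSarifFileSchema() change above splits jsonschema's validation failures into warnings and hard errors: failures whose argument is "uri-reference" (an invalid artifactLocation.uri) are only logged, while everything else still aborts the upload. The standalone sketch below applies the same split to a hand-made list of failure objects shaped like jsonschema ValidationError instances; the property paths and messages are invented for the example.

// sketch: downgrade invalid uri-reference failures to warnings (illustrative only)
const validationErrors = [
  {
    argument: "uri-reference",
    instance: "not a valid URI",
    property: "instance.runs[0].results[0].locations[0].physicalLocation.artifactLocation.uri",
    stack: "instance...artifactLocation.uri does not conform to the \"uri-reference\" format",
  },
  {
    argument: "tool",
    instance: {},
    property: "instance.runs[0]",
    stack: "instance.runs[0] requires property \"tool\"",
  },
];

const errors = validationErrors.filter((err) => err.argument !== "uri-reference");
const warnings = validationErrors.filter((err) => err.argument === "uri-reference");

for (const warning of warnings) {
  console.log(`Warning: '${warning.instance}' is not a valid URI in '${warning.property}'.`);
}
if (errors.length) {
  // Only the non-URI failures still make the upload fail.
  console.log(errors.map((e) => `- ${e.stack}`).join("\n"));
}
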
File diff suppressed because one or more lines are too long
12  lib/upload-lib.test.js  generated
@@ -233,6 +233,18 @@ ava_1.default.beforeEach(() => {
|
||||
t.deepEqual(loggedMessages.length, 1);
|
||||
t.assert(loggedMessages[0].includes("Pruned 2 results"));
|
||||
});
|
||||
(0, ava_1.default)("accept results with invalid artifactLocation.uri value", (t) => {
|
||||
const loggedMessages = [];
|
||||
const mockLogger = {
|
||||
info: (message) => {
|
||||
loggedMessages.push(message);
|
||||
},
|
||||
};
|
||||
const sarifFile = `${__dirname}/../src/testdata/with-invalid-uri.sarif`;
|
||||
uploadLib.validateSarifFileSchema(sarifFile, mockLogger);
|
||||
t.deepEqual(loggedMessages.length, 1);
|
||||
t.deepEqual(loggedMessages[0], "Warning: 'not a valid URI' is not a valid URI in 'instance.runs[0].results[0].locations[0].physicalLocation.artifactLocation.uri'.");
|
||||
});
|
||||
const affectedCodeQLVersion = {
|
||||
driver: {
|
||||
name: "CodeQL",
|
||||
|
||||
File diff suppressed because one or more lines are too long
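
The new test above follows the file's existing pattern: a mock logger object exposing only the methods the code under test calls, with messages captured into an array so the assertion can check the exact warning text. A reduced standalone sketch of that pattern; the function being exercised here is a stand-in, not the real validateSarifFileSchema().

// sketch: asserting on log output via a minimal mock logger (illustrative only)
function codeUnderTest(logger) {
  // Stand-in that emits a single warning, like the invalid-URI case above.
  logger.info("Warning: 'not a valid URI' is not a valid URI in 'artifactLocation.uri'.");
}

const loggedMessages = [];
const mockLogger = {
  info: (message) => {
    loggedMessages.push(message);
  },
};

codeUnderTest(mockLogger);
console.assert(loggedMessages.length === 1, "expected exactly one logged message");
console.assert(loggedMessages[0].startsWith("Warning:"), "expected a warning message");
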
28  lib/util.js  generated
@@ -673,11 +673,6 @@ function removeDuplicateLocations(locations) {
});
}
function fixInvalidNotifications(sarif, logger) {
if (process.env[shared_environment_1.CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX] === "true") {
logger.info("SARIF notification object duplicate location fix disabled by the " +
`${shared_environment_1.CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX} environment variable.`);
return sarif;
}
if (!Array.isArray(sarif.runs)) {
return sarif;
}
@@ -727,10 +722,27 @@ function fixInvalidNotifications(sarif, logger) {
return newSarif;
}
exports.fixInvalidNotifications = fixInvalidNotifications;
/**
 * Removes duplicates from the sarif file.
 *
 * When `CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX` is set to true, this will
 * simply rename the input file to the output file. Otherwise, it will parse the
 * input file as JSON, remove duplicate locations from the SARIF notification
 * objects, and write the result to the output file.
 *
 * For context, see documentation of:
 * `CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX`. */
function fixInvalidNotificationsInFile(inputPath, outputPath, logger) {
let sarif = JSON.parse(fs.readFileSync(inputPath, "utf8"));
sarif = fixInvalidNotifications(sarif, logger);
fs.writeFileSync(outputPath, JSON.stringify(sarif));
if (process.env[shared_environment_1.CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX] === "true") {
logger.info("SARIF notification object duplicate location fix disabled by the " +
`${shared_environment_1.CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX} environment variable.`);
fs.renameSync(inputPath, outputPath);
}
else {
let sarif = JSON.parse(fs.readFileSync(inputPath, "utf8"));
sarif = fixInvalidNotifications(sarif, logger);
fs.writeFileSync(outputPath, JSON.stringify(sarif));
}
}
exports.fixInvalidNotificationsInFile = fixInvalidNotificationsInFile;
function wrapError(error) {

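After the change above, fixInvalidNotificationsInFile() checks the kill switch before touching the file: when CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX is "true" the input is simply renamed to the output path, and only otherwise is the SARIF parsed, passed through fixInvalidNotifications(), and rewritten. A reduced standalone sketch of that control flow; the no-op fix function and the temporary paths are stand-ins for the real ones.

// sketch: env-var short-circuit around the SARIF rewrite (illustrative only)
const fs = require("fs");
const os = require("os");
const path = require("path");

const DISABLE_FIX = "CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX";

// Stand-in for the real fixInvalidNotifications(), which de-duplicates
// locations inside SARIF notification objects.
function fixInvalidNotifications(sarif) {
  return sarif;
}

function fixInvalidNotificationsInFile(inputPath, outputPath) {
  if (process.env[DISABLE_FIX] === "true") {
    // Disabled: avoid parsing potentially huge SARIF, just move the file into place.
    fs.renameSync(inputPath, outputPath);
  } else {
    let sarif = JSON.parse(fs.readFileSync(inputPath, "utf8"));
    sarif = fixInvalidNotifications(sarif);
    fs.writeFileSync(outputPath, JSON.stringify(sarif));
  }
}

const input = path.join(os.tmpdir(), "example-in.sarif");
const output = path.join(os.tmpdir(), "example-out.sarif");
fs.writeFileSync(input, JSON.stringify({ runs: [] }));
fixInvalidNotificationsInFile(input, output);
console.log(fs.readFileSync(output, "utf8")); // {"runs":[]}
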
File diff suppressed because one or more lines are too long
4  lib/workflow.js  generated
@@ -298,7 +298,9 @@ function getInputOrThrow(workflow, jobName, actionName, inputName, matrixVars) {
 * This allows us to test workflow parsing functionality as a CodeQL Action PR check.
 */
function getAnalyzeActionName() {
if ((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY") === "github/codeql-action") {
if ((0, util_1.isInTestMode)() ||
process.env["CODEQL_ACTION_TESTING_ENVIRONMENT"] ===
"codeql-action-pr-checks") {
return "./analyze";
}
else {

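The condition above widens when the local "./analyze" action is chosen: previously only when GITHUB_REPOSITORY was github/codeql-action, now whenever the code runs in test mode or CODEQL_ACTION_TESTING_ENVIRONMENT selects the PR-check environment. A reduced sketch of that selection; isInTestMode() is approximated with a made-up environment variable, and the remote action name in the else branch is an assumption because the visible hunk ends before it.

// sketch: picking the local analyze action in test environments (illustrative only)
function isInTestMode() {
  // Stand-in for util.isInTestMode(); the real check reads a different variable.
  return process.env["EXAMPLE_TEST_MODE"] === "true";
}

function getAnalyzeActionName() {
  if (
    isInTestMode() ||
    process.env["CODEQL_ACTION_TESTING_ENVIRONMENT"] === "codeql-action-pr-checks"
  ) {
    return "./analyze";
  }
  // Assumed remote form of the action reference; not shown in the hunk above.
  return "github/codeql-action/analyze";
}

console.log(getAnalyzeActionName());
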
File diff suppressed because one or more lines are too long
2  node_modules/.package-lock.json  generated vendored
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "2.3.4",
|
||||
"version": "2.3.7",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
|
||||
4  package-lock.json  generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "2.3.4",
|
||||
"version": "2.3.7",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "codeql",
|
||||
"version": "2.3.4",
|
||||
"version": "2.3.7",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/artifact": "^1.1.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "2.3.4",
|
||||
"version": "2.3.7",
|
||||
"private": true,
|
||||
"description": "CodeQL action",
|
||||
"scripts": {
|
||||
|
||||
@@ -21,7 +21,7 @@ steps:
|
||||
with:
|
||||
upload-database: false
|
||||
|
||||
- name: Check language autodetect for all languages excluding Ruby, Swift
|
||||
- name: Check language autodetect for all languages excluding Swift
|
||||
shell: bash
|
||||
run: |
|
||||
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
|
||||
@@ -54,11 +54,6 @@ steps:
|
||||
echo "Did not create a database for Python, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check language autodetect for Ruby
|
||||
if: env.CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
RUBY_DB=${{ fromJson(steps.analysis.outputs.db-locations).ruby }}
|
||||
if [[ ! -d $RUBY_DB ]] || [[ ! $RUBY_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Ruby, or created it in the wrong location."
|
||||
@@ -66,7 +61,9 @@ steps:
|
||||
fi
|
||||
|
||||
- name: Check language autodetect for Swift
|
||||
if: env.CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT == 'true'
|
||||
if: >-
|
||||
env.CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT == 'true' ||
|
||||
(runner.os != 'Windows' && matrix.version == 'nightly-latest')
|
||||
shell: bash
|
||||
run: |
|
||||
SWIFT_DB=${{ fromJson(steps.analysis.outputs.db-locations).swift }}
|
||||
|
||||
@@ -24,7 +24,7 @@ defaultTestVersions = [
|
||||
|
||||
header = """# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
|
||||
# to regenerate this file.
|
||||
|
||||
"""
|
||||
@@ -81,10 +81,7 @@ for file in os.listdir('checks'):
|
||||
'if': FoldedScalarString(textwrap.dedent('''
|
||||
runner.os != 'Windows' && (
|
||||
matrix.version == '20220908' ||
|
||||
matrix.version == '20221211' ||
|
||||
matrix.version == 'cached' ||
|
||||
matrix.version == 'latest' ||
|
||||
matrix.version == 'nightly-latest'
|
||||
matrix.version == '20221211'
|
||||
)
|
||||
''').strip()),
|
||||
'shell': 'bash',
|
||||
@@ -101,6 +98,10 @@ for file in os.listdir('checks'):
|
||||
}
|
||||
},
|
||||
'name': checkSpecification['name'],
|
||||
'permissions': {
|
||||
'contents': 'read',
|
||||
'security-events': 'write'
|
||||
},
|
||||
'timeout-minutes': 45,
|
||||
'runs-on': '${{ matrix.os }}',
|
||||
'steps': steps,
|
||||
|
||||
27  python-setup/tests/check_requests.ps1  Normal file
@@ -0,0 +1,27 @@
|
||||
#! /usr/bin/pwsh
|
||||
|
||||
$EXPECTED_PYTHON_VERSION=$args[0]
|
||||
$EXPECTED_REQUESTS_VERSION=$args[1]
|
||||
|
||||
$FOUND_PYTHON_VERSION="$Env:LGTM_PYTHON_SETUP_VERSION"
|
||||
$FOUND_PYTHONPATH="$Env:LGTM_INDEX_IMPORT_PATH"
|
||||
|
||||
write-host "FOUND_PYTHON_VERSION=$FOUND_PYTHON_VERSION FOUND_PYTHONPATH=$FOUND_PYTHONPATH "
|
||||
|
||||
if ($FOUND_PYTHON_VERSION -ne $EXPECTED_PYTHON_VERSION) {
|
||||
write-host "Script told us to use Python $FOUND_PYTHON_VERSION, but expected $EXPECTED_PYTHON_VERSION"
|
||||
exit 1
|
||||
} else {
|
||||
write-host "Script told us to use Python $FOUND_PYTHON_VERSION, which was expected"
|
||||
}
|
||||
|
||||
$env:PYTHONPATH=$FOUND_PYTHONPATH
|
||||
|
||||
$INSTALLED_REQUESTS_VERSION = (py -3 -c "import requests; print(requests.__version__)")
|
||||
|
||||
if ($INSTALLED_REQUESTS_VERSION -ne $EXPECTED_REQUESTS_VERSION) {
|
||||
write-host "Using $FOUND_PYTHONPATH as PYTHONPATH, we found version $INSTALLED_REQUESTS_VERSION of requests, but expected $EXPECTED_REQUESTS_VERSION"
|
||||
exit 1
|
||||
} else {
|
||||
write-host "Using $FOUND_PYTHONPATH as PYTHONPATH, we found version $INSTALLED_REQUESTS_VERSION of requests, which was expected"
|
||||
}
|
||||
31  python-setup/tests/check_requests.sh  Executable file
@@ -0,0 +1,31 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||
|
||||
EXPECTED_PYTHON_VERSION=$1
|
||||
EXPECTED_REQUESTS_VERSION=$2
|
||||
|
||||
FOUND_PYTHON_VERSION="$LGTM_PYTHON_SETUP_VERSION"
|
||||
FOUND_PYTHONPATH="$LGTM_INDEX_IMPORT_PATH"
|
||||
|
||||
echo "FOUND_PYTHON_VERSION=${FOUND_PYTHON_VERSION} FOUND_PYTHONPATH=${FOUND_PYTHONPATH} "
|
||||
|
||||
if [[ $FOUND_PYTHON_VERSION != $EXPECTED_PYTHON_VERSION ]]; then
|
||||
echo "Script told us to use Python ${FOUND_PYTHON_VERSION}, but expected ${EXPECTED_PYTHON_VERSION}"
|
||||
exit 1
|
||||
else
|
||||
echo "Script told us to use Python ${FOUND_PYTHON_VERSION}, which was expected"
|
||||
fi
|
||||
|
||||
PYTHON_EXE="python${EXPECTED_PYTHON_VERSION}"
|
||||
|
||||
INSTALLED_REQUESTS_VERSION=$(PYTHONPATH="${FOUND_PYTHONPATH}" "${PYTHON_EXE}" -c 'import requests; print(requests.__version__)')
|
||||
|
||||
if [[ "$INSTALLED_REQUESTS_VERSION" != "$EXPECTED_REQUESTS_VERSION" ]]; then
|
||||
echo "Using ${FOUND_PYTHONPATH} as PYTHONPATH, we found version $INSTALLED_REQUESTS_VERSION of requests, but expected $EXPECTED_REQUESTS_VERSION"
|
||||
exit 1
|
||||
else
|
||||
echo "Using ${FOUND_PYTHONPATH} as PYTHONPATH, we found version $INSTALLED_REQUESTS_VERSION of requests, which was expected"
|
||||
fi
|
||||
@@ -1,28 +0,0 @@
|
||||
#! /usr/bin/pwsh
|
||||
|
||||
$EXPECTED_VERSION=$args[0]
|
||||
|
||||
$FOUND_VERSION="$Env:LGTM_PYTHON_SETUP_VERSION"
|
||||
$FOUND_PYTHONPATH="$Env:LGTM_INDEX_IMPORT_PATH"
|
||||
|
||||
write-host "FOUND_VERSION=$FOUND_VERSION FOUND_PYTHONPATH=$FOUND_PYTHONPATH "
|
||||
|
||||
if ($FOUND_VERSION -ne $EXPECTED_VERSION) {
|
||||
write-host "Script told us to use Python $FOUND_VERSION, but expected $EXPECTED_VERSION"
|
||||
exit 1
|
||||
} else {
|
||||
write-host "Script told us to use Python $FOUND_VERSION, which was expected"
|
||||
}
|
||||
|
||||
$env:PYTHONPATH=$FOUND_PYTHONPATH
|
||||
|
||||
$INSTALLED_REQUESTS_VERSION = (py -3 -c "import requests; print(requests.__version__)")
|
||||
|
||||
$EXPECTED_REQUESTS="2.26.0"
|
||||
|
||||
if ($INSTALLED_REQUESTS_VERSION -ne $EXPECTED_REQUESTS) {
|
||||
write-host "Using $FOUND_PYTHONPATH as PYTHONPATH, we found version $INSTALLED_REQUESTS_VERSION of requests, but expected $EXPECTED_REQUESTS"
|
||||
exit 1
|
||||
} else {
|
||||
write-host "Using $FOUND_PYTHONPATH as PYTHONPATH, we found version $INSTALLED_REQUESTS_VERSION of requests, which was expected"
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||
|
||||
EXPECTED_VERSION=$1
|
||||
|
||||
FOUND_VERSION="$LGTM_PYTHON_SETUP_VERSION"
|
||||
FOUND_PYTHONPATH="$LGTM_INDEX_IMPORT_PATH"
|
||||
|
||||
echo "FOUND_VERSION=${FOUND_VERSION} FOUND_PYTHONPATH=${FOUND_PYTHONPATH} "
|
||||
|
||||
if [[ $FOUND_VERSION != $EXPECTED_VERSION ]]; then
|
||||
echo "Script told us to use Python ${FOUND_VERSION}, but expected ${EXPECTED_VERSION}"
|
||||
exit 1
|
||||
else
|
||||
echo "Script told us to use Python ${FOUND_VERSION}, which was expected"
|
||||
fi
|
||||
|
||||
PYTHON_EXE="python${EXPECTED_VERSION}"
|
||||
|
||||
INSTALLED_REQUESTS_VERSION=$(PYTHONPATH="${FOUND_PYTHONPATH}" "${PYTHON_EXE}" -c 'import requests; print(requests.__version__)')
|
||||
|
||||
EXPECTED_REQUESTS="2.26.0"
|
||||
|
||||
if [[ "$INSTALLED_REQUESTS_VERSION" != "$EXPECTED_REQUESTS" ]]; then
|
||||
echo "Using ${FOUND_PYTHONPATH} as PYTHONPATH, we found version $INSTALLED_REQUESTS_VERSION of requests, but expected $EXPECTED_REQUESTS"
|
||||
exit 1
|
||||
else
|
||||
echo "Using ${FOUND_PYTHONPATH} as PYTHONPATH, we found version $INSTALLED_REQUESTS_VERSION of requests, which was expected"
|
||||
fi
|
||||
105  python-setup/tests/pipenv/python-3.8/Pipfile.lock  generated
@@ -18,43 +18,116 @@
|
||||
"default": {
|
||||
"certifi": {
|
||||
"hashes": [
|
||||
"sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3",
|
||||
"sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"
|
||||
"sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7",
|
||||
"sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2022.12.7"
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==2023.5.7"
|
||||
},
|
||||
"charset-normalizer": {
|
||||
"hashes": [
|
||||
"sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597",
|
||||
"sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"
|
||||
"sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6",
|
||||
"sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1",
|
||||
"sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e",
|
||||
"sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373",
|
||||
"sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62",
|
||||
"sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230",
|
||||
"sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be",
|
||||
"sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c",
|
||||
"sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0",
|
||||
"sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448",
|
||||
"sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f",
|
||||
"sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649",
|
||||
"sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d",
|
||||
"sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0",
|
||||
"sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706",
|
||||
"sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a",
|
||||
"sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59",
|
||||
"sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23",
|
||||
"sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5",
|
||||
"sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb",
|
||||
"sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e",
|
||||
"sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e",
|
||||
"sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c",
|
||||
"sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28",
|
||||
"sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d",
|
||||
"sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41",
|
||||
"sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974",
|
||||
"sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce",
|
||||
"sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f",
|
||||
"sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1",
|
||||
"sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d",
|
||||
"sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8",
|
||||
"sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017",
|
||||
"sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31",
|
||||
"sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7",
|
||||
"sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8",
|
||||
"sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e",
|
||||
"sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14",
|
||||
"sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd",
|
||||
"sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d",
|
||||
"sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795",
|
||||
"sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b",
|
||||
"sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b",
|
||||
"sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b",
|
||||
"sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203",
|
||||
"sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f",
|
||||
"sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19",
|
||||
"sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1",
|
||||
"sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a",
|
||||
"sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac",
|
||||
"sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9",
|
||||
"sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0",
|
||||
"sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137",
|
||||
"sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f",
|
||||
"sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6",
|
||||
"sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5",
|
||||
"sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909",
|
||||
"sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f",
|
||||
"sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0",
|
||||
"sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324",
|
||||
"sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755",
|
||||
"sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb",
|
||||
"sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854",
|
||||
"sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c",
|
||||
"sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60",
|
||||
"sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84",
|
||||
"sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0",
|
||||
"sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b",
|
||||
"sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1",
|
||||
"sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531",
|
||||
"sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1",
|
||||
"sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11",
|
||||
"sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326",
|
||||
"sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df",
|
||||
"sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"
|
||||
],
|
||||
"markers": "python_version >= '3'",
|
||||
"version": "==2.0.12"
|
||||
"markers": "python_full_version >= '3.7.0'",
|
||||
"version": "==3.1.0"
|
||||
},
|
||||
"idna": {
|
||||
"hashes": [
|
||||
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
|
||||
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
|
||||
],
|
||||
"markers": "python_version >= '3'",
|
||||
"markers": "python_version >= '3.5'",
|
||||
"version": "==3.4"
|
||||
},
|
||||
"requests": {
|
||||
"hashes": [
|
||||
"sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24",
|
||||
"sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"
|
||||
"sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f",
|
||||
"sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.26.0"
|
||||
"version": "==2.31.0"
|
||||
},
|
||||
"urllib3": {
|
||||
"hashes": [
|
||||
"sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc",
|
||||
"sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"
|
||||
"sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc",
|
||||
"sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
|
||||
"version": "==1.26.13"
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==2.0.2"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
|
||||
105  python-setup/tests/pipenv/requests-3/Pipfile.lock  generated
@@ -16,43 +16,116 @@
|
||||
"default": {
|
||||
"certifi": {
|
||||
"hashes": [
|
||||
"sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3",
|
||||
"sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"
|
||||
"sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7",
|
||||
"sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2022.12.7"
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==2023.5.7"
|
||||
},
|
||||
"charset-normalizer": {
|
||||
"hashes": [
|
||||
"sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597",
|
||||
"sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"
|
||||
"sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6",
|
||||
"sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1",
|
||||
"sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e",
|
||||
"sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373",
|
||||
"sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62",
|
||||
"sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230",
|
||||
"sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be",
|
||||
"sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c",
|
||||
"sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0",
|
||||
"sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448",
|
||||
"sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f",
|
||||
"sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649",
|
||||
"sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d",
|
||||
"sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0",
|
||||
"sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706",
|
||||
"sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a",
|
||||
"sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59",
|
||||
"sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23",
|
||||
"sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5",
|
||||
"sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb",
|
||||
"sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e",
|
||||
"sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e",
|
||||
"sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c",
|
||||
"sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28",
|
||||
"sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d",
|
||||
"sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41",
|
||||
"sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974",
|
||||
"sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce",
|
||||
"sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f",
|
||||
"sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1",
|
||||
"sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d",
|
||||
"sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8",
|
||||
"sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017",
|
||||
"sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31",
|
||||
"sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7",
|
||||
"sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8",
|
||||
"sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e",
|
||||
"sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14",
|
||||
"sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd",
|
||||
"sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d",
|
||||
"sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795",
|
||||
"sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b",
|
||||
"sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b",
|
||||
"sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b",
|
||||
"sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203",
|
||||
"sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f",
|
||||
"sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19",
|
||||
"sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1",
|
||||
"sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a",
|
||||
"sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac",
|
||||
"sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9",
|
||||
"sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0",
|
||||
"sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137",
|
||||
"sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f",
|
||||
"sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6",
|
||||
"sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5",
|
||||
"sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909",
|
||||
"sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f",
|
||||
"sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0",
|
||||
"sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324",
|
||||
"sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755",
|
||||
"sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb",
|
||||
"sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854",
|
||||
"sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c",
|
||||
"sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60",
|
||||
"sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84",
|
||||
"sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0",
|
||||
"sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b",
|
||||
"sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1",
|
||||
"sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531",
|
||||
"sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1",
|
||||
"sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11",
|
||||
"sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326",
|
||||
"sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df",
|
||||
"sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"
|
||||
],
|
||||
"markers": "python_version >= '3'",
|
||||
"version": "==2.0.12"
|
||||
"markers": "python_full_version >= '3.7.0'",
|
||||
"version": "==3.1.0"
|
||||
},
|
||||
"idna": {
|
||||
"hashes": [
|
||||
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
|
||||
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
|
||||
],
|
||||
"markers": "python_version >= '3'",
|
||||
"markers": "python_version >= '3.5'",
|
||||
"version": "==3.4"
|
||||
},
|
||||
"requests": {
|
||||
"hashes": [
|
||||
"sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24",
|
||||
"sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"
|
||||
"sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f",
|
||||
"sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.26.0"
|
||||
"version": "==2.31.0"
|
||||
},
|
||||
"urllib3": {
|
||||
"hashes": [
|
||||
"sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc",
|
||||
"sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"
|
||||
"sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc",
|
||||
"sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
|
||||
"version": "==1.26.13"
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==2.0.2"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
|
||||
60  python-setup/tests/poetry/python-3.8/poetry.lock  generated
@@ -1,3 +1,5 @@
|
||||
# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2022.12.7"
|
||||
@@ -5,6 +7,10 @@ description = "Python package for providing Mozilla's CA Bundle."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
|
||||
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
@@ -13,6 +19,10 @@ description = "The Real First Universal Charset Detector. Open, modern and activ
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.5.0"
|
||||
files = [
|
||||
{file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"},
|
||||
{file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
unicode-backport = ["unicodedata2"]
|
||||
@@ -24,24 +34,32 @@ description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
|
||||
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.26.0"
|
||||
version = "2.31.0"
|
||||
description = "Python HTTP for Humans."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
|
||||
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2017.4.17"
|
||||
charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
|
||||
idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
|
||||
urllib3 = ">=1.21.1,<1.27"
|
||||
charset-normalizer = ">=2,<4"
|
||||
idna = ">=2.5,<4"
|
||||
urllib3 = ">=1.21.1,<3"
|
||||
|
||||
[package.extras]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
@@ -50,6 +68,10 @@ description = "HTTP library with thread-safe connection pooling, file post, and
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
|
||||
files = [
|
||||
{file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"},
|
||||
{file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotlipy (>=0.6.0)"]
|
||||
@@ -57,28 +79,6 @@ secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "p
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "1.1"
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.8"
|
||||
content-hash = "fabc9cabf9f18437e7b9ea3dbd1895a5a118239c17b3d097c465a290707e6bfd"
|
||||
|
||||
[metadata.files]
|
||||
certifi = [
|
||||
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
|
||||
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
|
||||
]
|
||||
charset-normalizer = [
|
||||
{file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"},
|
||||
{file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"},
|
||||
]
|
||||
idna = [
|
||||
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
|
||||
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
|
||||
]
|
||||
requests = [
|
||||
{file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"},
|
||||
{file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"},
|
||||
]
|
||||
urllib3 = [
|
||||
{file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"},
|
||||
{file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"},
|
||||
]
|
||||
|
||||
64  python-setup/tests/poetry/requests-3/poetry.lock  generated
@@ -1,3 +1,5 @@
|
||||
# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2022.12.7"
|
||||
@@ -5,6 +7,10 @@ description = "Python package for providing Mozilla's CA Bundle."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
|
||||
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
@@ -13,6 +19,10 @@ description = "The Real First Universal Charset Detector. Open, modern and activ
|
||||
category = "main"
optional = false
python-versions = ">=3.5.0"
files = [
    {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"},
    {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"},
]

[package.extras]
unicode-backport = ["unicodedata2"]

@@ -24,24 +34,32 @@ description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"
files = [
    {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
    {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]

[[package]]
name = "requests"
version = "2.26.0"
version = "2.31.0"
description = "Python HTTP for Humans."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
python-versions = ">=3.7"
files = [
    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]

[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
urllib3 = ">=1.21.1,<1.27"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"

[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "urllib3"

@@ -50,6 +68,10 @@ description = "HTTP library with thread-safe connection pooling, file post, and
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
files = [
    {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"},
    {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"},
]

[package.extras]
brotli = ["brotlipy (>=0.6.0)"]

@@ -57,28 +79,6 @@ secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "p
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

[metadata]
lock-version = "1.1"
python-versions = "^3.6"
content-hash = "3186fede9fea5b617c0bcebda3034f2d889a3c4579d60dd45945772895a28b7d"

[metadata.files]
certifi = [
    {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
    {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
]
charset-normalizer = [
    {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"},
    {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"},
]
idna = [
    {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
    {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]
requests = [
    {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"},
    {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"},
]
urllib3 = [
    {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"},
    {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"},
]
lock-version = "2.0"
python-versions = "^3.7"
content-hash = "05ba07023dd383fd84c8e4945f1eae1aac30917ad1e4c10fb03e8235fcf3c248"

@@ -5,7 +5,7 @@ description = ""
authors = ["Your Name <you@example.com>"]

[tool.poetry.dependencies]
python = "^3.6"
python = "^3.7"
requests = "*"

[tool.poetry.dev-dependencies]

@@ -1 +1 @@
requests==2.26.0
requests==2.31.0

@@ -1 +1 @@
requests==2.26.0
requests==2.31.0

@@ -7,6 +7,6 @@ from setuptools import setup

setup(
    name="example-setup.py",
    install_requires=["requests==2.26.0"],
    install_requires=["requests==2.31.0"],
    python_requires='>=3.7',
)
queries/codeql-pack.lock.yml (new file, 14 lines)
@@ -0,0 +1,14 @@
---
lockVersion: 1.0.0
dependencies:
  codeql/javascript-all:
    version: 0.6.1
  codeql/regex:
    version: 0.0.12
  codeql/tutorial:
    version: 0.0.9
  codeql/util:
    version: 0.0.9
  codeql/yaml:
    version: 0.0.1
compiled: false
@@ -1,4 +1,4 @@
name: codeql-action-custom-queries-javascript
version: 0.0.0
libraryPathDependencies: codeql-javascript

dependencies:
  codeql/javascript-all: 0.6.1
queries/default-setup-environment-variables.ql (new file, 52 lines)
@@ -0,0 +1,52 @@
/**
 * @name Some environment variables may not exist in default setup workflows
 * @id javascript/codeql-action/default-setup-env-vars
 * @kind problem
 * @severity warning
 */

import javascript

bindingset[envVar]
predicate isSafeForDefaultSetup(string envVar) {
  // Ignore internal Code Scanning environment variables
  envVar.matches("CODE_SCANNING_%") or
  envVar.matches("CODEQL_%") or
  envVar.matches("CODESCANNING_%") or
  envVar.matches("LGTM_%") or
  // We flag up usage of potentially unsafe parts of the GitHub event in `default-setup-event-context.ql`.
  envVar = "GITHUB_EVENT_PATH" or
  // The following environment variables are known to be safe for use with default setup
  envVar =
    [
      "GITHUB_ACTION_REF", "GITHUB_ACTION_REPOSITORY", "GITHUB_ACTOR", "GITHUB_API_URL",
      "GITHUB_BASE_REF", "GITHUB_EVENT_NAME", "GITHUB_JOB", "GITHUB_RUN_ATTEMPT", "GITHUB_RUN_ID",
      "GITHUB_SHA", "GITHUB_REPOSITORY", "GITHUB_SERVER_URL", "GITHUB_TOKEN", "GITHUB_WORKFLOW",
      "GITHUB_WORKSPACE", "GOFLAGS", "JAVA_TOOL_OPTIONS", "RUNNER_ARCH", "RUNNER_NAME", "RUNNER_OS",
      "RUNNER_TEMP", "RUNNER_TOOL_CACHE"
    ]
}

predicate envVarRead(DataFlow::Node node, string envVar) {
  node =
    any(DataFlow::PropRead read |
      read = NodeJSLib::process().getAPropertyRead("env").getAPropertyRead() and
      envVar = read.getPropertyName()
    ) or
  node =
    any(DataFlow::CallNode call |
      call.getCalleeName().matches("get%EnvParam") and
      envVar = call.getArgument(0).getStringValue()
    )
}

from DataFlow::Node read, string envVar
where
  envVarRead(read, envVar) and
  not isSafeForDefaultSetup(envVar)
select read,
  "The environment variable " + envVar +
    " may not exist in default setup workflows. If all uses are safe, add it to the list of " +
    "environment variables that are known to be safe in " +
    "'queries/default-setup-environment-variables.ql'. If this use is safe but others are not, " +
    "dismiss this alert as a false positive."
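For illustration, here is a minimal TypeScript sketch of the patterns this query reasons about: a direct process.env read and a "get...EnvParam"-style call, next to a read that the allow-list accepts. The helper and variable names are hypothetical and are not taken from the repository.

// sketch.ts - hypothetical reads, illustrating what the query matches
function getOptionalEnvParam(name: string): string | undefined {
  return process.env[name];
}

// Accepted: GITHUB_SHA is on the query's safe list.
const sha = process.env["GITHUB_SHA"];

// Flagged: MY_CUSTOM_VAR is not known to exist in default setup workflows.
const custom = process.env["MY_CUSTOM_VAR"];

// Also flagged: calls whose callee name matches "get%EnvParam" are treated as env-var reads.
const other = getOptionalEnvParam("ANOTHER_CUSTOM_VAR");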
queries/default-setup-event-context.ql (new file, 58 lines)
@@ -0,0 +1,58 @@
/**
 * @name Some context properties may not exist in default setup workflows
 * @id javascript/codeql-action/default-setup-context-properties
 * @kind path-problem
 * @severity warning
 */

import javascript
import DataFlow::PathGraph

class NotParsedLabel extends DataFlow::FlowLabel {
  NotParsedLabel() { this = "not-parsed" }
}

class ParsedLabel extends DataFlow::FlowLabel {
  ParsedLabel() { this = "parsed" }
}

class EventContextAccessConfiguration extends DataFlow::Configuration {
  EventContextAccessConfiguration() { this = "EventContextAccessConfiguration" }

  override predicate isSource(DataFlow::Node source, DataFlow::FlowLabel lbl) {
    source = NodeJSLib::process().getAPropertyRead("env").getAPropertyRead("GITHUB_EVENT_PATH") and
    lbl instanceof NotParsedLabel
  }

  override predicate isSink(DataFlow::Node sink, DataFlow::FlowLabel lbl) {
    sink instanceof DataFlow::PropRead and
    lbl instanceof ParsedLabel and
    not exists(DataFlow::PropRead n | sink = n.getBase()) and
    not sink.asExpr().getFile().getBaseName().matches("%.test.ts")
  }

  override predicate isAdditionalFlowStep(
    DataFlow::Node src, DataFlow::Node trg, DataFlow::FlowLabel inlbl, DataFlow::FlowLabel outlbl
  ) {
    src = trg.(FileSystemReadAccess).getAPathArgument() and inlbl = outlbl
    or
    exists(JsonParserCall c |
      src = c.getInput() and
      trg = c.getOutput() and
      inlbl instanceof NotParsedLabel and
      outlbl instanceof ParsedLabel
    )
    or
    (
      TaintTracking::sharedTaintStep(src, trg) or
      DataFlow::SharedFlowStep::step(src, trg) or
      DataFlow::SharedFlowStep::step(src, trg, _, _)
    ) and
    inlbl = outlbl
  }
}

from EventContextAccessConfiguration cfg, DataFlow::PathNode source, DataFlow::PathNode sink
where cfg.hasFlowPath(source, sink)
select sink.getNode(), source, sink,
  "This event context property may not exist in default setup workflows."
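The flow this path query tracks can be pictured with a short TypeScript sketch (hypothetical code, not from the repository): the value of GITHUB_EVENT_PATH is the source, JSON parsing moves the value to the "parsed" label, and a property read on the parsed event is the sink.

// sketch.ts - hypothetical source-to-sink flow for the query above
import * as fs from "fs";

const eventPath = process.env["GITHUB_EVENT_PATH"]!; // source ("not-parsed")
const event = JSON.parse(fs.readFileSync(eventPath, "utf8")); // parse step: label becomes "parsed"

// Sink: a property read on the parsed event; the property may be absent in default setup runs.
const defaultBranch = event?.repository?.default_branch;
console.log(defaultBranch);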
@@ -4,7 +4,7 @@
 * must be defined in an identical way to avoid confusion for the user.
 * This also makes writing queries like required-action-input.ql easier.
 * @kind problem
 * @problem.severity error
 * @severity error
 * @id javascript/codeql-action/inconsistent-action-input
 */

@@ -15,7 +15,9 @@ import javascript
 */
class ActionDeclaration extends File {
  ActionDeclaration() {
    getRelativePath().matches("%/action.yml")
    getRelativePath().matches("%/action.yml") and
    // Ignore internal Actions
    not getRelativePath().matches(".github/actions/%")
  }

  /**

@@ -25,19 +27,19 @@ class ActionDeclaration extends File {
    result = getRelativePath().regexpCapture("(.*)/action.yml", 1)
  }

  YAMLDocument getRootNode() {
  YamlDocument getRootNode() {
    result.getFile() = this
  }

  YAMLValue getInput(string inputName) {
    result = getRootNode().(YAMLMapping).lookup("inputs").(YAMLMapping).lookup(inputName)
  YamlValue getInput(string inputName) {
    result = getRootNode().(YamlMapping).lookup("inputs").(YamlMapping).lookup(inputName)
  }
}

predicate areNotEquivalent(YAMLValue x, YAMLValue y) {
predicate areNotEquivalent(YamlValue x, YamlValue y) {
  x.getTag() != y.getTag()
  or
  x.(YAMLScalar).getValue() != y.(YAMLScalar).getValue()
  x.(YamlScalar).getValue() != y.(YamlScalar).getValue()
  or
  x.getNumChild() != y.getNumChild()
  or
@@ -214,6 +214,7 @@ test("initializeEnvironment", (t) => {
});

test("isAnalyzingDefaultBranch()", async (t) => {
  process.env["GITHUB_EVENT_NAME"] = "push";
  process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
  t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
  process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "false";

@@ -264,14 +265,3 @@ test("isAnalyzingDefaultBranch()", async (t) => {
    getAdditionalInputStub.restore();
  });
});

test("workflowEventName()", async (t) => {
  process.env["GITHUB_EVENT_NAME"] = "push";
  t.deepEqual(actionsutil.workflowEventName(), "push");

  process.env["GITHUB_EVENT_NAME"] = "dynamic";
  t.deepEqual(actionsutil.workflowEventName(), "dynamic");

  process.env["CODESCANNING_EVENT_NAME"] = "push";
  t.deepEqual(actionsutil.workflowEventName(), "push");
});
@@ -108,7 +108,7 @@ export const getCommitOid = async function (
export const determineMergeBaseCommitOid = async function (): Promise<
  string | undefined
> {
  if (workflowEventName() !== "pull_request") {
  if (getWorkflowEventName() !== "pull_request") {
    return undefined;
  }

@@ -168,7 +168,7 @@ export const determineMergeBaseCommitOid = async function (): Promise<
 *
 * This will combine the workflow path and current job name.
 * Computing this the first time requires making requests to
 * the github API, but after that the result will be cached.
 * the GitHub API, but after that the result will be cached.
 */
export async function getAnalysisKey(): Promise<string> {
  const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
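The caching that the comment above alludes to can be sketched as env-var memoization. This is only an illustration of the pattern, with a hypothetical computeAnalysisKey callback standing in for the GitHub API requests.

// sketch.ts - illustrative memoization in an environment variable
async function getAnalysisKeySketch(
  computeAnalysisKey: () => Promise<string> // hypothetical; e.g. combines workflow path and job name
): Promise<string> {
  const envVar = "CODEQL_ACTION_ANALYSIS_KEY";
  const cached = process.env[envVar];
  if (cached !== undefined && cached.length > 0) {
    return cached; // no further API requests after the first call
  }
  const key = await computeAnalysisKey();
  process.env[envVar] = key;
  return key;
}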
@@ -540,7 +540,10 @@ export async function sendStatusReport<S extends StatusReportBase>(
    if (isHTTPError(e)) {
      switch (e.status) {
        case 403:
          if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
          if (
            getWorkflowEventName() === "push" &&
            process.env["GITHUB_ACTOR"] === "dependabot[bot]"
          ) {
            core.setFailed(
              'Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
                "Uploading Code Scanning results requires write access. " +
@@ -576,30 +579,19 @@ export async function sendStatusReport<S extends StatusReportBase>(
  }
}

export function workflowEventName() {
  // If the original event is dynamic CODESCANNING_EVENT_NAME will contain the right info (push/pull_request)
  if (process.env["GITHUB_EVENT_NAME"] === "dynamic") {
    const value = process.env["CODESCANNING_EVENT_NAME"];
    if (value === undefined || value.length === 0) {
      return process.env["GITHUB_EVENT_NAME"];
    }
    return value;
  }
  return process.env["GITHUB_EVENT_NAME"];
/**
 * Returns the name of the event that triggered this workflow.
 *
 * This will be "dynamic" for default setup workflow runs.
 */
export function getWorkflowEventName() {
  return getRequiredEnvParam("GITHUB_EVENT_NAME");
}

// Was the workflow run triggered by a `push` event, for example as opposed to a `pull_request` event.
function workflowIsTriggeredByPushEvent() {
  return workflowEventName() === "push";
}

// Is dependabot the actor that triggered the current workflow run.
function isDependabotActor() {
  return process.env["GITHUB_ACTOR"] === "dependabot[bot]";
}

// Is the current action executing a local copy (i.e. we're running a workflow on the codeql-action repo itself)
// as opposed to running a remote action (i.e. when another repo references us)
/**
 * Returns whether the current workflow is executing a local copy of the Action, e.g. we're running
 * a workflow on the codeql-action repo itself.
 */
export function isRunningLocalAction(): boolean {
  const relativeScriptPath = getRelativeScriptPath();
  return (
@@ -607,15 +599,18 @@ export function isRunningLocalAction(): boolean {
  );
}

// Get the location where the action is running from.
// This can be used to get the actions name or tell if we're running a local action.
/**
 * Get the location where the Action is running from.
 *
 * This can be used to get the Action's name or tell if we're running a local Action.
 */
export function getRelativeScriptPath(): string {
  const runnerTemp = getRequiredEnvParam("RUNNER_TEMP");
  const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
  return path.relative(actionsDirectory, __filename);
}

// Reads the contents of GITHUB_EVENT_PATH as a JSON object
/** Returns the contents of `GITHUB_EVENT_PATH` as a JSON object. */
function getWorkflowEvent(): any {
  const eventJsonFile = getRequiredEnvParam("GITHUB_EVENT_PATH");
  try {
@@ -631,10 +626,13 @@ function removeRefsHeadsPrefix(ref: string): string {
  return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
}

// Returns whether we are analyzing the default branch for the repository.
// For cases where the repository information might not be available (e.g.,
// dynamic workflows), this can be forced by the environment variable
// CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH.
/**
 * Returns whether we are analyzing the default branch for the repository.
 *
 * This first checks the environment variable `CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH`. This
 * environment variable can be set in cases where repository information might not be available, for
 * example dynamic workflows.
 */
export async function isAnalyzingDefaultBranch(): Promise<boolean> {
  if (process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH === "true") {
    return true;

@@ -647,8 +645,8 @@ export async function isAnalyzingDefaultBranch(): Promise<boolean> {
  const event = getWorkflowEvent();
  let defaultBranch = event?.repository?.default_branch;

  if (process.env.GITHUB_EVENT_NAME === "schedule") {
    defaultBranch = removeRefsHeadsPrefix(getRequiredEnvParam("GITHUB_REF"));
  if (getWorkflowEventName() === "schedule") {
    defaultBranch = removeRefsHeadsPrefix(getRefFromEnv());
  }

  return currentRef === defaultBranch;
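A hedged usage sketch of the override described above, mirroring the unit test earlier in this diff; the import path is an assumption for illustration.

// sketch.ts - assumes isAnalyzingDefaultBranch is exported from ./actions-util
import * as actionsutil from "./actions-util";

async function demo() {
  // Force the result in default setup / dynamic workflows, where repository
  // information may not be present in the event payload.
  process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
  console.log(await actionsutil.isAnalyzingDefaultBranch()); // true

  // Without the override, the function compares the current ref
  // against the repository's default branch.
}

void demo();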
@@ -687,7 +685,10 @@ export async function printDebugLogs(config: Config) {

export type UploadKind = "always" | "failure-only" | "never";

// Parses the `upload` input into an `UploadKind`, converting unspecified and deprecated upload inputs appropriately.
/**
 * Parses the `upload` input into an `UploadKind`, converting unspecified and deprecated upload
 * inputs appropriately.
 */
export function getUploadValue(input: string | undefined): UploadKind {
  switch (input) {
    case undefined:
@@ -352,7 +352,7 @@ export async function runQueries(
      logger.endGroup();
      logger.info(analysisSummary);
    }
    logger.info(await runPrintLinesOfCode(language));
    await runPrintLinesOfCode(language);
  } catch (e) {
    logger.info(String(e));
    if (e instanceof Error) {
@@ -1,6 +1,6 @@
{
  "bundleVersion": "codeql-bundle-20230428",
  "cliVersion": "2.13.1",
  "priorBundleVersion": "codeql-bundle-20230414",
  "priorCliVersion": "2.13.0"
  "bundleVersion": "codeql-bundle-20230524",
  "cliVersion": "2.13.3",
  "priorBundleVersion": "codeql-bundle-20230428",
  "priorCliVersion": "2.13.1"
}
@@ -201,7 +201,14 @@ export function resolveUriToFile(
    logger.debug(`Ignoring location as URI "${location.uri}" is invalid`);
    return undefined;
  }
  let uri = decodeURIComponent(location.uri as string);

  let uri: string;
  try {
    uri = decodeURIComponent(location.uri as string);
  } catch (e: any) {
    logger.debug(`Ignoring location as URI "${location.uri}" is invalid`);
    return undefined;
  }

  // Remove a file scheme, and abort if the scheme is anything else
  const fileUriPrefix = "file://";
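The new try/catch guards against the fact that decodeURIComponent throws a URIError on malformed percent-encoding; a small standalone sketch of that behavior:

// sketch.ts - decodeURIComponent throws URIError for malformed input
function tryDecode(uri: string): string | undefined {
  try {
    return decodeURIComponent(uri);
  } catch (e) {
    if (e instanceof URIError) {
      return undefined; // e.g. decodeURIComponent("%") throws "URI malformed"
    }
    throw e;
  }
}

console.log(tryDecode("src/main%20file.ts")); // "src/main file.ts"
console.log(tryDecode("%")); // undefined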
@@ -171,9 +171,10 @@ export async function run(
    process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true" &&
    !uploadFailedSarifResult.raw_upload_size_bytes
  ) {
    const error = JSON.stringify(uploadFailedSarifResult);
    throw new Error(
      "Expected to upload a failed SARIF file for this CodeQL code scanning run, " +
        `but the result was instead ${uploadFailedSarifResult}.`
        `but the result was instead ${error}.`
    );
  }
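The switch to JSON.stringify matters because interpolating a plain object into a template literal produces "[object Object]"; a tiny sketch (the field here is illustrative, not the Action's full result shape):

// sketch.ts - why the error message stringifies the result object
const uploadFailedSarifResult = { raw_upload_size_bytes: 0 }; // illustrative shape

console.log(`but the result was instead ${uploadFailedSarifResult}.`);
// -> "but the result was instead [object Object]."
console.log(`but the result was instead ${JSON.stringify(uploadFailedSarifResult)}.`);
// -> 'but the result was instead {"raw_upload_size_bytes":0}.'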
src/testdata/with-invalid-uri.sarif (new vendored file, 42 lines)
@@ -0,0 +1,42 @@
{
  "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
  "version": "2.1.0",
  "runs": [
    {
      "tool": {
        "driver": {
          "name": "LGTM.com",
          "organization": "Semmle",
          "version": "1.24.0-SNAPSHOT",
          "rules": []
        }
      },
      "results" : [ {
        "ruleId" : "js/unused-local-variable",
        "ruleIndex" : 0,
        "message" : {
          "text" : "Unused variable foo."
        },
        "locations" : [ {
          "physicalLocation" : {
            "artifactLocation" : {
              "uri" : "not a valid URI",
              "uriBaseId" : "%SRCROOT%",
              "index" : 0
            },
            "region" : {
              "startLine" : 2,
              "startColumn" : 7,
              "endColumn" : 10
            }
          }
        } ]
      } ],
      "columnKind": "utf16CodeUnits",
      "properties": {
        "semmle.formatSpecifier": "2.1.0",
        "semmle.sourceLanguage": "java"
      }
    }
  ]
}
@@ -99,7 +99,7 @@ export async function downloadTrapCaches(
  let baseSha = "unknown";
  const eventPath = process.env.GITHUB_EVENT_PATH;
  if (
    actionsUtil.workflowEventName() === "pull_request" &&
    actionsUtil.getWorkflowEventName() === "pull_request" &&
    eventPath !== undefined
  ) {
    const event = JSON.parse(fs.readFileSync(path.resolve(eventPath), "utf-8"));
@@ -360,6 +360,23 @@ test("pruneInvalidResults", (t) => {
  t.assert(loggedMessages[0].includes("Pruned 2 results"));
});

test("accept results with invalid artifactLocation.uri value", (t) => {
  const loggedMessages: string[] = [];
  const mockLogger = {
    info: (message: string) => {
      loggedMessages.push(message);
    },
  } as Logger;

  const sarifFile = `${__dirname}/../src/testdata/with-invalid-uri.sarif`;
  uploadLib.validateSarifFileSchema(sarifFile, mockLogger);

  t.deepEqual(loggedMessages.length, 1);
  t.deepEqual(
    loggedMessages[0],
    "Warning: 'not a valid URI' is not a valid URI in 'instance.runs[0].results[0].locations[0].physicalLocation.artifactLocation.uri'."
  );
});
const affectedCodeQLVersion = {
  driver: {
    name: "CodeQL",
@@ -228,9 +228,24 @@ export function validateSarifFileSchema(sarifFilePath: string, logger: Logger) {
  const schema = require("../src/sarif-schema-2.1.0.json") as jsonschema.Schema;

  const result = new jsonschema.Validator().validate(sarif, schema);
  if (!result.valid) {
    // Filter errors related to invalid URIs in the artifactLocation field as this
    // is a breaking change. See https://github.com/github/codeql-action/issues/1703
    const errors = (result.errors || []).filter(
      (err) => err.argument !== "uri-reference"
    );
    const warnings = (result.errors || []).filter(
      (err) => err.argument === "uri-reference"
    );

    for (const warning of warnings) {
      logger.info(
        `Warning: '${warning.instance}' is not a valid URI in '${warning.property}'.`
      );
    }

    if (errors.length) {
      // Output the more verbose error messages in groups as these may be very large.
      for (const error of result.errors) {
      for (const error of errors) {
        logger.startGroup(`Error details: ${error.stack}`);
        logger.info(JSON.stringify(error, null, 2));
        logger.endGroup();
@@ -238,7 +253,7 @@ export function validateSarifFileSchema(sarifFilePath: string, logger: Logger) {

      // Set the main error message to the stacks of all the errors.
      // This should be of a manageable size and may even give enough to fix the error.
      const sarifErrors = result.errors.map((e) => `- ${e.stack}`);
      const sarifErrors = errors.map((e) => `- ${e.stack}`);
      throw new Error(
        `Unable to upload "${sarifFilePath}" as it is not valid SARIF:\n${sarifErrors.join(
          "\n"
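For reference, a small self-contained sketch of the splitting technique used above with the jsonschema package: validation errors whose failing argument is "uri-reference" are demoted to log-only warnings, while everything else remains fatal. The schema and instance below are made up for illustration.

// sketch.ts - splitting jsonschema validation errors into warnings and hard errors
import * as jsonschema from "jsonschema";

const schema: jsonschema.Schema = {
  type: "object",
  properties: {
    uri: { type: "string", format: "uri-reference" },
    version: { type: "string" },
  },
  required: ["version"],
};

const instance = { uri: "not a valid URI", version: 123 };

const result = new jsonschema.Validator().validate(instance, schema);
const warnings = result.errors.filter((e) => e.argument === "uri-reference");
const errors = result.errors.filter((e) => e.argument !== "uri-reference");

for (const w of warnings) {
  console.log(`Warning: '${w.instance}' is not a valid URI in '${w.property}'.`);
}
if (errors.length) {
  throw new Error(errors.map((e) => `- ${e.stack}`).join("\n"));
}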
@@ -278,7 +293,7 @@ export function buildPayload(
    base_sha: undefined as undefined | string,
  };

  if (actionsUtil.workflowEventName() === "pull_request") {
  if (actionsUtil.getWorkflowEventName() === "pull_request") {
    if (
      commitOid === util.getRequiredEnvParam("GITHUB_SHA") &&
      mergeBaseCommitOid
src/util.ts (31 lines changed)
@@ -818,13 +818,6 @@ export function fixInvalidNotifications(
  sarif: SarifFile,
  logger: Logger
): SarifFile {
  if (process.env[CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX] === "true") {
    logger.info(
      "SARIF notification object duplicate location fix disabled by the " +
        `${CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX} environment variable.`
    );
    return sarif;
  }
  if (!Array.isArray(sarif.runs)) {
    return sarif;
  }
@@ -883,14 +876,32 @@ export function fixInvalidNotifications(
  return newSarif;
}

/**
 * Removes duplicates from the sarif file.
 *
 * When `CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX` is set to true, this will
 * simply rename the input file to the output file. Otherwise, it will parse the
 * input file as JSON, remove duplicate locations from the SARIF notification
 * objects, and write the result to the output file.
 *
 * For context, see documentation of:
 * `CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX`. */
export function fixInvalidNotificationsInFile(
  inputPath: string,
  outputPath: string,
  logger: Logger
): void {
  let sarif = JSON.parse(fs.readFileSync(inputPath, "utf8")) as SarifFile;
  sarif = fixInvalidNotifications(sarif, logger);
  fs.writeFileSync(outputPath, JSON.stringify(sarif));
  if (process.env[CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX] === "true") {
    logger.info(
      "SARIF notification object duplicate location fix disabled by the " +
        `${CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX} environment variable.`
    );
    fs.renameSync(inputPath, outputPath);
  } else {
    let sarif = JSON.parse(fs.readFileSync(inputPath, "utf8")) as SarifFile;
    sarif = fixInvalidNotifications(sarif, logger);
    fs.writeFileSync(outputPath, JSON.stringify(sarif));
  }
}
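A hedged usage sketch of the new file-level helper; the getActionsLogger import and the file paths are assumptions for illustration.

// sketch.ts - illustrative call site for fixInvalidNotificationsInFile
import { getActionsLogger } from "./logging";
import { fixInvalidNotificationsInFile } from "./util";

const logger = getActionsLogger();

// Normal path: parse the SARIF, drop duplicate notification locations, write the result.
fixInvalidNotificationsInFile("out/results.sarif", "out/results.fixed.sarif", logger);

// With CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX=true the input file is simply
// renamed to the output path instead of being parsed and rewritten.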
export function wrapError(error: unknown): Error {
@@ -7,7 +7,7 @@ import * as yaml from "js-yaml";

import * as api from "./api-client";
import { Logger } from "./logging";
import { getRequiredEnvParam } from "./util";
import { getRequiredEnvParam, isInTestMode } from "./util";

export interface WorkflowJobStep {
  name?: string;

@@ -389,7 +389,11 @@ function getInputOrThrow(
 * This allows us to test workflow parsing functionality as a CodeQL Action PR check.
 */
function getAnalyzeActionName() {
  if (getRequiredEnvParam("GITHUB_REPOSITORY") === "github/codeql-action") {
  if (
    isInTestMode() ||
    process.env["CODEQL_ACTION_TESTING_ENVIRONMENT"] ===
      "codeql-action-pr-checks"
  ) {
    return "./analyze";
  } else {
    return "github/codeql-action/analyze";