mirror of https://github.com/github/codeql-action.git, synced 2025-12-06 15:58:06 +08:00

Compare commits: codeql-bun ... codeql-bun (94 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 3407610120 |  |
|  | 5bf0e05e7b |  |
|  | ea41240bed |  |
|  | 0a7d6c210b |  |
|  | 1e7001479f |  |
|  | 5b6e617dc0 |  |
|  | adc78ec946 |  |
|  | d43af810ec |  |
|  | dd5146d090 |  |
|  | 3239a39f73 |  |
|  | 78760076e3 |  |
|  | 985eb4f8f2 |  |
|  | 2012e4b9c6 |  |
|  | 48ab28a6f5 |  |
|  | 4946b765de |  |
|  | d01b25e645 |  |
|  | 7d6d36ce5f |  |
|  | b58f4471c8 |  |
|  | 64cc90bcd4 |  |
|  | d8f8eca6c5 |  |
|  | 562042d742 |  |
|  | beed6ff2e9 |  |
|  | 5f0a4d3e67 |  |
|  | 34cb19c91b |  |
|  | 5fc1bccccc |  |
|  | 9d599696ef |  |
|  | 38fd34c412 |  |
|  | 0de662d785 |  |
|  | 5a8fab3748 |  |
|  | 04cce6be94 |  |
|  | 706ef5896a |  |
|  | 62192f8dab |  |
|  | dd75594246 |  |
|  | ac43a2e599 |  |
|  | 48fe0d8fb1 |  |
|  | df409f7d92 |  |
|  | feca44ddf6 |  |
|  | 7972a42f3d |  |
|  | 44bf16d3a1 |  |
|  | f124ad0e7e |  |
|  | 92753708cf |  |
|  | a059a7a0ee |  |
|  | 8a93837afd |  |
|  | 90a270091b |  |
|  | 9cfbef4bda |  |
|  | 9a8645df7a |  |
|  | 78d0136ff7 |  |
|  | c4bbe15558 |  |
|  | 47dd68ef62 |  |
|  | 849b60e504 |  |
|  | f327a84ce5 |  |
|  | 1e5b59114b |  |
|  | 9e8cd42adc |  |
|  | 88bcf64e02 |  |
|  | 932369573c |  |
|  | 4d64ab66ad |  |
|  | c6454d58c8 |  |
|  | 300d251cd6 |  |
|  | b2c41ecd38 |  |
|  | b0cd76b9fb |  |
|  | dfed55caa4 |  |
|  | 417bb84fbc |  |
|  | 18cf30d984 |  |
|  | 9fd5c24857 |  |
|  | babb554ede |  |
|  | 0a5a1c0d75 |  |
|  | 4c20d4f58a |  |
|  | 51e71f81a0 |  |
|  | 3951a82275 |  |
|  | f9b0c1f2ea |  |
|  | 57a28594b9 |  |
|  | 3e10d3452b |  |
|  | dd4aa40016 |  |
|  | 70aac4e018 |  |
|  | c901aeec28 |  |
|  | 89757925c7 |  |
|  | d853bec339 |  |
|  | aab34601c1 |  |
|  | 0d3e640d0c |  |
|  | b13515409a |  |
|  | a89fbc80a2 |  |
|  | 3d09005851 |  |
|  | 8ba1205033 |  |
|  | 182c5e787f |  |
|  | 6f9e628e6f |  |
|  | b706e37699 |  |
|  | b4bc093eca |  |
|  | 9c48c8bf18 |  |
|  | 0bb80075c0 |  |
|  | 25488cc9b0 |  |
|  | 87548a27e8 |  |
|  | 78be2f1333 |  |
|  | 8f2cb3a931 |  |
|  | 0dc76a996d |  |
2 .github/workflows/__go-indirect-tracing-workaround-diagnostic.yml generated vendored
@@ -28,7 +28,7 @@ jobs:
       matrix:
         include:
         - os: ubuntu-latest
-          version: stable-v2.14.6
+          version: default
     name: 'Go: diagnostic when Go is changed after init step'
     permissions:
       contents: read
2 .github/workflows/__go-indirect-tracing-workaround-no-file-program.yml generated vendored
@@ -28,7 +28,7 @@ jobs:
       matrix:
         include:
         - os: ubuntu-latest
-          version: stable-v2.14.6
+          version: default
     name: 'Go: diagnostic when `file` is not installed'
     permissions:
       contents: read
2 .github/workflows/__go-indirect-tracing-workaround.yml generated vendored
@@ -28,7 +28,7 @@ jobs:
       matrix:
         include:
         - os: ubuntu-latest
-          version: stable-v2.14.6
+          version: default
     name: 'Go: workaround for indirect tracing'
     permissions:
       contents: read
8 .github/workflows/__go-tracing-autobuilder.yml generated vendored
@@ -27,10 +27,6 @@ jobs:
       fail-fast: false
       matrix:
         include:
-        - os: ubuntu-latest
-          version: stable-v2.14.6
-        - os: macos-13
-          version: stable-v2.14.6
         - os: ubuntu-latest
           version: stable-v2.15.5
         - os: macos-latest
@@ -47,6 +43,10 @@ jobs:
           version: stable-v2.18.4
         - os: macos-latest
           version: stable-v2.18.4
+        - os: ubuntu-latest
+          version: stable-v2.19.4
+        - os: macos-latest
+          version: stable-v2.19.4
         - os: ubuntu-latest
           version: default
         - os: macos-latest
8 .github/workflows/__go-tracing-custom-build-steps.yml generated vendored
@@ -27,10 +27,6 @@ jobs:
       fail-fast: false
       matrix:
         include:
-        - os: ubuntu-latest
-          version: stable-v2.14.6
-        - os: macos-13
-          version: stable-v2.14.6
         - os: ubuntu-latest
           version: stable-v2.15.5
         - os: macos-latest
@@ -47,6 +43,10 @@ jobs:
           version: stable-v2.18.4
         - os: macos-latest
           version: stable-v2.18.4
+        - os: ubuntu-latest
+          version: stable-v2.19.4
+        - os: macos-latest
+          version: stable-v2.19.4
         - os: ubuntu-latest
           version: default
         - os: macos-latest
8 .github/workflows/__go-tracing-legacy-workflow.yml generated vendored
@@ -27,10 +27,6 @@ jobs:
       fail-fast: false
       matrix:
         include:
-        - os: ubuntu-latest
-          version: stable-v2.14.6
-        - os: macos-13
-          version: stable-v2.14.6
         - os: ubuntu-latest
           version: stable-v2.15.5
         - os: macos-latest
@@ -47,6 +43,10 @@ jobs:
           version: stable-v2.18.4
         - os: macos-latest
           version: stable-v2.18.4
+        - os: ubuntu-latest
+          version: stable-v2.19.4
+        - os: macos-latest
+          version: stable-v2.19.4
         - os: ubuntu-latest
           version: default
         - os: macos-latest
20 .github/workflows/__multi-language-autodetect.yml generated vendored
@@ -27,10 +27,6 @@ jobs:
       fail-fast: false
       matrix:
         include:
-        - os: macos-13
-          version: stable-v2.14.6
-        - os: ubuntu-latest
-          version: stable-v2.14.6
         - os: macos-latest
           version: stable-v2.15.5
         - os: ubuntu-latest
@@ -47,6 +43,10 @@ jobs:
           version: stable-v2.18.4
         - os: ubuntu-latest
           version: stable-v2.18.4
+        - os: macos-latest
+          version: stable-v2.19.4
+        - os: ubuntu-latest
+          version: stable-v2.19.4
         - os: macos-latest
           version: default
         - os: ubuntu-latest
@@ -88,15 +88,12 @@
       id: init
       with:
         db-location: ${{ runner.temp }}/customDbLocation
-        # Swift is not supported on Ubuntu or codeql 2.14 so we manually exclude it from the list here
-        languages: ${{ (runner.os == 'Linux' || (runner.os == 'macOS' && matrix.version
-          == 'stable-v2.14.6')) && 'cpp,csharp,go,java,javascript,python,ruby' ||
-          '' }}
+        languages: ${{ runner.os == 'Linux' && 'cpp,csharp,go,java,javascript,python,ruby'
+          || '' }}
         tools: ${{ steps.prepare-test.outputs.tools-url }}

     - uses: ./../action/.github/actions/setup-swift
-      # Exclude macos on v2.14.6 since we can not longer run swift on ARM runners
-      if: runner.os == 'macOS' && matrix.version != 'stable-v2.14.6'
+      if: runner.os == 'macOS'
       with:
         codeql-path: ${{ steps.init.outputs.codeql-path }}

@@ -149,8 +146,7 @@
           fi

     - name: Check language autodetect for Swift on macOS
-      # Exclude macos on v2.14.6 since we can not longer run swift on ARM runners
-      if: runner.os == 'macOS' && matrix.version != 'stable-v2.14.6'
+      if: runner.os == 'macOS'
       shell: bash
       run: |
         SWIFT_DB=${{ fromJson(steps.analysis.outputs.db-locations).swift }}
80 .github/workflows/__start-proxy.yml generated vendored Normal file
@@ -0,0 +1,80 @@
+# Warning: This file is generated automatically, and should not be modified.
+# Instead, please modify the template in the pr-checks directory and run:
+# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
+# to regenerate this file.
+
+name: PR Check - Start proxy
+env:
+  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  GO111MODULE: auto
+on:
+  push:
+    branches:
+    - main
+    - releases/v*
+  pull_request:
+    types:
+    - opened
+    - synchronize
+    - reopened
+    - ready_for_review
+  schedule:
+    - cron: '0 5 * * *'
+  workflow_dispatch: {}
+jobs:
+  start-proxy:
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+        - os: ubuntu-latest
+          version: linked
+        - os: macos-latest
+          version: linked
+        - os: windows-latest
+          version: linked
+    name: Start proxy
+    permissions:
+      contents: read
+      security-events: write
+    timeout-minutes: 45
+    runs-on: ${{ matrix.os }}
+    steps:
+    - name: Setup Python on macOS
+      uses: actions/setup-python@v5
+      if: runner.os == 'macOS' && matrix.version == 'stable-v2.14.6'
+      with:
+        python-version: '3.11'
+    - name: Check out repository
+      uses: actions/checkout@v4
+    - name: Prepare test
+      id: prepare-test
+      uses: ./.github/actions/prepare-test
+      with:
+        version: ${{ matrix.version }}
+        use-all-platform-bundle: 'false'
+        setup-kotlin: 'true'
+    - uses: ./../action/init
+      with:
+        languages: csharp
+        tools: ${{ steps.prepare-test.outputs.tools-url }}
+
+    - name: Setup proxy for registries
+      id: proxy
+      uses: ./../action/start-proxy
+      with:
+        registry_secrets: '[{ "type": "nuget_feed", "url": "https://api.nuget.org/v3/index.json"
+          }]'
+
+    - name: Print proxy outputs
+      run: |
+        echo "${{ steps.proxy.outputs.proxy_host }}"
+        echo "${{ steps.proxy.outputs.proxy_port }}"
+        echo "${{ steps.proxy.outputs.proxy_urls }}"
+
+    - name: Fail if proxy outputs are not set
+      if: (!steps.proxy.outputs.proxy_host) || (!steps.proxy.outputs.proxy_port)
+        || (!steps.proxy.outputs.proxy_ca_certificate) || (!steps.proxy.outputs.proxy_urls)
+      run: exit 1
+    env:
+      CODEQL_ACTION_TEST_MODE: true
15 .github/workflows/__test-proxy.yml generated vendored
@@ -29,6 +29,8 @@ jobs:
         include:
         - os: ubuntu-latest
           version: linked
+        - os: ubuntu-latest
+          version: nightly-latest
     name: Proxy test
     permissions:
       contents: read
@@ -36,6 +38,18 @@
     timeout-minutes: 45
     runs-on: ${{ matrix.os }}
     steps:
+    # These steps are required to initialise the `gh` cli in a container that doesn't
+    # come pre-installed with it. The reason for that is that this is later
+    # needed by the `prepare-test` workflow to find the latest release of CodeQL.
+    - name: Set up GitHub CLI
+      run: |
+        apt update
+        apt install -y curl libreadline8 gnupg2 software-properties-common zstd
+        curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
+        apt-key add /usr/share/keyrings/githubcli-archive-keyring.gpg
+        apt-add-repository https://cli.github.com/packages
+        apt install -y gh
+      env: {}
     - name: Setup Python on macOS
       uses: actions/setup-python@v5
       if: runner.os == 'macOS' && matrix.version == 'stable-v2.14.6'
@@ -60,7 +74,6 @@
       CODEQL_ACTION_TEST_MODE: true
     container:
       image: ubuntu:22.04
-      options: --dns 127.0.0.1
     services:
       squid-proxy:
         image: ubuntu/squid:latest
@@ -3,7 +3,6 @@
 name: PR Check - Debug artifacts after failure
 env:
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  CODEQL_ACTION_ARTIFACT_V4_UPGRADE: true
 on:
   push:
     branches:
99 .github/workflows/debug-artifacts-legacy.yml vendored
@@ -1,99 +0,0 @@
-# Checks logs, SARIF, and database bundle debug artifacts exist and are accessible
-# with download-artifact@v3 when CODEQL_ACTION_ARTIFACT_V4_UPGRADE is set to false.
-name: PR Check - Debug artifact upload using artifact@v2
-env:
-  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  CODEQL_ACTION_ARTIFACT_V4_UPGRADE: false
-on:
-  push:
-    branches:
-    - main
-    - releases/v*
-  pull_request:
-    types:
-    - opened
-    - synchronize
-    - reopened
-    - ready_for_review
-  schedule:
-    - cron: '0 5 * * *'
-  workflow_dispatch: {}
-jobs:
-  upload-artifacts:
-    strategy:
-      fail-fast: false
-      matrix:
-        version:
-        - stable-v2.14.6
-        - stable-v2.15.5
-        - stable-v2.16.6
-        - stable-v2.17.6
-        - stable-v2.18.4
-        - default
-        - linked
-        - nightly-latest
-    name: Upload debug artifacts
-    env:
-      CODEQL_ACTION_TEST_MODE: true
-    timeout-minutes: 45
-    runs-on: ubuntu-latest
-    steps:
-    - name: Check out repository
-      uses: actions/checkout@v4
-    - name: Prepare test
-      id: prepare-test
-      uses: ./.github/actions/prepare-test
-      with:
-        version: ${{ matrix.version }}
-    - uses: actions/setup-go@v5
-      with:
-        go-version: ^1.13.1
-    - uses: ./../action/init
-      id: init
-      with:
-        tools: ${{ steps.prepare-test.outputs.tools-url }}
-        debug: true
-        debug-artifact-name: my-debug-artifacts
-        debug-database-name: my-db
-        # We manually exclude Swift from the languages list here, as it is not supported on Ubuntu
-        languages: cpp,csharp,go,java,javascript,python,ruby
-    - name: Build code
-      shell: bash
-      run: ./build.sh
-    - uses: ./../action/analyze
-      id: analysis
-  download-and-check-artifacts:
-    name: Download and check debug artifacts
-    needs: upload-artifacts
-    timeout-minutes: 45
-    runs-on: ubuntu-latest
-    steps:
-    - name: Download all artifacts
-      uses: actions/download-artifact@v3
-    - name: Check expected artifacts exist
-      shell: bash
-      run: |
-        VERSIONS="stable-v2.14.6 stable-v2.15.5 stable-v2.16.6 stable-v2.17.6 stable-v2.18.4 default linked nightly-latest"
-        LANGUAGES="cpp csharp go java javascript python"
-        for version in $VERSIONS; do
-          pushd "./my-debug-artifacts-${version//./}"
-          echo "Artifacts from version $version:"
-          for language in $LANGUAGES; do
-            echo "- Checking $language"
-            if [[ ! -f "$language.sarif" ]] ; then
-              echo "Missing a SARIF file for $language"
-              exit 1
-            fi
-            if [[ ! -f "my-db-$language.zip" ]] ; then
-              echo "Missing a database bundle for $language"
-              exit 1
-            fi
-            if [[ ! -d "$language/log" ]] ; then
-              echo "Missing logs for $language"
-              exit 1
-            fi
-          done
-          popd
-        done
-    env:
-      GO111MODULE: auto
5 .github/workflows/debug-artifacts.yml vendored
@@ -2,7 +2,6 @@
 name: PR Check - Debug artifact upload
 env:
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  CODEQL_ACTION_ARTIFACT_V4_UPGRADE: true
 on:
   push:
     branches:
@@ -23,11 +22,11 @@ jobs:
       fail-fast: false
       matrix:
        version:
-        - stable-v2.14.6
         - stable-v2.15.5
         - stable-v2.16.6
         - stable-v2.17.6
         - stable-v2.18.4
+        - stable-v2.19.4
         - default
         - linked
         - nightly-latest
@@ -72,7 +71,7 @@ jobs:
     - name: Check expected artifacts exist
       shell: bash
       run: |
-        VERSIONS="stable-v2.14.6 stable-v2.15.5 stable-v2.16.6 stable-v2.17.6 stable-v2.18.4 default linked nightly-latest"
+        VERSIONS="stable-v2.15.5 stable-v2.16.6 stable-v2.17.6 stable-v2.18.4 stable-v2.19.4 default linked nightly-latest"
         LANGUAGES="cpp csharp go java javascript python"
         for version in $VERSIONS; do
           pushd "./my-debug-artifacts-${version//./}"
10 .github/workflows/post-release-mergeback.yml vendored
@@ -21,6 +21,7 @@ on:
 jobs:
   merge-back:
     runs-on: ubuntu-latest
+    environment: Automation
     if: github.repository == 'github/codeql-action'
     env:
       BASE_BRANCH: "${{ github.event.inputs.baseBranch || 'main' }}"
@@ -162,11 +163,18 @@
           --assignee "${GITHUB_ACTOR}" \
           --draft

+    - name: Generate token
+      uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755
+      id: app-token
+      with:
+        app-id: ${{ vars.AUTOMATION_APP_ID }}
+        private-key: ${{ secrets.AUTOMATION_PRIVATE_KEY }}
+
     - name: Create the GitHub release
       env:
         PARTIAL_CHANGELOG: "${{ runner.temp }}/partial_changelog.md"
         VERSION: "${{ steps.getVersion.outputs.version }}"
-        GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        GH_TOKEN: ${{ steps.app-token.outputs.token }}
       run: |
         # Do not mark this release as latest. The most recent CLI release must be marked as latest.
         gh release create \
2 .github/workflows/update-release-branch.yml vendored
@@ -116,7 +116,7 @@ jobs:
       TARGET_BRANCH: ${{ matrix.target_branch }}
     steps:
     - name: Generate token
-      uses: actions/create-github-app-token@5d869da34e18e7287c1daad50e0b8ea0f506ce69
+      uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755
       id: app-token
       with:
         app-id: ${{ vars.AUTOMATION_APP_ID }}
18 CHANGELOG.md
@@ -6,7 +6,25 @@ Note that the only difference between `v2` and `v3` of the CodeQL Action is the

 ## [UNRELEASED]

 No user facing changes.

+## 3.28.0 - 20 Dec 2024
+
+- Bump the minimum CodeQL bundle version to 2.15.5. [#2655](https://github.com/github/codeql-action/pull/2655)
+- Don't fail in the unusual case that a file is on the search path. [#2660](https://github.com/github/codeql-action/pull/2660).
+
+## 3.27.9 - 12 Dec 2024
+
+No user facing changes.
+
+## 3.27.8 - 12 Dec 2024
+
+- Fixed an issue where streaming the download and extraction of the CodeQL bundle did not respect proxy settings. [#2624](https://github.com/github/codeql-action/pull/2624)
+
+## 3.27.7 - 10 Dec 2024
+
+- We are rolling out a change in December 2024 that will extract the CodeQL bundle directly to the toolcache to improve performance. [#2631](https://github.com/github/codeql-action/pull/2631)
+- Update default CodeQL bundle version to 2.20.0. [#2636](https://github.com/github/codeql-action/pull/2636)
+
 ## 3.27.6 - 03 Dec 2024
@@ -81,9 +81,8 @@ We typically release new minor versions of the CodeQL Action and Bundle when a n
 | `v3.25.11` | `2.17.6` | Enterprise Server 3.14 | |
 | `v3.24.11` | `2.16.6` | Enterprise Server 3.13 | |
 | `v3.22.12` | `2.15.5` | Enterprise Server 3.12 | |
-| `v2.22.1` | `2.14.6` | Enterprise Server 3.11 | Supports CodeQL Action v3, but did not ship with CodeQL Action v3. For more information, see "[Code scanning: deprecation of CodeQL Action v2](https://github.blog/changelog/2024-01-12-code-scanning-deprecation-of-codeql-action-v2/#users-of-github-enterprise-server-311)." |

-CodeQL Action v2 will stop receiving updates when GHES 3.11 is deprecated.
+CodeQL Action v2 has stopped receiving updates now that GHES 3.11 is deprecated.

 See the full list of GHES release and deprecation dates at [GitHub Enterprise Server releases](https://docs.github.com/en/enterprise-server/admin/all-releases#releases-of-github-enterprise-server).
323 lib/actions-util.js generated
@@ -33,15 +33,13 @@ var __importStar = (this && this.__importStar) || (function () {
     };
 })();
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.restoreInputs = exports.persistInputs = exports.CommandInvocationError = exports.getFileType = exports.FileCmdNotFoundError = exports.decodeGitFilePath = exports.getGitDiffHunkHeaders = exports.getAllGitMergeBases = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = exports.getOptionalInput = exports.getRequiredInput = void 0;
+exports.restoreInputs = exports.persistInputs = exports.CommandInvocationError = exports.getFileType = exports.FileCmdNotFoundError = exports.getOptionalInput = exports.getRequiredInput = void 0;
 exports.getTemporaryDirectory = getTemporaryDirectory;
-exports.getRef = getRef;
 exports.getActionVersion = getActionVersion;
 exports.getWorkflowEventName = getWorkflowEventName;
 exports.isRunningLocalAction = isRunningLocalAction;
 exports.getRelativeScriptPath = getRelativeScriptPath;
 exports.getWorkflowEvent = getWorkflowEvent;
-exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
 exports.printDebugLogs = printDebugLogs;
 exports.getUploadValue = getUploadValue;
 exports.getWorkflowRunID = getWorkflowRunID;
@@ -55,7 +53,7 @@ const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const core = __importStar(require("@actions/core"));
 const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
-const safeWhich = __importStar(require("@chrisgavin/safe-which"));
+const io = __importStar(require("@actions/io"));
 const util_1 = require("./util");
 // eslint-disable-next-line import/no-commonjs, @typescript-eslint/no-require-imports
 const pkg = require("../package.json");
@@ -90,297 +88,6 @@ function getTemporaryDirectory() {
         ? value
         : (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
 }
-async function runGitCommand(checkoutPath, args, customErrorMessage) {
-    let stdout = "";
-    let stderr = "";
-    core.debug(`Running git command: git ${args.join(" ")}`);
-    try {
-        await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), args, {
-            silent: true,
-            listeners: {
-                stdout: (data) => {
-                    stdout += data.toString();
-                },
-                stderr: (data) => {
-                    stderr += data.toString();
-                },
-            },
-            cwd: checkoutPath,
-        }).exec();
-        return stdout;
-    }
-    catch (error) {
-        let reason = stderr;
-        if (stderr.includes("not a git repository")) {
-            reason =
-                "The checkout path provided to the action does not appear to be a git repository.";
-        }
-        core.info(`git call failed. ${customErrorMessage} Error: ${reason}`);
-        throw error;
-    }
-}
-/**
- * Gets the SHA of the commit that is currently checked out.
- */
-const getCommitOid = async function (checkoutPath, ref = "HEAD") {
-    // Try to use git to get the current commit SHA. If that fails then
-    // log but otherwise silently fall back to using the SHA from the environment.
-    // The only time these two values will differ is during analysis of a PR when
-    // the workflow has changed the current commit to the head commit instead of
-    // the merge commit, which must mean that git is available.
-    // Even if this does go wrong, it's not a huge problem for the alerts to
-    // reported on the merge commit.
-    try {
-        const stdout = await runGitCommand(checkoutPath, ["rev-parse", ref], "Continuing with commit SHA from user input or environment.");
-        return stdout.trim();
-    }
-    catch {
-        return (0, exports.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
-    }
-};
-exports.getCommitOid = getCommitOid;
-/**
- * If the action was triggered by a pull request, determine the commit sha at
- * the head of the base branch, using the merge commit that this workflow analyzes.
- * Returns undefined if run by other triggers or the base branch commit cannot be
- * determined.
- */
-const determineBaseBranchHeadCommitOid = async function (checkoutPathOverride) {
-    if (getWorkflowEventName() !== "pull_request") {
-        return undefined;
-    }
-    const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
-    const checkoutPath = checkoutPathOverride ?? (0, exports.getOptionalInput)("checkout_path");
-    try {
-        let commitOid = "";
-        let baseOid = "";
-        let headOid = "";
-        const stdout = await runGitCommand(checkoutPath, ["show", "-s", "--format=raw", mergeSha], "Will calculate the base branch SHA on the server.");
-        for (const data of stdout.split("\n")) {
-            if (data.startsWith("commit ") && commitOid === "") {
-                commitOid = data.substring(7);
-            }
-            else if (data.startsWith("parent ")) {
-                if (baseOid === "") {
-                    baseOid = data.substring(7);
-                }
-                else if (headOid === "") {
-                    headOid = data.substring(7);
-                }
-            }
-        }
-        // Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
-        if (commitOid === mergeSha &&
-            headOid.length === 40 &&
-            baseOid.length === 40) {
-            return baseOid;
-        }
-        return undefined;
-    }
-    catch {
-        return undefined;
-    }
-};
-exports.determineBaseBranchHeadCommitOid = determineBaseBranchHeadCommitOid;
-/**
- * Deepen the git history of the given ref by one level. Errors are logged.
- *
- * This function uses the `checkout_path` to determine the repository path and
- * works only when called from `analyze` or `upload-sarif`.
- */
-const deepenGitHistory = async function () {
-    try {
-        await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", "--deepen=1"], "Cannot deepen the shallow repository.");
-    }
-    catch {
-        // Errors are already logged by runGitCommand()
-    }
-};
-exports.deepenGitHistory = deepenGitHistory;
-/**
- * Fetch the given remote branch. Errors are logged.
- *
- * This function uses the `checkout_path` to determine the repository path and
- * works only when called from `analyze` or `upload-sarif`.
- */
-const gitFetch = async function (branch, extraFlags) {
-    try {
-        await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`], `Cannot fetch ${branch}.`);
-    }
-    catch {
-        // Errors are already logged by runGitCommand()
-    }
-};
-exports.gitFetch = gitFetch;
-/**
- * Compute the all merge bases between the given refs. Returns an empty array
- * if no merge base is found, or if there is an error.
- *
- * This function uses the `checkout_path` to determine the repository path and
- * works only when called from `analyze` or `upload-sarif`.
- */
-const getAllGitMergeBases = async function (refs) {
-    try {
-        const stdout = await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["merge-base", "--all", ...refs], `Cannot get merge base of ${refs}.`);
-        return stdout.trim().split("\n");
-    }
-    catch {
-        return [];
-    }
-};
-exports.getAllGitMergeBases = getAllGitMergeBases;
-/**
- * Compute the diff hunk headers between the two given refs.
- *
- * This function uses the `checkout_path` to determine the repository path and
- * works only when called from `analyze` or `upload-sarif`.
- *
- * @returns an array of diff hunk headers (one element per line), or undefined
- * if the action was not triggered by a pull request, or if the diff could not
- * be determined.
- */
-const getGitDiffHunkHeaders = async function (fromRef, toRef) {
-    let stdout = "";
-    try {
-        stdout = await runGitCommand((0, exports.getOptionalInput)("checkout_path"), [
-            "-c",
-            "core.quotePath=false",
-            "diff",
-            "--no-renames",
-            "--irreversible-delete",
-            "-U0",
-            fromRef,
-            toRef,
-        ], `Cannot get diff from ${fromRef} to ${toRef}.`);
-    }
-    catch {
-        return undefined;
-    }
-    const headers = [];
-    for (const line of stdout.split("\n")) {
-        if (line.startsWith("--- ") ||
-            line.startsWith("+++ ") ||
-            line.startsWith("@@ ")) {
-            headers.push(line);
-        }
-    }
-    return headers;
-};
-exports.getGitDiffHunkHeaders = getGitDiffHunkHeaders;
-/**
- * Decode, if necessary, a file path produced by Git. See
- * https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath
- * for details on how Git encodes file paths with special characters.
- *
- * This function works only for Git output with `core.quotePath=false`.
- */
-const decodeGitFilePath = function (filePath) {
-    if (filePath.startsWith('"') && filePath.endsWith('"')) {
-        filePath = filePath.substring(1, filePath.length - 1);
-        return filePath.replace(/\\([abfnrtv\\"]|[0-7]{1,3})/g, (_match, seq) => {
-            switch (seq[0]) {
-                case "a":
-                    return "\x07";
-                case "b":
-                    return "\b";
-                case "f":
-                    return "\f";
-                case "n":
-                    return "\n";
-                case "r":
-                    return "\r";
-                case "t":
-                    return "\t";
-                case "v":
-                    return "\v";
-                case "\\":
-                    return "\\";
-                case '"':
-                    return '"';
-                default:
-                    // Both String.fromCharCode() and String.fromCodePoint() works only
-                    // for constructing an entire character at once. If a Unicode
-                    // character is encoded as a sequence of escaped bytes, calling these
-                    // methods sequentially on the individual byte values would *not*
-                    // produce the original multi-byte Unicode character. As a result,
-                    // this implementation works only with the Git option core.quotePath
-                    // set to false.
-                    return String.fromCharCode(parseInt(seq, 8));
-            }
-        });
-    }
-    return filePath;
-};
-exports.decodeGitFilePath = decodeGitFilePath;
-/**
- * Get the ref currently being analyzed.
- */
-async function getRef() {
-    // Will be in the form "refs/heads/master" on a push event
-    // or in the form "refs/pull/N/merge" on a pull_request event
-    const refInput = (0, exports.getOptionalInput)("ref");
-    const shaInput = (0, exports.getOptionalInput)("sha");
-    const checkoutPath = (0, exports.getOptionalInput)("checkout_path") ||
-        (0, exports.getOptionalInput)("source-root") ||
-        (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE");
-    const hasRefInput = !!refInput;
-    const hasShaInput = !!shaInput;
-    // If one of 'ref' or 'sha' are provided, both are required
-    if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
-        throw new util_1.ConfigurationError("Both 'ref' and 'sha' are required if one of them is provided.");
-    }
-    const ref = refInput || getRefFromEnv();
-    const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
-    // If the ref is a user-provided input, we have to skip logic
-    // and assume that it is really where they want to upload the results.
-    if (refInput) {
-        return refInput;
-    }
-    // For pull request refs we want to detect whether the workflow
-    // has run `git checkout HEAD^2` to analyze the 'head' ref rather
-    // than the 'merge' ref. If so, we want to convert the ref that
-    // we report back.
-    const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
-    if (!pull_ref_regex.test(ref)) {
-        return ref;
-    }
-    const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
-    // in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
-    // in actions/checkout@v1 this may not be true as it checks out the repository
-    // using GITHUB_REF. There is a subtle race condition where
-    // git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
-    // git rev-parse GITHUB_REF == git rev-parse HEAD instead.
-    const hasChangedRef = sha !== head &&
-        (await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
-    if (hasChangedRef) {
-        const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
-        core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
-        return newRef;
-    }
-    else {
-        return ref;
-    }
-}
-function getRefFromEnv() {
-    // To workaround a limitation of Actions dynamic workflows not setting
-    // the GITHUB_REF in some cases, we accept also the ref within the
-    // CODE_SCANNING_REF variable. When possible, however, we prefer to use
-    // the GITHUB_REF as that is a protected variable and cannot be overwritten.
-    let refEnv;
-    try {
-        refEnv = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
-    }
-    catch (e) {
-        // If the GITHUB_REF is not set, we try to rescue by getting the
-        // CODE_SCANNING_REF.
-        const maybeRef = process.env["CODE_SCANNING_REF"];
-        if (maybeRef === undefined || maybeRef.length === 0) {
-            throw e;
-        }
-        refEnv = maybeRef;
-    }
-    return refEnv;
-}
 function getActionVersion() {
     return pkg.version;
 }
@@ -420,30 +127,6 @@ function getWorkflowEvent() {
         throw new Error(`Unable to read workflow event JSON from ${eventJsonFile}: ${e}`);
     }
 }
-function removeRefsHeadsPrefix(ref) {
-    return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
-}
-/**
- * Returns whether we are analyzing the default branch for the repository.
- *
- * This first checks the environment variable `CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH`. This
- * environment variable can be set in cases where repository information might not be available, for
- * example dynamic workflows.
- */
-async function isAnalyzingDefaultBranch() {
-    if (process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH === "true") {
-        return true;
-    }
-    // Get the current ref and trim and refs/heads/ prefix
-    let currentRef = await getRef();
-    currentRef = removeRefsHeadsPrefix(currentRef);
-    const event = getWorkflowEvent();
-    let defaultBranch = event?.repository?.default_branch;
-    if (getWorkflowEventName() === "schedule") {
-        defaultBranch = removeRefsHeadsPrefix(getRefFromEnv());
-    }
-    return currentRef === defaultBranch;
-}
 async function printDebugLogs(config) {
     for (const language of config.languages) {
         const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
@@ -536,7 +219,7 @@ const getFileType = async (filePath) => {
     let stdout = "";
     let fileCmdPath;
     try {
-        fileCmdPath = await safeWhich.safeWhich("file");
+        fileCmdPath = await io.which("file", true);
     }
     catch (e) {
         throw new FileCmdNotFoundError(`The \`file\` program is required, but does not appear to be installed. Please install it: ${e}`);
File diff suppressed because one or more lines are too long
257 lib/actions-util.test.js generated
@@ -1,169 +1,14 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-        desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-const fs = __importStar(require("fs"));
-const path = __importStar(require("path"));
-const core = __importStar(require("@actions/core"));
 const ava_1 = __importDefault(require("ava"));
-const sinon = __importStar(require("sinon"));
-const actionsUtil = __importStar(require("./actions-util"));
 const api_client_1 = require("./api-client");
 const environment_1 = require("./environment");
 const testing_utils_1 = require("./testing-utils");
 const util_1 = require("./util");
 (0, testing_utils_1.setupTests)(ava_1.default);
-(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
-    process.env["GITHUB_REF"] = "";
-    await t.throwsAsync(actionsUtil.getRef);
-});
-(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
-    await (0, util_1.withTmpDir)(async (tmpDir) => {
-        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const expectedRef = "refs/pull/1/merge";
-        const currentSha = "a".repeat(40);
-        process.env["GITHUB_REF"] = expectedRef;
-        process.env["GITHUB_SHA"] = currentSha;
-        const callback = sinon.stub(actionsUtil, "getCommitOid");
-        callback.withArgs("HEAD").resolves(currentSha);
-        const actualRef = await actionsUtil.getRef();
-        t.deepEqual(actualRef, expectedRef);
-        callback.restore();
-    });
-});
-(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
-    await (0, util_1.withTmpDir)(async (tmpDir) => {
-        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const expectedRef = "refs/pull/1/merge";
-        process.env["GITHUB_REF"] = expectedRef;
-        process.env["GITHUB_SHA"] = "b".repeat(40);
-        const sha = "a".repeat(40);
-        const callback = sinon.stub(actionsUtil, "getCommitOid");
-        callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
-        callback.withArgs("HEAD").resolves(sha);
-        const actualRef = await actionsUtil.getRef();
-        t.deepEqual(actualRef, expectedRef);
-        callback.restore();
-    });
-});
-(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
-    await (0, util_1.withTmpDir)(async (tmpDir) => {
-        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        process.env["GITHUB_REF"] = "refs/pull/1/merge";
-        process.env["GITHUB_SHA"] = "a".repeat(40);
-        const callback = sinon.stub(actionsUtil, "getCommitOid");
-        callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
-        callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
-        const actualRef = await actionsUtil.getRef();
-        t.deepEqual(actualRef, "refs/pull/1/head");
-        callback.restore();
-    });
-});
-(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
-    await (0, util_1.withTmpDir)(async (tmpDir) => {
-        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
-        getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
-        getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
-        // These values are be ignored
-        process.env["GITHUB_REF"] = "refs/pull/1/merge";
-        process.env["GITHUB_SHA"] = "a".repeat(40);
-        const callback = sinon.stub(actionsUtil, "getCommitOid");
-        callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
-        callback.withArgs("HEAD").resolves("b".repeat(40));
-        const actualRef = await actionsUtil.getRef();
-        t.deepEqual(actualRef, "refs/pull/2/merge");
-        callback.restore();
-        getAdditionalInputStub.restore();
-    });
-});
-(0, ava_1.default)("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t) => {
-    await (0, util_1.withTmpDir)(async (tmpDir) => {
-        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const expectedRef = "refs/pull/1/HEAD";
-        const currentSha = "a".repeat(40);
-        process.env["CODE_SCANNING_REF"] = expectedRef;
-        process.env["GITHUB_REF"] = "";
-        process.env["GITHUB_SHA"] = currentSha;
-        const actualRef = await actionsUtil.getRef();
-        t.deepEqual(actualRef, expectedRef);
-    });
-});
-(0, ava_1.default)("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", async (t) => {
-    await (0, util_1.withTmpDir)(async (tmpDir) => {
-        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const expectedRef = "refs/pull/1/merge";
-        const currentSha = "a".repeat(40);
-        process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD";
-        process.env["GITHUB_REF"] = expectedRef;
-        process.env["GITHUB_SHA"] = currentSha;
-        const actualRef = await actionsUtil.getRef();
-        t.deepEqual(actualRef, expectedRef);
-    });
-});
-(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
-    await (0, util_1.withTmpDir)(async (tmpDir) => {
-        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
-        getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
-        await t.throwsAsync(async () => {
-            await actionsUtil.getRef();
-        }, {
-            instanceOf: Error,
-            message: "Both 'ref' and 'sha' are required if one of them is provided.",
-        });
-        getAdditionalInputStub.restore();
-    });
-});
-(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
-    await (0, util_1.withTmpDir)(async (tmpDir) => {
-        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        process.env["GITHUB_WORKSPACE"] = "/tmp";
-        const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
-        getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
-        await t.throwsAsync(async () => {
-            await actionsUtil.getRef();
-        }, {
-            instanceOf: Error,
-            message: "Both 'ref' and 'sha' are required if one of them is provided.",
-        });
-        getAdditionalInputStub.restore();
-    });
-});
 (0, ava_1.default)("computeAutomationID()", async (t) => {
     let actualAutomationID = (0, api_client_1.computeAutomationID)(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
     t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
@@ -184,106 +29,4 @@ const util_1 = require("./util");
     (0, util_1.initializeEnvironment)("1.2.3");
     t.deepEqual(process.env[environment_1.EnvVar.VERSION], "1.2.3");
 });
-(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
-    process.env["GITHUB_EVENT_NAME"] = "push";
-    process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
-    t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
-    process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "false";
-    await (0, util_1.withTmpDir)(async (tmpDir) => {
-        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-        const envFile = path.join(tmpDir, "event.json");
-        fs.writeFileSync(envFile, JSON.stringify({
-            repository: {
-                default_branch: "main",
-            },
-        }));
-        process.env["GITHUB_EVENT_PATH"] = envFile;
-        process.env["GITHUB_REF"] = "main";
-        process.env["GITHUB_SHA"] = "1234";
-        t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
-        process.env["GITHUB_REF"] = "refs/heads/main";
-        t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
-        process.env["GITHUB_REF"] = "feature";
-        t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), false);
-        fs.writeFileSync(envFile, JSON.stringify({
-            schedule: "0 0 * * *",
-        }));
-        process.env["GITHUB_EVENT_NAME"] = "schedule";
-        process.env["GITHUB_REF"] = "refs/heads/main";
-        t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
-        const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
-        getAdditionalInputStub
-            .withArgs("ref")
-            .resolves("refs/heads/something-else");
-        getAdditionalInputStub
-            .withArgs("sha")
-            .resolves("0000000000000000000000000000000000000000");
-        process.env["GITHUB_EVENT_NAME"] = "schedule";
-        process.env["GITHUB_REF"] = "refs/heads/main";
-        t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), false);
-        getAdditionalInputStub.restore();
-    });
-});
-(0, ava_1.default)("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => {
-    const infoStub = sinon.stub(core, "info");
-    process.env["GITHUB_EVENT_NAME"] = "hucairz";
-    process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
-    const result = await actionsUtil.determineBaseBranchHeadCommitOid(__dirname);
-    t.deepEqual(result, undefined);
-    t.deepEqual(0, infoStub.callCount);
-    infoStub.restore();
-});
-(0, ava_1.default)("determineBaseBranchHeadCommitOid not git repository", async (t) => {
-    const infoStub = sinon.stub(core, "info");
-    process.env["GITHUB_EVENT_NAME"] = "pull_request";
-    process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
-    await (0, util_1.withTmpDir)(async (tmpDir) => {
-        await actionsUtil.determineBaseBranchHeadCommitOid(tmpDir);
-    });
-    t.deepEqual(1, infoStub.callCount);
-    t.deepEqual(infoStub.firstCall.args[0], "git call failed. Will calculate the base branch SHA on the server. Error: " +
-        "The checkout path provided to the action does not appear to be a git repository.");
-    infoStub.restore();
-});
-(0, ava_1.default)("determineBaseBranchHeadCommitOid other error", async (t) => {
-    const infoStub = sinon.stub(core, "info");
-    process.env["GITHUB_EVENT_NAME"] = "pull_request";
-    process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
-    const result = await actionsUtil.determineBaseBranchHeadCommitOid(path.join(__dirname, "../../i-dont-exist"));
-    t.deepEqual(result, undefined);
-    t.deepEqual(1, infoStub.callCount);
-    t.assert(infoStub.firstCall.args[0].startsWith("git call failed. Will calculate the base branch SHA on the server. Error: "));
-    t.assert(!infoStub.firstCall.args[0].endsWith("The checkout path provided to the action does not appear to be a git repository."));
-    infoStub.restore();
-});
-(0, ava_1.default)("decodeGitFilePath unquoted strings", async (t) => {
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo"), "foo");
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo bar"), "foo bar");
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\\\bar"), "foo\\\\bar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('foo\\"bar'), 'foo\\"bar');
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\001bar"), "foo\\001bar");
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\abar"), "foo\\abar");
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\bbar"), "foo\\bbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\fbar"), "foo\\fbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\nbar"), "foo\\nbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\rbar"), "foo\\rbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\tbar"), "foo\\tbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\vbar"), "foo\\vbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath("\\a\\b\\f\\n\\r\\t\\v"), "\\a\\b\\f\\n\\r\\t\\v");
-});
-(0, ava_1.default)("decodeGitFilePath quoted strings", async (t) => {
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo"'), "foo");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo bar"'), "foo bar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\\\bar"'), "foo\\bar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\"bar"'), 'foo"bar');
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\001bar"'), "foo\x01bar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\abar"'), "foo\x07bar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\bbar"'), "foo\bbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\fbar"'), "foo\fbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\nbar"'), "foo\nbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\rbar"'), "foo\rbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\tbar"'), "foo\tbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\vbar"'), "foo\vbar");
-    t.deepEqual(actionsUtil.decodeGitFilePath('"\\a\\b\\f\\n\\r\\t\\v"'), "\x07\b\f\n\r\t\v");
-});
 //# sourceMappingURL=actions-util.test.js.map
File diff suppressed because one or more lines are too long
3 lib/analyze-action-env.test.js generated
@@ -42,6 +42,7 @@ const actionsUtil = __importStar(require("./actions-util"));
 const analyze = __importStar(require("./analyze"));
 const api = __importStar(require("./api-client"));
 const configUtils = __importStar(require("./config-utils"));
+const gitUtils = __importStar(require("./git-utils"));
 const statusReport = __importStar(require("./status-report"));
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
@@ -61,7 +62,7 @@ const util = __importStar(require("./util"));
         .stub(statusReport, "createStatusReportBase")
         .resolves({});
     sinon.stub(statusReport, "sendStatusReport").resolves();
-    sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+    sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
     const gitHubVersion = {
         type: util.GitHubVariant.DOTCOM,
     };
@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEnE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,sDAAwC;AACxC,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEhE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
3
lib/analyze-action-input.test.js
generated
@@ -42,6 +42,7 @@ const actionsUtil = __importStar(require("./actions-util"));
const analyze = __importStar(require("./analyze"));
const api = __importStar(require("./api-client"));
const configUtils = __importStar(require("./config-utils"));
const gitUtils = __importStar(require("./git-utils"));
const statusReport = __importStar(require("./status-report"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
@@ -77,7 +78,7 @@ const util = __importStar(require("./util"));
optionalInputStub.withArgs("cleanup-level").returns("none");
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";

@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,sDAAwC;AACxC,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAChE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
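The two test-file hunks above make the same substitution: the stub for isAnalyzingDefaultBranch moves from the actions-util module to the new git-utils module. A minimal sketch of the pattern, assuming ava and sinon are installed and the compiled lib/git-utils.js sits next to the test (the test title is illustrative):

const test = require("ava");
const sinon = require("sinon");
const gitUtils = require("./git-utils");

test("stubbing the relocated default-branch check", async (t) => {
  // After this change the helper lives in git-utils, so tests stub it there
  // rather than on actions-util.
  const stub = sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
  t.true(await gitUtils.isAnalyzingDefaultBranch());
  stub.restore();
});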
6
lib/analyze-action-post.js
generated
@@ -44,9 +44,7 @@ const api_client_1 = require("./api-client");
const config_utils_1 = require("./config-utils");
const debugArtifacts = __importStar(require("./debug-artifacts"));
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util_1 = require("./util");
async function runWrapper() {
try {
@@ -54,14 +52,12 @@ async function runWrapper() {
const logger = (0, logging_1.getActionsLogger)();
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
// Upload SARIF artifacts if we determine that this is a first-party analysis run.
// For third-party runs, this artifact will be uploaded in the `upload-sarif-post` step.
if (process.env[environment_1.EnvVar.INIT_ACTION_HAS_RUN] === "true") {
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
if (config !== undefined) {
await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type, features));
await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type));
}
}
}

@@ -1 +1 @@
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,iDAA2C;AAC3C,kEAAoD;AACpD,+CAAuC;AACvC,mDAA2C;AAC3C,uCAAwD;AACxD,6CAAkD;AAClD,iCAIgB;AAEhB,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QACjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QAEF,kFAAkF;QAClF,wFAAwF;QACxF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAC5B,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;YACF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;gBACzB,MAAM,IAAA,mBAAS,EAAC,yCAAyC,EAAE,GAAG,EAAE,CAC9D,cAAc,CAAC,4BAA4B,CACzC,MAAM,EACN,MAAM,CAAC,aAAa,CAAC,IAAI,EACzB,QAAQ,CACT,CACF,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,oCAAoC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC7D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,iDAA2C;AAC3C,kEAAoD;AACpD,+CAAuC;AACvC,uCAAwD;AACxD,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,kFAAkF;QAClF,wFAAwF;QACxF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAC5B,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;YACF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;gBACzB,MAAM,IAAA,mBAAS,EAAC,yCAAyC,EAAE,GAAG,EAAE,CAC9D,cAAc,CAAC,4BAA4B,CACzC,MAAM,EACN,MAAM,CAAC,aAAa,CAAC,IAAI,CAC1B,CACF,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,oCAAoC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC7D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
32
lib/analyze.js
generated
@@ -47,7 +47,7 @@ exports.runCleanup = runCleanup;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const perf_hooks_1 = require("perf_hooks");
const safe_which_1 = require("@chrisgavin/safe-which");
const io = __importStar(require("@actions/io"));
const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml"));
const actionsUtil = __importStar(require("./actions-util"));
@@ -56,6 +56,7 @@ const codeql_1 = require("./codeql");
const diagnostics_1 = require("./diagnostics");
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const gitUtils = __importStar(require("./git-utils"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const tools_features_1 = require("./tools-features");
@@ -162,7 +163,7 @@ async function setupDiffInformedQueryRun(baseRef, headRef, codeql, logger, featu
if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
return undefined;
}
return await (0, logging_1.withGroup)("Generating diff range extension pack", async () => {
return await (0, logging_1.withGroupAsync)("Generating diff range extension pack", async () => {
const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headRef, logger);
return writeDiffRangeDataExtensionPack(logger, diffRanges);
});
@@ -185,39 +186,44 @@ async function getPullRequestEditedDiffRanges(baseRef, headRef, logger) {
}
// To compute the merge bases between the base branch and the PR topic branch,
// we need to fetch the commit graph from the branch heads to those merge
// bases. The following 4-step procedure does so while limiting the amount of
// bases. The following 6-step procedure does so while limiting the amount of
// history fetched.
// Step 1: Deepen from the PR merge commit to the base branch head and the PR
// topic branch head, so that the PR merge commit is no longer considered a
// grafted commit.
await actionsUtil.deepenGitHistory();
await gitUtils.deepenGitHistory();
// Step 2: Fetch the base branch shallow history. This step ensures that the
// base branch name is present in the local repository. Normally the base
// branch name would be added by Step 4. However, if the base branch head is
// an ancestor of the PR topic branch head, Step 4 would fail without doing
// anything, so we need to fetch the base branch explicitly.
await actionsUtil.gitFetch(baseRef, ["--depth=1"]);
await gitUtils.gitFetch(baseRef, ["--depth=1"]);
// Step 3: Fetch the PR topic branch history, stopping when we reach commits
// that are reachable from the base branch head.
await actionsUtil.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);
await gitUtils.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);
// Step 4: Fetch the base branch history, stopping when we reach commits that
// are reachable from the PR topic branch head.
await actionsUtil.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
// Step 5: Deepen the history so that we have the merge bases between the base
await gitUtils.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
// Step 5: Repack the history to remove the shallow grafts that were added by
// the previous fetches. This step works around a bug that causes subsequent
// deepening fetches to fail with "fatal: error in object: unshallow <SHA>".
// See https://stackoverflow.com/q/63878612
await gitUtils.gitRepack(["-d"]);
// Step 6: Deepen the history so that we have the merge bases between the base
// branch and the PR topic branch.
await actionsUtil.deepenGitHistory();
await gitUtils.deepenGitHistory();
// To compute the exact same diff as GitHub would compute for the PR, we need
// to use the same merge base as GitHub. That is easy to do if there is only
// one merge base, which is by far the most common case. If there are multiple
// merge bases, we stop without producing a diff range.
const mergeBases = await actionsUtil.getAllGitMergeBases([baseRef, headRef]);
const mergeBases = await gitUtils.getAllGitMergeBases([baseRef, headRef]);
logger.info(`Merge bases: ${mergeBases.join(", ")}`);
if (mergeBases.length !== 1) {
logger.info("Cannot compute diff range because baseRef and headRef " +
`have ${mergeBases.length} merge bases (instead of exactly 1).`);
return undefined;
}
const diffHunkHeaders = await actionsUtil.getGitDiffHunkHeaders(mergeBases[0], headRef);
const diffHunkHeaders = await gitUtils.getGitDiffHunkHeaders(mergeBases[0], headRef);
if (diffHunkHeaders === undefined) {
return undefined;
}
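Taken together, the six steps above reduce to a short sequence of calls on the new git-utils module. A sketch, assuming the compiled lib/git-utils.js is importable and that baseRef and headRef name the PR base and topic branches; the wrapper function is illustrative, not part of the Action:

const gitUtils = require("./git-utils");

async function fetchMergeBaseHistory(baseRef, headRef) {
  await gitUtils.deepenGitHistory();                                   // Step 1
  await gitUtils.gitFetch(baseRef, ["--depth=1"]);                     // Step 2
  await gitUtils.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);  // Step 3
  await gitUtils.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);  // Step 4
  await gitUtils.gitRepack(["-d"]);                                    // Step 5
  await gitUtils.deepenGitHistory();                                   // Step 6
  // With the merge bases now fetched, a unique merge base yields a diff range.
  const mergeBases = await gitUtils.getAllGitMergeBases([baseRef, headRef]);
  return mergeBases.length === 1 ? mergeBases[0] : undefined;
}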
@@ -225,7 +231,7 @@ async function getPullRequestEditedDiffRanges(baseRef, headRef, logger) {
let changedFile = "";
for (const line of diffHunkHeaders) {
if (line.startsWith("+++ ")) {
const filePath = actionsUtil.decodeGitFilePath(line.substring(4));
const filePath = gitUtils.decodeGitFilePath(line.substring(4));
if (filePath.startsWith("b/")) {
// The file was edited: track all hunks in the file
changedFile = filePath.substring(2);
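The loop above only needs the `+++ ` side of each hunk header to know which file the following hunks belong to. A sketch of that check in isolation, assuming decodeGitFilePath from the compiled lib/git-utils.js; the helper name changedFileFromHeader is illustrative:

const gitUtils = require("./git-utils");

function changedFileFromHeader(line) {
  if (!line.startsWith("+++ ")) {
    return undefined;
  }
  const filePath = gitUtils.decodeGitFilePath(line.substring(4));
  // Only paths of the form "b/<file>" mark an edited file; headers such as
  // "+++ /dev/null" (for deleted files) are skipped.
  return filePath.startsWith("b/") ? filePath.substring(2) : undefined;
}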
@@ -431,7 +437,7 @@ async function warnIfGoInstalledAfterInit(config, logger) {
const goInitPath = process.env[environment_1.EnvVar.GO_BINARY_LOCATION];
if (process.env[environment_1.EnvVar.DID_AUTOBUILD_GOLANG] !== "true" &&
goInitPath !== undefined) {
const goBinaryPath = await (0, safe_which_1.safeWhich)("go");
const goBinaryPath = await io.which("go", true);
if (goInitPath !== goBinaryPath) {
logger.warning(`Expected \`which go\` to return ${goInitPath}, but got ${goBinaryPath}: please ensure that the correct version of Go is installed before the \`codeql-action/init\` Action is used.`);
(0, diagnostics_1.addDiagnostic)(config, languages_1.Language.go, (0, diagnostics_1.makeDiagnostic)("go/workflow/go-installed-after-codeql-init", "Go was installed after the `codeql-action/init` Action was run", {

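The substance of this hunk is the switch from safe-which to @actions/io for resolving the go binary. A sketch of the comparison, assuming goInitPath holds the path recorded by the init step; the standalone function is illustrative:

const io = require("@actions/io");

async function goMovedSinceInit(goInitPath) {
  // io.which with check=true rejects if no `go` binary can be found on PATH.
  const goBinaryPath = await io.which("go", true);
  return goInitPath !== undefined && goInitPath !== goBinaryPath;
}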
File diff suppressed because one or more lines are too long
3
lib/autobuild.js
generated
@@ -51,7 +51,8 @@ async function determineAutobuildLanguages(codeql, config, logger) {
if ((config.buildMode === util_1.BuildMode.None &&
(await codeql.supportsFeature(tools_features_1.ToolsFeature.TraceCommandUseBuildMode))) ||
config.buildMode === util_1.BuildMode.Manual) {
logger.info(`Using ${config.buildMode} build mode, nothing to autobuild.`);
logger.info(`Using build mode "${config.buildMode}", nothing to autobuild. ` +
`See ${doc_url_1.DocUrl.CODEQL_BUILD_MODES} for more information.`);
return undefined;
}
// Attempt to find a language to autobuild

@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAeA,kEA+FC;AAED,8CAqCC;AAED,oCAsBC;AA7KD,oDAAsC;AAEtC,iDAA6E;AAC7E,6CAAgD;AAChD,qCAA6C;AAE7C,uCAAmC;AACnC,+CAAuC;AACvC,mDAAmE;AACnE,2CAAyD;AAEzD,6CAAkD;AAClD,qDAAgD;AAChD,iCAAwD;AAEjD,KAAK,UAAU,2BAA2B,CAC/C,MAAc,EACd,MAA0B,EAC1B,MAAc;IAEd,IACE,CAAC,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,IAAI;QAClC,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,CAAC;QACxE,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,MAAM,EACrC,CAAC;QACD,MAAM,CAAC,IAAI,CAAC,SAAS,MAAM,CAAC,SAAS,oCAAoC,CAAC,CAAC;QAC3E,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,CAAC,CACpB,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE,CAAC;QACxB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC;QACjD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;IACjD,CAAC;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE,CAAC;QACrE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,OAAO,gBAAM,CAAC,4BAA4B,wBAAwB,CACrE,CAAC;IACJ,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,MAAM,GAAG,6BAAa,CAAC,uBAAO,CAAC,yBAAyB,CAAC,CAAC,MAAM,CAAC;IACvE,MAAM,WAAW,GAAG,4CAA4C,CAAC;IACjE,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IACF,IAAI,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,yBAAyB,EAAE,MAAM,CAAC,EAAE,CAAC;QACvE,yEAAyE;QACzE,IACE,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,KAAK,aAAa;YACnD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,MAAM,EAC9B,CAAC;YACD,MAAM,CAAC,IAAI,CACT,aAAa,WAAW,sCACtB,IAAA,mCAAoB,GAAE,KAAK,SAAS;gBAClC,CAAC,CAAC,8BAA8B,MAAM,yDAAyD,gBAAM,CAAC,oBAAoB,wBAAwB;gBAClJ,CAAC,CAAC,EACN,EAAE,CACH,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACvC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CACT,YAAY,WAAW,yCAAyC,MAAM,yCAAyC,gBAAM,CAAC,oBAAoB,wBAAwB,CACnK,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtC,CAAC;IACH,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,aAAa,WAAW,GAAG,CAAC,CAAC;QACzC,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,YAAY,CAChC,MAA0B,EAC1B,QAAkB,EAClB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,GAAG,EAAE,CAAC;QAC9B,MAAM,iBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,IACE,MAAM,CAAC,SAAS;QAChB,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,EACrE,CAAC;QACD,MAAM,MAAM,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACvD,CAAC;SAAM,CAAC;QACN,MAAM,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC9C,CAAC;IACD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE,CAAC;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAAC,C
AAC;IAC3D,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAeA,kEAkGC;AAED,8CAqCC;AAED,oCAsBC;AAhLD,oDAAsC;AAEtC,iDAA6E;AAC7E,6CAAgD;AAChD,qCAA6C;AAE7C,uCAAmC;AACnC,+CAAuC;AACvC,mDAAmE;AACnE,2CAAyD;AAEzD,6CAAkD;AAClD,qDAAgD;AAChD,iCAAwD;AAEjD,KAAK,UAAU,2BAA2B,CAC/C,MAAc,EACd,MAA0B,EAC1B,MAAc;IAEd,IACE,CAAC,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,IAAI;QAClC,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,CAAC;QACxE,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,MAAM,EACrC,CAAC;QACD,MAAM,CAAC,IAAI,CACT,qBAAqB,MAAM,CAAC,SAAS,2BAA2B;YAC9D,OAAO,gBAAM,CAAC,kBAAkB,wBAAwB,CAC3D,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,CAAC,CACpB,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE,CAAC;QACxB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC;QACjD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;IACjD,CAAC;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE,CAAC;QACrE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,OAAO,gBAAM,CAAC,4BAA4B,wBAAwB,CACrE,CAAC;IACJ,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,MAAM,GAAG,6BAAa,CAAC,uBAAO,CAAC,yBAAyB,CAAC,CAAC,MAAM,CAAC;IACvE,MAAM,WAAW,GAAG,4CAA4C,CAAC;IACjE,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IACF,IAAI,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,yBAAyB,EAAE,MAAM,CAAC,EAAE,CAAC;QACvE,yEAAyE;QACzE,IACE,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,KAAK,aAAa;YACnD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,MAAM,EAC9B,CAAC;YACD,MAAM,CAAC,IAAI,CACT,aAAa,WAAW,sCACtB,IAAA,mCAAoB,GAAE,KAAK,SAAS;gBAClC,CAAC,CAAC,8BAA8B,MAAM,yDAAyD,gBAAM,CAAC,oBAAoB,wBAAwB;gBAClJ,CAAC,CAAC,EACN,EAAE,CACH,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACvC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CACT,YAAY,WAAW,yCAAyC,MAAM,yCAAyC,gBAAM,CAAC,oBAAoB,wBAAwB,CACnK,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtC,CAAC;IACH,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,aAAa,WAAW,GAAG,CAAC,CAAC;QACzC,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,YAAY,CAChC,MAA0B,EAC1B,QAAkB,EAClB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,GAAG,EAAE,CAAC;QAC9B,MAAM,iBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,IACE,MAAM,CAAC,SAAS;QAChB,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,EACrE,CAAC;QACD,MAAM,MAAM,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACvD,CAAC;SAAM,CAAC;QACN,MAAM,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC9C,CAAC;IACD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE,CAAC;QAC7B,IAAI,CAAC,cAAc,C
AAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;IAC3D,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC"}
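For reference, the guard that produces the new autobuild message can be read directly out of the autobuild.js hunk above. A sketch, assuming the compiled ./util, ./tools-features and ./doc-url modules are importable and that codeql, config and logger are in scope as in the real function:

const { BuildMode } = require("./util");
const { ToolsFeature } = require("./tools-features");
const { DocUrl } = require("./doc-url");

async function nothingToAutobuild(codeql, config, logger) {
  if ((config.buildMode === BuildMode.None &&
      (await codeql.supportsFeature(ToolsFeature.TraceCommandUseBuildMode))) ||
      config.buildMode === BuildMode.Manual) {
    // Build modes "none" and "manual" leave nothing for the autobuilder to do.
    logger.info(`Using build mode "${config.buildMode}", nothing to autobuild. ` +
        `See ${DocUrl.CODEQL_BUILD_MODES} for more information.`);
    return true;
  }
  return false;
}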
11
lib/codeql.js
generated
@@ -55,6 +55,7 @@ const cli_errors_1 = require("./cli-errors");
const doc_url_1 = require("./doc-url");
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const git_utils_1 = require("./git-utils");
const languages_1 = require("./languages");
const setupCodeql = __importStar(require("./setup-codeql"));
const tools_features_1 = require("./tools-features");
@@ -74,19 +75,19 @@ let cachedCodeQL = undefined;
* The version flags below can be used to conditionally enable certain features
* on versions newer than this.
*/
const CODEQL_MINIMUM_VERSION = "2.14.6";
const CODEQL_MINIMUM_VERSION = "2.15.5";
/**
* This version will shortly become the oldest version of CodeQL that the Action will run with.
*/
const CODEQL_NEXT_MINIMUM_VERSION = "2.14.6";
const CODEQL_NEXT_MINIMUM_VERSION = "2.15.5";
/**
* This is the version of GHES that was most recently deprecated.
*/
const GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.10";
const GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.11";
/**
* This is the deprecation date for the version of GHES that was most recently deprecated.
*/
const GHES_MOST_RECENT_DEPRECATION_DATE = "2024-09-24";
const GHES_MOST_RECENT_DEPRECATION_DATE = "2024-12-19";
/** The CLI verbosity level to use for extraction in debug mode. */
const EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
/*
@@ -830,7 +831,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) {
const cacheDir = config.trapCaches[language];
if (cacheDir === undefined)
return [];
const write = await (0, actions_util_1.isAnalyzingDefaultBranch)();
const write = await (0, git_utils_1.isAnalyzingDefaultBranch)();
return [
`-O=${language}.trap.cache.dir=${cacheDir}`,
`-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`,

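The TRAP-caching hunk changes only where isAnalyzingDefaultBranch comes from; the options it assembles are unchanged. A sketch under those assumptions; the constant value and the final option line are assumptions, since the hunk is cut off after the bound option:

const { isAnalyzingDefaultBranch } = require("./git-utils");

const TRAP_CACHE_SIZE_MB = 1024; // illustrative value; the real constant is defined elsewhere in codeql.js

async function trapCachingArgsForLang(config, language) {
  const cacheDir = config.trapCaches[language];
  if (cacheDir === undefined)
    return [];
  // Only write to the TRAP cache when analyzing the default branch.
  const write = await isAnalyzingDefaultBranch();
  return [
    `-O=${language}.trap.cache.dir=${cacheDir}`,
    `-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`,
    `-O=${language}.trap.cache.write=${write}`, // assumed: the hunk truncates before this option
  ];
}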
File diff suppressed because one or more lines are too long
30
lib/codeql.test.js
generated
@@ -39,8 +39,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.stubToolRunnerConstructor = stubToolRunnerConstructor;
const fs = __importStar(require("fs"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const io = __importStar(require("@actions/io"));
const toolcache = __importStar(require("@actions/tool-cache"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const ava_1 = __importDefault(require("ava"));
const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml"));
@@ -539,8 +539,8 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", undefined, "", Object.assign({}, stubConfig, { gitHubVersion: githubVersion }), (0, testing_utils_1.createFeatures)([]));
const actualArgs = runnerConstructorStub.firstCall.args[1];
t.is(actualArgs.includes("--new-analysis-summary"), flagPassed, `--new-analysis-summary should${flagPassed ? "" : "n't"} be passed`);
@@ -555,8 +555,8 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
stubToolRunnerConstructor(32, cliStderr);
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
await t.throwsAsync(async () => await codeqlObject.finalizeDatabase("db", "--threads=2", "--ram=2048", false), {
instanceOf: util.ConfigurationError,
message: new RegExp('Encountered a fatal error while running \\"codeql-for-testing database finalize --finalize-dataset --threads=2 --ram=2048 db\\"\\. ' +
@@ -578,8 +578,8 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
sinon.stub(codeqlObject, "resolveExtractor").resolves("/path/to/extractor");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
await t.throwsAsync(async () => await codeqlObject.runAutobuild(stubConfig, languages_1.Language.java), {
instanceOf: util.ConfigurationError,
message: "We were unable to automatically build your code. Please provide manual build steps. " +
@@ -596,8 +596,8 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
sinon.stub(codeqlObject, "resolveExtractor").resolves("/path/to/extractor");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
await t.throwsAsync(async () => await codeqlObject.runAutobuild(stubConfig, languages_1.Language.java), {
instanceOf: util.ConfigurationError,
message: "We were unable to automatically build your code. Please provide manual build steps. " +
@@ -616,8 +616,8 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
sinon.stub(codeqlObject, "resolveExtractor").resolves("/path/to/extractor");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
await t.throwsAsync(async () => await codeqlObject.databaseRunQueries(stubConfig.dbLocation, []), {
instanceOf: cli_errors_1.CliError,
message: `Encountered a fatal error while running "codeql-for-testing database run-queries --expect-discarded-cache --min-disk-free=1024 -v --intra-layer-parallelism". Exit code was 1 and error was: Oops! A fatal internal error occurred. Details:
@@ -630,8 +630,8 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
stubToolRunnerConstructor(32, cliStderr);
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
await t.throwsAsync(async () => await codeqlObject.finalizeDatabase("db", "--threads=2", "--ram=2048", false), {
instanceOf: util.ConfigurationError,
message: new RegExp('Encountered a fatal error while running \\"codeql-for-testing database finalize --finalize-dataset --threads=2 --ram=2048 db\\"\\. ' +
@@ -642,8 +642,8 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
process.env["CODEQL_ACTION_EXTRA_OPTIONS"] =
'{ "database": { "init": ["--overwrite"] } }';
await codeqlObject.databaseInitCluster(stubConfig, "sourceRoot", undefined, undefined, (0, logging_1.getRunnerLogger)(false));

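Every test hunk above makes the same two-line substitution. A sketch of the new stub, assuming sinon and @actions/io are installed; resolving to the empty string keeps the test CodeQL object from triggering a real PATH lookup:

const sinon = require("sinon");
const io = require("@actions/io");

// io.which would throw for the test CodeQL object, so tests stub it out.
function stubWhichForTests() {
  return sinon.stub(io, "which").resolves("");
}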
File diff suppressed because one or more lines are too long
5
lib/database-upload.js
generated
@@ -38,6 +38,7 @@ const fs = __importStar(require("fs"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const gitUtils = __importStar(require("./git-utils"));
const util = __importStar(require("./util"));
const util_1 = require("./util");
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
@@ -55,7 +56,7 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
logger.debug("Not running against github.com or GHEC-DR. Skipping upload.");
return;
}
if (!(await actionsUtil.isAnalyzingDefaultBranch())) {
if (!(await gitUtils.isAnalyzingDefaultBranch())) {
// We only want to upload a database if we are analyzing the default branch.
logger.debug("Not analyzing default branch. Skipping upload.");
return;
@@ -79,7 +80,7 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
const bundledDb = await (0, util_1.bundleDb)(config, language, codeql, language);
const bundledDbSize = fs.statSync(bundledDb).size;
const bundledDbReadStream = fs.createReadStream(bundledDb);
const commitOid = await actionsUtil.getCommitOid(actionsUtil.getRequiredInput("checkout_path"));
const commitOid = await gitUtils.getCommitOid(actionsUtil.getRequiredInput("checkout_path"));
try {
await client.request(`POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, {
baseUrl: uploadsBaseUrl,

@@ -1 +1 @@
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAWA,0CAmFC;AA9FD,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkD;AAE3C,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE,CAAC;QAC/D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;IACT,CAAC;IAED,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE,CAAC;QACxB,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;QACxD,OAAO;IACT,CAAC;IAED,iDAAiD;IACjD,IACE,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM;QACvD,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,UAAU,EAC3D,CAAC;QACD,MAAM,CAAC,KAAK,CAAC,6DAA6D,CAAC,CAAC;QAC5E,OAAO;IACT,CAAC;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE,CAAC;QACpD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;IACT,CAAC;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAC9B,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,MAAM,UAAU,GAAG,IAAI,GAAG,CAAC,IAAA,qBAAc,EAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;IAC3D,UAAU,CAAC,QAAQ,GAAG,WAAW,UAAU,CAAC,QAAQ,EAAE,CAAC;IAEvD,4DAA4D;IAC5D,0CAA0C;IAC1C,IAAI,cAAc,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;IAC3C,IAAI,cAAc,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;QACjC,cAAc,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACxC,IAAI,CAAC;YACH,8BAA8B;YAC9B,2EAA2E;YAC3E,8EAA8E;YAC9E,wEAAwE;YACxE,MAAM,SAAS,GAAG,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;YACrE,MAAM,aAAa,GAAG,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC;YAClD,MAAM,mBAAmB,GAAG,EAAE,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,SAAS,GAAG,MAAM,WAAW,CAAC,YAAY,CAC9C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAC9C,CAAC;YACF,IAAI,CAAC;gBACH,MAAM,MAAM,CAAC,OAAO,CAClB,qGAAqG,EACrG;oBACE,OAAO,EAAE,cAAc;oBACvB,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,UAAU,EAAE,SAAS;oBACrB,IAAI,EAAE,mBAAmB;oBACzB,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;wBACzC,cAAc,EAAE,iBAAiB;wBACjC,gBAAgB,EAAE,aAAa;qBAChC;iBACF,CACF,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;YACjE,CAAC;oBAAS,CAAC;gBACT,mBAAmB,CAAC,KAAK,EAAE,CAAC;YAC9B,CAAC;QACH,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;QACpE,CAAC;IACH,CAAC;AACH,CAAC"}
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAYA,0CAmFC;AA/FD,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAErC,sDAAwC;AAGxC,6CAA+B;AAC/B,iCAAkD;AAE3C,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE,CAAC;QAC/D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;IACT,CAAC;IAED,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE,CAAC;QACxB,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;QACxD,OAAO;IACT,CAAC;IAED,iDAAiD;IACjD,IACE,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM;QACvD,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,UAAU,EAC3D,CAAC;QACD,MAAM,CAAC,KAAK,CAAC,6DAA6D,CAAC,CAAC;QAC5E,OAAO;IACT,CAAC;IAED,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,wBAAwB,EAAE,CAAC,EAAE,CAAC;QACjD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;IACT,CAAC;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAC9B,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,MAAM,UAAU,GAAG,IAAI,GAAG,CAAC,IAAA,qBAAc,EAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;IAC3D,UAAU,CAAC,QAAQ,GAAG,WAAW,UAAU,CAAC,QAAQ,EAAE,CAAC;IAEvD,4DAA4D;IAC5D,0CAA0C;IAC1C,IAAI,cAAc,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;IAC3C,IAAI,cAAc,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;QACjC,cAAc,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACxC,IAAI,CAAC;YACH,8BAA8B;YAC9B,2EAA2E;YAC3E,8EAA8E;YAC9E,wEAAwE;YACxE,MAAM,SAAS,GAAG,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;YACrE,MAAM,aAAa,GAAG,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC;YAClD,MAAM,mBAAmB,GAAG,EAAE,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,YAAY,CAC3C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAC9C,CAAC;YACF,IAAI,CAAC;gBACH,MAAM,MAAM,CAAC,OAAO,CAClB,qGAAqG,EACrG;oBACE,OAAO,EAAE,cAAc;oBACvB,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,UAAU,EAAE,SAAS;oBACrB,IAAI,EAAE,mBAAmB;oBACzB,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;wBACzC,cAAc,EAAE,iBAAiB;wBACjC,gBAAgB,EAAE,aAAa;qBAChC;iBACF,CACF,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;YACjE,CAAC;oBAAS,CAAC;gBACT,mBAAmB,CAAC,KAAK,EAAE,CAAC;YAC9B,CAAC;QACH,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;QACpE,CAAC;IACH,CAAC;AACH,CAAC"}
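The early-exit guards in uploadDatabases can be summarized from the two hunks above. A sketch, assuming GitHubVariant from the compiled ./util module; GHE_DOTCOM as the variant name behind the "GHEC-DR" message is an assumption, and the wrapper function is illustrative:

const gitUtils = require("./git-utils");
const { GitHubVariant } = require("./util");

async function shouldUploadDatabases(config, logger) {
  if (config.gitHubVersion.type !== GitHubVariant.DOTCOM &&
      config.gitHubVersion.type !== GitHubVariant.GHE_DOTCOM) { // GHE_DOTCOM name assumed
    logger.debug("Not running against github.com or GHEC-DR. Skipping upload.");
    return false;
  }
  if (!(await gitUtils.isAnalyzingDefaultBranch())) {
    // We only want to upload a database if we are analyzing the default branch.
    logger.debug("Not analyzing default branch. Skipping upload.");
    return false;
  }
  return true;
}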
13
lib/database-upload.test.js
generated
@@ -44,6 +44,7 @@ const actionsUtil = __importStar(require("./actions-util"));
const apiClient = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const database_upload_1 = require("./database-upload");
const gitUtils = __importStar(require("./git-utils"));
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
@@ -85,7 +86,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("false");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
@@ -100,7 +101,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
const loggedMessages = [];
@@ -117,7 +118,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
@@ -131,7 +132,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(500);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
@@ -152,7 +153,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
@@ -172,7 +173,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
const databaseUploadSpy = await mockHttpRequests(201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {

File diff suppressed because one or more lines are too long
19
lib/debug-artifacts.js
generated
@@ -52,7 +52,6 @@ const actions_util_1 = require("./actions-util");
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const util_1 = require("./util");
function sanitizeArtifactName(name) {
@@ -62,7 +61,7 @@ function sanitizeArtifactName(name) {
* Upload Actions SARIF artifacts for debugging when CODEQL_ACTION_DEBUG_COMBINED_SARIF
* environment variable is set
*/
async function uploadCombinedSarifArtifacts(logger, gitHubVariant, features) {
async function uploadCombinedSarifArtifacts(logger, gitHubVariant) {
const tempDir = (0, actions_util_1.getTemporaryDirectory)();
// Upload Actions SARIF artifacts for debugging when environment variable is set
if (process.env["CODEQL_ACTION_DEBUG_COMBINED_SARIF"] === "true") {
@@ -81,7 +80,7 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, features) {
}
}
try {
await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant, features);
await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant);
}
catch (e) {
logger.warning(`Failed to upload combined SARIF files as Actions debugging artifact. Reason: ${(0, util_1.getErrorMessage)(e)}`);
@@ -141,7 +140,7 @@ async function tryBundleDatabase(config, language, logger) {
*
* Logs and suppresses any errors that occur.
*/
async function tryUploadAllAvailableDebugArtifacts(config, logger, features) {
async function tryUploadAllAvailableDebugArtifacts(config, logger) {
const filesToUpload = [];
try {
for (const language of config.languages) {
@@ -181,13 +180,13 @@ async function tryUploadAllAvailableDebugArtifacts(config, logger, features) {
return;
}
try {
await (0, logging_1.withGroup)("Uploading debug artifacts", async () => uploadDebugArtifacts(logger, filesToUpload, config.dbLocation, config.debugArtifactName, config.gitHubVersion.type, features));
await (0, logging_1.withGroup)("Uploading debug artifacts", async () => uploadDebugArtifacts(logger, filesToUpload, config.dbLocation, config.debugArtifactName, config.gitHubVersion.type));
}
catch (e) {
logger.warning(`Failed to upload debug artifacts. Reason: ${(0, util_1.getErrorMessage)(e)}`);
}
}
async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghVariant, features) {
async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghVariant) {
if (toUpload.length === 0) {
return;
}
@@ -202,7 +201,7 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV
core.info("Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input.");
}
}
const artifactUploader = await getArtifactUploaderClient(logger, ghVariant, features);
const artifactUploader = await getArtifactUploaderClient(logger, ghVariant);
try {
await artifactUploader.uploadArtifact(sanitizeArtifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir), {
// ensure we don't keep the debug artifacts around for too long since they can be large.
@@ -218,15 +217,11 @@
// until it is supported. We also use the legacy version of the client if the feature flag is disabled.
// The feature flag is named `ArtifactV4Upgrade` to reduce customer confusion; customers are primarily affected by
// `actions/download-artifact`, whose upgrade to v4 must be accompanied by the `@actions/artifact@v2` upgrade.
async function getArtifactUploaderClient(logger, ghVariant, features) {
async function getArtifactUploaderClient(logger, ghVariant) {
if (ghVariant === util_1.GitHubVariant.GHES) {
logger.info("Debug artifacts can be consumed with `actions/download-artifact@v3` because the `v4` version is not yet compatible on GHES.");
return artifactLegacy.create();
}
else if (!(await features.getValue(feature_flags_1.Feature.ArtifactV4Upgrade))) {
logger.info("Debug artifacts can be consumed with `actions/download-artifact@v3`. To use the `actions/download-artifact@v4`, set the `CODEQL_ACTION_ARTIFACT_V4_UPGRADE` environment variable to true.");
return artifactLegacy.create();
}
else {
logger.info("Debug artifacts can be consumed with `actions/download-artifact@v4`.");
return new artifact.DefaultArtifactClient();

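With the ArtifactV4Upgrade flag gone, client selection in the hunk above collapses to a single GHES check. A sketch of the resulting function, assuming `artifact` (the v2 client) and `artifactLegacy` (the v1 client) are the imports bound earlier in the file; their exact specifiers are not visible in the hunk, so they are taken as parameters here:

const { GitHubVariant } = require("./util");

// `artifact` and `artifactLegacy` mirror the module-level imports of the
// compiled file, whose require specifiers are not shown in the hunk.
async function getArtifactUploaderClient(logger, ghVariant, artifact, artifactLegacy) {
  if (ghVariant === GitHubVariant.GHES) {
    logger.info("Debug artifacts can be consumed with `actions/download-artifact@v3` because the `v4` version is not yet compatible on GHES.");
    return artifactLegacy.create();
  }
  logger.info("Debug artifacts can be consumed with `actions/download-artifact@v4`.");
  return new artifact.DefaultArtifactClient();
}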
File diff suppressed because one or more lines are too long
5
lib/debug-artifacts.test.js
generated
@@ -38,9 +38,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const debugArtifacts = __importStar(require("./debug-artifacts"));
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, ava_1.default)("sanitizeArtifactName", (t) => {
t.deepEqual(debugArtifacts.sanitizeArtifactName("hello-world_"), "hello-world_");
@@ -51,7 +49,6 @@ const util_1 = require("./util");
(0, ava_1.default)("uploadDebugArtifacts", async (t) => {
// Test that no error is thrown if artifacts list is empty.
const logger = (0, logging_1.getActionsLogger)();
const mockFeature = (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.ArtifactV4Upgrade]);
await t.notThrowsAsync(debugArtifacts.uploadDebugArtifacts(logger, [], "rootDir", "artifactName", util_1.GitHubVariant.DOTCOM, mockFeature));
await t.notThrowsAsync(debugArtifacts.uploadDebugArtifacts(logger, [], "rootDir", "artifactName", util_1.GitHubVariant.DOTCOM));
});
//# sourceMappingURL=debug-artifacts.test.js.map
@@ -1 +1 @@
{"version":3,"file":"debug-artifacts.test.js","sourceRoot":"","sources":["../src/debug-artifacts.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AAEvB,kEAAoD;AACpD,mDAA0C;AAC1C,uCAA6C;AAC7C,mDAAiD;AACjD,iCAAuC;AAEvC,IAAA,aAAI,EAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,EAAE;IACjC,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,cAAc,CACf,CAAC;IACF,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,YAAY,CACb,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,oBAAoB,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAC5E,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,EAC9D,aAAa,CACd,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,2DAA2D;IAC3D,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,WAAW,GAAG,IAAA,8BAAc,EAAC,CAAC,uBAAO,CAAC,iBAAiB,CAAC,CAAC,CAAC;IAChE,MAAM,CAAC,CAAC,cAAc,CACpB,cAAc,CAAC,oBAAoB,CACjC,MAAM,EACN,EAAE,EACF,SAAS,EACT,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,WAAW,CACZ,CACF,CAAC;AACJ,CAAC,CAAC,CAAC"}
{"version":3,"file":"debug-artifacts.test.js","sourceRoot":"","sources":["../src/debug-artifacts.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AAEvB,kEAAoD;AACpD,uCAA6C;AAC7C,iCAAuC;AAEvC,IAAA,aAAI,EAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,EAAE;IACjC,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,cAAc,CACf,CAAC;IACF,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,YAAY,CACb,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,oBAAoB,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAC5E,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,EAC9D,aAAa,CACd,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,2DAA2D;IAC3D,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CACpB,cAAc,CAAC,oBAAoB,CACjC,MAAM,EACN,EAAE,EACF,SAAS,EACT,cAAc,EACd,oBAAa,CAAC,MAAM,CACrB,CACF,CAAC;AACJ,CAAC,CAAC,CAAC"}
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.19.4",
"cliVersion": "2.19.4",
"priorBundleVersion": "codeql-bundle-v2.19.3",
"priorCliVersion": "2.19.3"
"bundleVersion": "codeql-bundle-v2.20.0",
"cliVersion": "2.20.0",
"priorBundleVersion": "codeql-bundle-v2.19.4",
"priorCliVersion": "2.19.4"
}

1
lib/doc-url.js
generated
@@ -12,5 +12,6 @@ var DocUrl;
DocUrl["SCANNING_ON_PUSH"] = "https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push";
DocUrl["SPECIFY_BUILD_STEPS_MANUALLY"] = "https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages#about-specifying-build-steps-manually";
DocUrl["TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS"] = "https://docs.github.com/en/enterprise-cloud@latest/code-security/code-scanning/integrating-with-code-scanning/sarif-support-for-code-scanning#providing-data-to-track-code-scanning-alerts-across-runs";
DocUrl["CODEQL_BUILD_MODES"] = "https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages#codeql-build-modes";
})(DocUrl || (exports.DocUrl = DocUrl = {}));
//# sourceMappingURL=doc-url.js.map
@@ -1 +1 @@
{"version":3,"file":"doc-url.js","sourceRoot":"","sources":["../src/doc-url.ts"],"names":[],"mappings":";AAAA;;GAEG;;;AAEH,IAAY,MAOX;AAPD,WAAY,MAAM;IAChB,uHAA6G,CAAA;IAC7G,gJAAsI,CAAA;IACtI,yJAA+I,CAAA;IAC/I,qMAA2L,CAAA;IAC3L,gOAAsN,CAAA;IACtN,2PAAiP,CAAA;AACnP,CAAC,EAPW,MAAM,sBAAN,MAAM,QAOjB"}
{"version":3,"file":"doc-url.js","sourceRoot":"","sources":["../src/doc-url.ts"],"names":[],"mappings":";AAAA;;GAEG;;;AAEH,IAAY,MAQX;AARD,WAAY,MAAM;IAChB,uHAA6G,CAAA;IAC7G,gJAAsI,CAAA;IACtI,yJAA+I,CAAA;IAC/I,qMAA2L,CAAA;IAC3L,gOAAsN,CAAA;IACtN,2PAAiP,CAAA;IACjP,mMAAyL,CAAA;AAC3L,CAAC,EARW,MAAM,sBAAN,MAAM,QAQjB"}
6
lib/feature-flags.js
generated
@@ -62,7 +62,6 @@ exports.CODEQL_VERSION_ZSTD_BUNDLE = "2.19.0";
*/
var Feature;
(function (Feature) {
Feature["ArtifactV4Upgrade"] = "artifact_v4_upgrade";
Feature["CleanupTrapCaches"] = "cleanup_trap_caches";
Feature["CppBuildModeNone"] = "cpp_build_mode_none";
Feature["CppDependencyInstallation"] = "cpp_dependency_installation_enabled";
@@ -77,11 +76,6 @@ var Feature;
Feature["ZstdBundleStreamingExtraction"] = "zstd_bundle_streaming_extraction";
})(Feature || (exports.Feature = Feature = {}));
exports.featureConfig = {
[Feature.ArtifactV4Upgrade]: {
defaultValue: true,
envVar: "CODEQL_ACTION_ARTIFACT_V4_UPGRADE",
minimumVersion: undefined,
},
[Feature.CleanupTrapCaches]: {
defaultValue: false,
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",

File diff suppressed because one or more lines are too long
381
lib/git-utils.js
generated
Normal file
@@ -0,0 +1,381 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.decodeGitFilePath = exports.getGitDiffHunkHeaders = exports.getAllGitMergeBases = exports.gitRepack = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = void 0;
exports.getRef = getRef;
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
const core = __importStar(require("@actions/core"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const io = __importStar(require("@actions/io"));
const actions_util_1 = require("./actions-util");
const util_1 = require("./util");
async function runGitCommand(checkoutPath, args, customErrorMessage) {
let stdout = "";
let stderr = "";
core.debug(`Running git command: git ${args.join(" ")}`);
try {
await new toolrunner.ToolRunner(await io.which("git", true), args, {
silent: true,
listeners: {
stdout: (data) => {
stdout += data.toString();
},
stderr: (data) => {
stderr += data.toString();
},
},
cwd: checkoutPath,
}).exec();
return stdout;
}
catch (error) {
let reason = stderr;
if (stderr.includes("not a git repository")) {
reason =
"The checkout path provided to the action does not appear to be a git repository.";
}
core.info(`git call failed. ${customErrorMessage} Error: ${reason}`);
throw error;
}
}
/**
|
||||
* Gets the SHA of the commit that is currently checked out.
|
||||
*/
|
||||
const getCommitOid = async function (checkoutPath, ref = "HEAD") {
|
||||
// Try to use git to get the current commit SHA. If that fails then
|
||||
// log but otherwise silently fall back to using the SHA from the environment.
|
||||
// The only time these two values will differ is during analysis of a PR when
|
||||
// the workflow has changed the current commit to the head commit instead of
|
||||
// the merge commit, which must mean that git is available.
|
||||
// Even if this does go wrong, it's not a huge problem for the alerts to
|
||||
// reported on the merge commit.
|
||||
try {
|
||||
const stdout = await runGitCommand(checkoutPath, ["rev-parse", ref], "Continuing with commit SHA from user input or environment.");
|
||||
return stdout.trim();
|
||||
}
|
||||
catch {
|
||||
return (0, actions_util_1.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
}
|
||||
};
|
||||
exports.getCommitOid = getCommitOid;
|
||||
/**
|
||||
* If the action was triggered by a pull request, determine the commit sha at
|
||||
* the head of the base branch, using the merge commit that this workflow analyzes.
|
||||
* Returns undefined if run by other triggers or the base branch commit cannot be
|
||||
* determined.
|
||||
*/
|
||||
const determineBaseBranchHeadCommitOid = async function (checkoutPathOverride) {
|
||||
if ((0, actions_util_1.getWorkflowEventName)() !== "pull_request") {
|
||||
return undefined;
|
||||
}
|
||||
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
const checkoutPath = checkoutPathOverride ?? (0, actions_util_1.getOptionalInput)("checkout_path");
|
||||
try {
|
||||
let commitOid = "";
|
||||
let baseOid = "";
|
||||
let headOid = "";
|
||||
const stdout = await runGitCommand(checkoutPath, ["show", "-s", "--format=raw", mergeSha], "Will calculate the base branch SHA on the server.");
|
||||
for (const data of stdout.split("\n")) {
|
||||
if (data.startsWith("commit ") && commitOid === "") {
|
||||
commitOid = data.substring(7);
|
||||
}
|
||||
else if (data.startsWith("parent ")) {
|
||||
if (baseOid === "") {
|
||||
baseOid = data.substring(7);
|
||||
}
|
||||
else if (headOid === "") {
|
||||
headOid = data.substring(7);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
|
||||
if (commitOid === mergeSha &&
|
||||
headOid.length === 40 &&
|
||||
baseOid.length === 40) {
|
||||
return baseOid;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
catch {
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
exports.determineBaseBranchHeadCommitOid = determineBaseBranchHeadCommitOid;
|
||||
/**
|
||||
* Deepen the git history of HEAD by one level. Errors are logged.
|
||||
*
|
||||
* This function uses the `checkout_path` to determine the repository path and
|
||||
* works only when called from `analyze` or `upload-sarif`.
|
||||
*/
|
||||
const deepenGitHistory = async function () {
|
||||
try {
|
||||
await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), [
|
||||
"fetch",
|
||||
"origin",
|
||||
"HEAD",
|
||||
"--no-tags",
|
||||
"--no-recurse-submodules",
|
||||
"--deepen=1",
|
||||
], "Cannot deepen the shallow repository.");
|
||||
}
|
||||
catch {
|
||||
// Errors are already logged by runGitCommand()
|
||||
}
|
||||
};
|
||||
exports.deepenGitHistory = deepenGitHistory;
|
||||
/**
|
||||
* Fetch the given remote branch. Errors are logged.
|
||||
*
|
||||
* This function uses the `checkout_path` to determine the repository path and
|
||||
* works only when called from `analyze` or `upload-sarif`.
|
||||
*/
|
||||
const gitFetch = async function (branch, extraFlags) {
|
||||
try {
|
||||
await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`], `Cannot fetch ${branch}.`);
|
||||
}
|
||||
catch {
|
||||
// Errors are already logged by runGitCommand()
|
||||
}
|
||||
};
|
||||
exports.gitFetch = gitFetch;
|
||||
/**
|
||||
* Repack the git repository, using with the given flags. Errors are logged.
|
||||
*
|
||||
* This function uses the `checkout_path` to determine the repository path and
|
||||
* works only when called from `analyze` or `upload-sarif`.
|
||||
*/
|
||||
const gitRepack = async function (flags) {
|
||||
try {
|
||||
await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["repack", ...flags], "Cannot repack the repository.");
|
||||
}
|
||||
catch {
|
||||
// Errors are already logged by runGitCommand()
|
||||
}
|
||||
};
|
||||
exports.gitRepack = gitRepack;
|
||||
/**
|
||||
* Compute the all merge bases between the given refs. Returns an empty array
|
||||
* if no merge base is found, or if there is an error.
|
||||
*
|
||||
* This function uses the `checkout_path` to determine the repository path and
|
||||
* works only when called from `analyze` or `upload-sarif`.
|
||||
*/
|
||||
const getAllGitMergeBases = async function (refs) {
|
||||
try {
|
||||
const stdout = await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["merge-base", "--all", ...refs], `Cannot get merge base of ${refs}.`);
|
||||
return stdout.trim().split("\n");
|
||||
}
|
||||
catch {
|
||||
return [];
|
||||
}
|
||||
};
|
||||
exports.getAllGitMergeBases = getAllGitMergeBases;
|
||||
/**
|
||||
* Compute the diff hunk headers between the two given refs.
|
||||
*
|
||||
* This function uses the `checkout_path` to determine the repository path and
|
||||
* works only when called from `analyze` or `upload-sarif`.
|
||||
*
|
||||
* @returns an array of diff hunk headers (one element per line), or undefined
|
||||
* if the action was not triggered by a pull request, or if the diff could not
|
||||
* be determined.
|
||||
*/
|
||||
const getGitDiffHunkHeaders = async function (fromRef, toRef) {
|
||||
let stdout = "";
|
||||
try {
|
||||
stdout = await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), [
|
||||
"-c",
|
||||
"core.quotePath=false",
|
||||
"diff",
|
||||
"--no-renames",
|
||||
"--irreversible-delete",
|
||||
"-U0",
|
||||
fromRef,
|
||||
toRef,
|
||||
], `Cannot get diff from ${fromRef} to ${toRef}.`);
|
||||
}
|
||||
catch {
|
||||
return undefined;
|
||||
}
|
||||
const headers = [];
|
||||
for (const line of stdout.split("\n")) {
|
||||
if (line.startsWith("--- ") ||
|
||||
line.startsWith("+++ ") ||
|
||||
line.startsWith("@@ ")) {
|
||||
headers.push(line);
|
||||
}
|
||||
}
|
||||
return headers;
|
||||
};
|
||||
exports.getGitDiffHunkHeaders = getGitDiffHunkHeaders;
|
||||
/**
|
||||
* Decode, if necessary, a file path produced by Git. See
|
||||
* https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath
|
||||
* for details on how Git encodes file paths with special characters.
|
||||
*
|
||||
* This function works only for Git output with `core.quotePath=false`.
|
||||
*/
|
||||
const decodeGitFilePath = function (filePath) {
|
||||
if (filePath.startsWith('"') && filePath.endsWith('"')) {
|
||||
filePath = filePath.substring(1, filePath.length - 1);
|
||||
return filePath.replace(/\\([abfnrtv\\"]|[0-7]{1,3})/g, (_match, seq) => {
|
||||
switch (seq[0]) {
|
||||
case "a":
|
||||
return "\x07";
|
||||
case "b":
|
||||
return "\b";
|
||||
case "f":
|
||||
return "\f";
|
||||
case "n":
|
||||
return "\n";
|
||||
case "r":
|
||||
return "\r";
|
||||
case "t":
|
||||
return "\t";
|
||||
case "v":
|
||||
return "\v";
|
||||
case "\\":
|
||||
return "\\";
|
||||
case '"':
|
||||
return '"';
|
||||
default:
|
||||
// Both String.fromCharCode() and String.fromCodePoint() works only
|
||||
// for constructing an entire character at once. If a Unicode
|
||||
// character is encoded as a sequence of escaped bytes, calling these
|
||||
// methods sequentially on the individual byte values would *not*
|
||||
// produce the original multi-byte Unicode character. As a result,
|
||||
// this implementation works only with the Git option core.quotePath
|
||||
// set to false.
|
||||
return String.fromCharCode(parseInt(seq, 8));
|
||||
}
|
||||
});
|
||||
}
|
||||
return filePath;
|
||||
};
|
||||
exports.decodeGitFilePath = decodeGitFilePath;
|
||||
function getRefFromEnv() {
|
||||
// To workaround a limitation of Actions dynamic workflows not setting
|
||||
// the GITHUB_REF in some cases, we accept also the ref within the
|
||||
// CODE_SCANNING_REF variable. When possible, however, we prefer to use
|
||||
// the GITHUB_REF as that is a protected variable and cannot be overwritten.
|
||||
let refEnv;
|
||||
try {
|
||||
refEnv = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
|
||||
}
|
||||
catch (e) {
|
||||
// If the GITHUB_REF is not set, we try to rescue by getting the
|
||||
// CODE_SCANNING_REF.
|
||||
const maybeRef = process.env["CODE_SCANNING_REF"];
|
||||
if (maybeRef === undefined || maybeRef.length === 0) {
|
||||
throw e;
|
||||
}
|
||||
refEnv = maybeRef;
|
||||
}
|
||||
return refEnv;
|
||||
}
|
||||
/**
|
||||
* Get the ref currently being analyzed.
|
||||
*/
|
||||
async function getRef() {
|
||||
// Will be in the form "refs/heads/master" on a push event
|
||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||
const refInput = (0, actions_util_1.getOptionalInput)("ref");
|
||||
const shaInput = (0, actions_util_1.getOptionalInput)("sha");
|
||||
const checkoutPath = (0, actions_util_1.getOptionalInput)("checkout_path") ||
|
||||
(0, actions_util_1.getOptionalInput)("source-root") ||
|
||||
(0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE");
|
||||
const hasRefInput = !!refInput;
|
||||
const hasShaInput = !!shaInput;
|
||||
// If one of 'ref' or 'sha' are provided, both are required
|
||||
if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
|
||||
throw new util_1.ConfigurationError("Both 'ref' and 'sha' are required if one of them is provided.");
|
||||
}
|
||||
const ref = refInput || getRefFromEnv();
|
||||
const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
// If the ref is a user-provided input, we have to skip logic
|
||||
// and assume that it is really where they want to upload the results.
|
||||
if (refInput) {
|
||||
return refInput;
|
||||
}
|
||||
// For pull request refs we want to detect whether the workflow
|
||||
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
||||
// than the 'merge' ref. If so, we want to convert the ref that
|
||||
// we report back.
|
||||
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
|
||||
if (!pull_ref_regex.test(ref)) {
|
||||
return ref;
|
||||
}
|
||||
const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
|
||||
// in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
|
||||
// in actions/checkout@v1 this may not be true as it checks out the repository
|
||||
// using GITHUB_REF. There is a subtle race condition where
|
||||
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
||||
// git rev-parse GITHUB_REF == git rev-parse HEAD instead.
|
||||
const hasChangedRef = sha !== head &&
|
||||
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
|
||||
if (hasChangedRef) {
|
||||
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
||||
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
|
||||
return newRef;
|
||||
}
|
||||
else {
|
||||
return ref;
|
||||
}
|
||||
}
|
||||
function removeRefsHeadsPrefix(ref) {
|
||||
return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
|
||||
}
|
||||
/**
|
||||
* Returns whether we are analyzing the default branch for the repository.
|
||||
*
|
||||
* This first checks the environment variable `CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH`. This
|
||||
* environment variable can be set in cases where repository information might not be available, for
|
||||
* example dynamic workflows.
|
||||
*/
|
||||
async function isAnalyzingDefaultBranch() {
|
||||
if (process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH === "true") {
|
||||
return true;
|
||||
}
|
||||
// Get the current ref and trim and refs/heads/ prefix
|
||||
let currentRef = await getRef();
|
||||
currentRef = removeRefsHeadsPrefix(currentRef);
|
||||
const event = (0, actions_util_1.getWorkflowEvent)();
|
||||
let defaultBranch = event?.repository?.default_branch;
|
||||
if ((0, actions_util_1.getWorkflowEventName)() === "schedule") {
|
||||
defaultBranch = removeRefsHeadsPrefix(getRefFromEnv());
|
||||
}
|
||||
return currentRef === defaultBranch;
|
||||
}
|
||||
//# sourceMappingURL=git-utils.js.map
|
||||
1
lib/git-utils.js.map
Normal file
File diff suppressed because one or more lines are too long
268
lib/git-utils.test.js
generated
Normal file
@@ -0,0 +1,268 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const gitUtils = __importStar(require("./git-utils"));
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
process.env["GITHUB_REF"] = "";
await t.throwsAsync(gitUtils.getRef);
});
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = currentSha;
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs("HEAD").resolves(currentSha);
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
});
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = "b".repeat(40);
const sha = "a".repeat(40);
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
callback.withArgs("HEAD").resolves(sha);
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
});
(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, "refs/pull/1/head");
callback.restore();
});
});
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
// These values are be ignored
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
callback.withArgs("HEAD").resolves("b".repeat(40));
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, "refs/pull/2/merge");
callback.restore();
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/HEAD";
const currentSha = "a".repeat(40);
process.env["CODE_SCANNING_REF"] = expectedRef;
process.env["GITHUB_REF"] = "";
process.env["GITHUB_SHA"] = currentSha;
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
});
});
(0, ava_1.default)("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD";
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = currentSha;
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
});
});
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
await t.throwsAsync(async () => {
await gitUtils.getRef();
}, {
instanceOf: Error,
message: "Both 'ref' and 'sha' are required if one of them is provided.",
});
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
process.env["GITHUB_WORKSPACE"] = "/tmp";
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
await t.throwsAsync(async () => {
await gitUtils.getRef();
}, {
instanceOf: Error,
message: "Both 'ref' and 'sha' are required if one of them is provided.",
});
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
process.env["GITHUB_EVENT_NAME"] = "push";
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "false";
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const envFile = path.join(tmpDir, "event.json");
fs.writeFileSync(envFile, JSON.stringify({
repository: {
default_branch: "main",
},
}));
process.env["GITHUB_EVENT_PATH"] = envFile;
process.env["GITHUB_REF"] = "main";
process.env["GITHUB_SHA"] = "1234";
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
process.env["GITHUB_REF"] = "feature";
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), false);
fs.writeFileSync(envFile, JSON.stringify({
schedule: "0 0 * * *",
}));
process.env["GITHUB_EVENT_NAME"] = "schedule";
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
getAdditionalInputStub
.withArgs("ref")
.resolves("refs/heads/something-else");
getAdditionalInputStub
.withArgs("sha")
.resolves("0000000000000000000000000000000000000000");
process.env["GITHUB_EVENT_NAME"] = "schedule";
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), false);
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => {
const infoStub = sinon.stub(core, "info");
process.env["GITHUB_EVENT_NAME"] = "hucairz";
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
const result = await gitUtils.determineBaseBranchHeadCommitOid(__dirname);
t.deepEqual(result, undefined);
t.deepEqual(0, infoStub.callCount);
infoStub.restore();
});
(0, ava_1.default)("determineBaseBranchHeadCommitOid not git repository", async (t) => {
const infoStub = sinon.stub(core, "info");
process.env["GITHUB_EVENT_NAME"] = "pull_request";
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
await (0, util_1.withTmpDir)(async (tmpDir) => {
await gitUtils.determineBaseBranchHeadCommitOid(tmpDir);
});
t.deepEqual(1, infoStub.callCount);
t.deepEqual(infoStub.firstCall.args[0], "git call failed. Will calculate the base branch SHA on the server. Error: " +
"The checkout path provided to the action does not appear to be a git repository.");
infoStub.restore();
});
(0, ava_1.default)("determineBaseBranchHeadCommitOid other error", async (t) => {
const infoStub = sinon.stub(core, "info");
process.env["GITHUB_EVENT_NAME"] = "pull_request";
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
const result = await gitUtils.determineBaseBranchHeadCommitOid(path.join(__dirname, "../../i-dont-exist"));
t.deepEqual(result, undefined);
t.deepEqual(1, infoStub.callCount);
t.assert(infoStub.firstCall.args[0].startsWith("git call failed. Will calculate the base branch SHA on the server. Error: "));
t.assert(!infoStub.firstCall.args[0].endsWith("The checkout path provided to the action does not appear to be a git repository."));
infoStub.restore();
});
(0, ava_1.default)("decodeGitFilePath unquoted strings", async (t) => {
t.deepEqual(gitUtils.decodeGitFilePath("foo"), "foo");
t.deepEqual(gitUtils.decodeGitFilePath("foo bar"), "foo bar");
t.deepEqual(gitUtils.decodeGitFilePath("foo\\\\bar"), "foo\\\\bar");
t.deepEqual(gitUtils.decodeGitFilePath('foo\\"bar'), 'foo\\"bar');
t.deepEqual(gitUtils.decodeGitFilePath("foo\\001bar"), "foo\\001bar");
t.deepEqual(gitUtils.decodeGitFilePath("foo\\abar"), "foo\\abar");
t.deepEqual(gitUtils.decodeGitFilePath("foo\\bbar"), "foo\\bbar");
t.deepEqual(gitUtils.decodeGitFilePath("foo\\fbar"), "foo\\fbar");
t.deepEqual(gitUtils.decodeGitFilePath("foo\\nbar"), "foo\\nbar");
t.deepEqual(gitUtils.decodeGitFilePath("foo\\rbar"), "foo\\rbar");
t.deepEqual(gitUtils.decodeGitFilePath("foo\\tbar"), "foo\\tbar");
t.deepEqual(gitUtils.decodeGitFilePath("foo\\vbar"), "foo\\vbar");
t.deepEqual(gitUtils.decodeGitFilePath("\\a\\b\\f\\n\\r\\t\\v"), "\\a\\b\\f\\n\\r\\t\\v");
});
(0, ava_1.default)("decodeGitFilePath quoted strings", async (t) => {
t.deepEqual(gitUtils.decodeGitFilePath('"foo"'), "foo");
t.deepEqual(gitUtils.decodeGitFilePath('"foo bar"'), "foo bar");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\\\bar"'), "foo\\bar");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\"bar"'), 'foo"bar');
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\001bar"'), "foo\x01bar");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\abar"'), "foo\x07bar");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\bbar"'), "foo\bbar");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\fbar"'), "foo\fbar");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\nbar"'), "foo\nbar");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\rbar"'), "foo\rbar");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\tbar"'), "foo\tbar");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\vbar"'), "foo\vbar");
t.deepEqual(gitUtils.decodeGitFilePath('"\\a\\b\\f\\n\\r\\t\\v"'), "\x07\b\f\n\r\t\v");
});
//# sourceMappingURL=git-utils.test.js.map
1
lib/git-utils.test.js.map
Normal file
File diff suppressed because one or more lines are too long
4
lib/init-action.js
generated
@@ -36,7 +36,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const safe_which_1 = require("@chrisgavin/safe-which");
const io = __importStar(require("@actions/io"));
const uuid_1 = require("uuid");
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
@@ -258,7 +258,7 @@ async function run() {
if (config.languages.includes(languages_1.Language.go) &&
process.platform === "linux") {
try {
const goBinaryPath = await (0, safe_which_1.safeWhich)("go");
const goBinaryPath = await io.which("go", true);
const fileOutput = await (0, actions_util_1.getFileType)(goBinaryPath);
// Go 1.21 and above ships with statically linked binaries on Linux. CodeQL cannot currently trace custom builds
// where the entry point is a statically linked binary. Until that is fixed, we work around the problem by
File diff suppressed because one or more lines are too long
4
lib/init.js
generated
@@ -42,7 +42,7 @@ exports.cleanupDatabaseClusterDirectory = cleanupDatabaseClusterDirectory;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const io = __importStar(require("@actions/io"));
const actions_util_1 = require("./actions-util");
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
@@ -102,7 +102,7 @@ async function checkInstallPython311(languages, codeql) {
process.platform === "win32" &&
!(await codeql.getVersion()).features?.supportsPython312) {
const script = path.resolve(__dirname, "../python-setup", "check_python12.ps1");
await new toolrunner.ToolRunner(await safeWhich.safeWhich("powershell"), [
await new toolrunner.ToolRunner(await io.which("powershell", true), [
script,
]).exec();
}
File diff suppressed because one or more lines are too long
10
lib/logging.js
generated
@@ -36,6 +36,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.getActionsLogger = getActionsLogger;
exports.getRunnerLogger = getRunnerLogger;
exports.withGroup = withGroup;
exports.withGroupAsync = withGroupAsync;
exports.formatDuration = formatDuration;
const core = __importStar(require("@actions/core"));
function getActionsLogger() {
@@ -65,6 +66,15 @@ function withGroup(groupName, f) {
core.endGroup();
}
}
async function withGroupAsync(groupName, f) {
core.startGroup(groupName);
try {
return await f();
}
finally {
core.endGroup();
}
}
/** Format a duration for use in logs. */
function formatDuration(durationMs) {
if (durationMs < 1000) {
@@ -1 +1 @@
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAcA,4CAEC;AAED,0CAcC;AAED,8BAOC;AAGD,wCAWC;AAvDD,oDAAsC;AActC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAgB,eAAe,CAAC,SAAkB;IAChD,OAAO;QACL,sCAAsC;QACtC,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS;QAClD,sCAAsC;QACtC,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,sCAAsC;QACtC,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,sCAAsC;QACtC,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,OAAO,EAAE,GAAG,EAAE,CAAC,SAAS;QACxB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAED,SAAgB,SAAS,CAAI,SAAiB,EAAE,CAAU;IACxD,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAC3B,IAAI,CAAC;QACH,OAAO,CAAC,EAAE,CAAC;IACb,CAAC;YAAS,CAAC;QACT,IAAI,CAAC,QAAQ,EAAE,CAAC;IAClB,CAAC;AACH,CAAC;AAED,yCAAyC;AACzC,SAAgB,cAAc,CAAC,UAAkB;IAC/C,IAAI,UAAU,GAAG,IAAI,EAAE,CAAC;QACtB,OAAO,GAAG,UAAU,IAAI,CAAC;IAC3B,CAAC;IAED,IAAI,UAAU,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC;QAC3B,OAAO,GAAG,CAAC,UAAU,GAAG,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC;IAC9C,CAAC;IACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC;IACrD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC9D,OAAO,GAAG,OAAO,IAAI,OAAO,GAAG,CAAC;AAClC,CAAC"}
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAcA,4CAEC;AAED,0CAcC;AAED,8BAOC;AAED,wCAUC;AAGD,wCAWC;AAnED,oDAAsC;AActC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAgB,eAAe,CAAC,SAAkB;IAChD,OAAO;QACL,sCAAsC;QACtC,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS;QAClD,sCAAsC;QACtC,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,sCAAsC;QACtC,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,sCAAsC;QACtC,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,OAAO,EAAE,GAAG,EAAE,CAAC,SAAS;QACxB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAED,SAAgB,SAAS,CAAI,SAAiB,EAAE,CAAU;IACxD,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAC3B,IAAI,CAAC;QACH,OAAO,CAAC,EAAE,CAAC;IACb,CAAC;YAAS,CAAC;QACT,IAAI,CAAC,QAAQ,EAAE,CAAC;IAClB,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,cAAc,CAClC,SAAiB,EACjB,CAAmB;IAEnB,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAC3B,IAAI,CAAC;QACH,OAAO,MAAM,CAAC,EAAE,CAAC;IACnB,CAAC;YAAS,CAAC;QACT,IAAI,CAAC,QAAQ,EAAE,CAAC;IAClB,CAAC;AACH,CAAC;AAED,yCAAyC;AACzC,SAAgB,cAAc,CAAC,UAAkB;IAC/C,IAAI,UAAU,GAAG,IAAI,EAAE,CAAC;QACtB,OAAO,GAAG,UAAU,IAAI,CAAC;IAC3B,CAAC;IAED,IAAI,UAAU,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC;QAC3B,OAAO,GAAG,CAAC,UAAU,GAAG,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC;IAC9C,CAAC;IACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC;IACrD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC9D,OAAO,GAAG,OAAO,IAAI,OAAO,GAAG,CAAC;AAClC,CAAC"}
2
lib/setup-codeql.js
generated
@@ -417,7 +417,7 @@ const downloadCodeQL = async function (codeqlURL, maybeBundleVersion, maybeCliVe
const extractedBundlePath = extractToToolcache
? toolcacheInfo.path
: getTempExtractionDir(tempDir);
let statusReport = await (0, tools_download_1.downloadAndExtract)(codeqlURL, extractedBundlePath, authorization, { "User-Agent": "CodeQL Action", ...headers }, tarVersion, features, logger);
let statusReport = await (0, tools_download_1.downloadAndExtract)(codeqlURL, extractedBundlePath, authorization, { "User-Agent": "CodeQL Action", ...headers }, tarVersion, logger);
if (!toolcacheInfo) {
logger.debug("Could not cache CodeQL tools because we could not determine the bundle version from the " +
`URL ${codeqlURL}.`);
File diff suppressed because one or more lines are too long
42
lib/start-proxy-action-post.js
generated
@@ -43,46 +43,38 @@ const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const configUtils = __importStar(require("./config-utils"));
const debug_artifacts_1 = require("./debug-artifacts");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util_1 = require("./util");
async function runWrapper() {
const logger = (0, logging_1.getActionsLogger)();
try {
// Restore inputs from `start-proxy` Action.
actionsUtil.restoreInputs();
// Kill the running proxy
const pid = core.getState("proxy-process-pid");
if (pid) {
process.kill(Number(pid));
}
}
catch (error) {
core.setFailed(`start-proxy post-action step failed: ${(0, util_1.getErrorMessage)(error)}`);
}
const config = await configUtils.getConfig(actionsUtil.getTemporaryDirectory(), core);
if ((config && config.debugMode) || core.isDebug()) {
const logFilePath = core.getState("proxy-log-file");
core.info("Debug mode is on. Uploading proxy log as Actions debugging artifact...");
if (config?.gitHubVersion.type === undefined) {
core.warning(`Did not upload debug artifacts because cannot determine the GitHub variant running.`);
return;
}
const logger = (0, logging_1.getActionsLogger)();
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
try {
const artifactUploader = await (0, debug_artifacts_1.getArtifactUploaderClient)(logger, gitHubVersion.type, features);
const config = await configUtils.getConfig(actionsUtil.getTemporaryDirectory(), logger);
if ((config && config.debugMode) || core.isDebug()) {
const logFilePath = core.getState("proxy-log-file");
logger.info("Debug mode is on. Uploading proxy log as Actions debugging artifact...");
if (config?.gitHubVersion.type === undefined) {
logger.warning(`Did not upload debug artifacts because cannot determine the GitHub variant running.`);
return;
}
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
const artifactUploader = await (0, debug_artifacts_1.getArtifactUploaderClient)(logger, gitHubVersion.type);
await artifactUploader.uploadArtifact("proxy-log-file", [logFilePath], actionsUtil.getTemporaryDirectory(), {
// ensure we don't keep the debug artifacts around for too long since they can be large.
retentionDays: 7,
});
}
catch (e) {
// A failure to upload debug artifacts should not fail the entire action.
core.warning(`Failed to upload debug artifacts: ${e}`);
}
}
catch (error) {
// A failure in the post step should not fail the entire action.
logger.warning(`start-proxy post-action step failed: ${(0, util_1.getErrorMessage)(error)}`);
}
}
void runWrapper();
@@ -1 +1 @@
{"version":3,"file":"start-proxy-action-post.js","sourceRoot":"","sources":["../src/start-proxy-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,4DAA8C;AAC9C,uDAA8D;AAC9D,mDAA2C;AAC3C,uCAA6C;AAC7C,6CAAkD;AAClD,iCAIgB;AAEhB,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,4CAA4C;QAC5C,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;QAC/C,IAAI,GAAG,EAAE,CAAC;YACR,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;QAC5B,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,wCAAwC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CACjE,CAAC;IACJ,CAAC;IACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CACxC,WAAW,CAAC,qBAAqB,EAAE,EACnC,IAAI,CACL,CAAC;IAEF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE,CAAC;QACnD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;QACpD,IAAI,CAAC,IAAI,CACP,wEAAwE,CACzE,CAAC;QACF,IAAI,MAAM,EAAE,aAAa,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC7C,IAAI,CAAC,OAAO,CACV,qFAAqF,CACtF,CAAC;YACF,OAAO;QACT,CAAC;QAED,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QACjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QAEF,IAAI,CAAC;YACH,MAAM,gBAAgB,GAAG,MAAM,IAAA,2CAAyB,EACtD,MAAM,EACN,aAAa,CAAC,IAAI,EAClB,QAAQ,CACT,CAAC;YAEF,MAAM,gBAAgB,CAAC,cAAc,CACnC,gBAAgB,EAChB,CAAC,WAAW,CAAC,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC;gBACE,wFAAwF;gBACxF,aAAa,EAAE,CAAC;aACjB,CACF,CAAC;QACJ,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,yEAAyE;YACzE,IAAI,CAAC,OAAO,CAAC,qCAAqC,CAAC,EAAE,CAAC,CAAC;QACzD,CAAC;IACH,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"start-proxy-action-post.js","sourceRoot":"","sources":["../src/start-proxy-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,4DAA8C;AAC9C,uDAA8D;AAC9D,uCAA6C;AAC7C,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,CAAC;QACH,4CAA4C;QAC5C,WAAW,CAAC,aAAa,EAAE,CAAC;QAE5B,yBAAyB;QACzB,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;QAC/C,IAAI,GAAG,EAAE,CAAC;YACR,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;QAC5B,CAAC;QAED,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CACxC,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QAEF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE,CAAC;YACnD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;YACpD,MAAM,CAAC,IAAI,CACT,wEAAwE,CACzE,CAAC;YACF,IAAI,MAAM,EAAE,aAAa,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC7C,MAAM,CAAC,OAAO,CACZ,qFAAqF,CACtF,CAAC;gBACF,OAAO;YACT,CAAC;YACD,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;YAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;YAEjD,MAAM,gBAAgB,GAAG,MAAM,IAAA,2CAAyB,EACtD,MAAM,EACN,aAAa,CAAC,IAAI,CACnB,CAAC;YAEF,MAAM,gBAAgB,CAAC,cAAc,CACnC,gBAAgB,EAChB,CAAC,WAAW,CAAC,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC;gBACE,wFAAwF;gBACxF,aAAa,EAAE,CAAC;aACjB,CACF,CAAC;QACJ,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,gEAAgE;QAChE,MAAM,CAAC,OAAO,CACZ,wCAAwC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CACjE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
4
lib/start-proxy-action.js
generated
@@ -152,6 +152,10 @@ async function startProxy(binPath, config, logFilePath, logger) {
core.setOutput("proxy_host", host);
core.setOutput("proxy_port", port.toString());
core.setOutput("proxy_ca_certificate", config.ca.cert);
const registry_urls = config.all_credentials
.filter((credential) => credential.url !== undefined)
.map((credential) => credential.url);
core.setOutput("proxy_urls", JSON.stringify(registry_urls));
}
catch (error) {
core.setFailed(`start-proxy action failed: ${util.getErrorMessage(error)}`);
File diff suppressed because one or more lines are too long
3
lib/status-report.js
generated
@@ -45,6 +45,7 @@ const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const doc_url_1 = require("./doc-url");
const environment_1 = require("./environment");
const git_utils_1 = require("./git-utils");
const util_1 = require("./util");
var ActionName;
(function (ActionName) {
@@ -125,7 +126,7 @@ function setJobStatusIfUnsuccessful(actionStatus) {
async function createStatusReportBase(actionName, status, actionStartedAt, config, diskInfo, logger, cause, exception) {
try {
const commitOid = (0, actions_util_1.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
const ref = await (0, actions_util_1.getRef)();
const ref = await (0, git_utils_1.getRef)();
const jobRunUUID = process.env[environment_1.EnvVar.JOB_RUN_UUID] || "";
const workflowRunID = (0, actions_util_1.getWorkflowRunID)();
const workflowRunAttempt = (0, actions_util_1.getWorkflowRunAttempt)();
File diff suppressed because one or more lines are too long
4
lib/tar.js
generated
@@ -41,14 +41,14 @@ const child_process_1 = require("child_process");
const fs = __importStar(require("fs"));
const stream = __importStar(require("stream"));
const toolrunner_1 = require("@actions/exec/lib/toolrunner");
const io = __importStar(require("@actions/io"));
const toolcache = __importStar(require("@actions/tool-cache"));
const safe_which_1 = require("@chrisgavin/safe-which");
const actions_util_1 = require("./actions-util");
const util_1 = require("./util");
const MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3";
const MIN_REQUIRED_GNU_TAR_VERSION = "1.31";
async function getTarVersion() {
const tar = await (0, safe_which_1.safeWhich)("tar");
const tar = await io.which("tar", true);
let stdout = "";
const exitCode = await new toolrunner_1.ToolRunner(tar, ["--version"], {
listeners: {
File diff suppressed because one or more lines are too long
EAAE,CAAC,CAAC;YAEzD,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBAC7C,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;gBAC1B,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC;YAEH,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBAC7C,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;gBAC1B,4EAA4E;gBAC5E,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC;YAEH,UAAU,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;gBAC7B,MAAM,CAAC,IAAI,KAAK,CAAC,+BAA+B,GAAG,EAAE,CAAC,CAAC,CAAC;YAC1D,CAAC,CAAC,CAAC;YAEH,IAAI,GAAG,YAAY,MAAM,CAAC,QAAQ,EAAE,CAAC;gBACnC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;oBAC7C,MAAM,CACJ,IAAI,KAAK,CAAC,+CAA+C,GAAG,EAAE,CAAC,CAChE,CAAC;gBACJ,CAAC,CAAC,CAAC;YACL,CAAC;YAED,UAAU,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;gBAC7B,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;oBACf,MAAM,CACJ,IAAI,qCAAsB,CACxB,KAAK,EACL,IAAI,EACJ,IAAI,IAAI,SAAS,EACjB,MAAM,EACN,MAAM,CACP,CACF,CAAC;gBACJ,CAAC;gBACD,OAAO,EAAE,CAAC;YACZ,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,IAAA,kBAAW,EAAC,IAAI,EAAE,kCAAkC,EAAE,MAAM,CAAC,CAAC;QACpE,MAAM,CAAC,CAAC;IACV,CAAC;AACH,CAAC;AAED,SAAgB,sBAAsB,CAAC,OAAe;IACpD,IAAI,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAChC,OAAO,MAAM,CAAC;IAChB,CAAC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"}
|
||||
lib/tools-download.js (generated, 43 lines changed)
@@ -41,10 +41,11 @@ const fs = __importStar(require("fs"));
 const os = __importStar(require("os"));
 const path = __importStar(require("path"));
 const perf_hooks_1 = require("perf_hooks");
+const core = __importStar(require("@actions/core"));
+const http_client_1 = require("@actions/http-client");
 const toolcache = __importStar(require("@actions/tool-cache"));
 const follow_redirects_1 = require("follow-redirects");
 const semver = __importStar(require("semver"));
-const feature_flags_1 = require("./feature-flags");
 const logging_1 = require("./logging");
 const tar = __importStar(require("./tar"));
 const util_1 = require("./util");
@@ -72,23 +73,29 @@ function makeStreamedToolsDownloadDurations(combinedDurationMs) {
         streamExtraction: true,
     };
 }
-async function downloadAndExtract(codeqlURL, dest, authorization, headers, tarVersion, features, logger) {
+async function downloadAndExtract(codeqlURL, dest, authorization, headers, tarVersion, logger) {
     logger.info(`Downloading CodeQL tools from ${codeqlURL} . This may take a while.`);
     const compressionMethod = tar.inferCompressionMethod(codeqlURL);
-    // TODO: Re-enable streaming when we have a more reliable way to respect proxy settings.
-    if ((await features.getValue(feature_flags_1.Feature.ZstdBundleStreamingExtraction)) &&
-        compressionMethod === "zstd" &&
-        process.platform === "linux") {
-        logger.info(`Streaming the extraction of the CodeQL bundle.`);
-        const toolsInstallStart = perf_hooks_1.performance.now();
-        await downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger);
-        const combinedDurationMs = Math.round(perf_hooks_1.performance.now() - toolsInstallStart);
-        logger.info(`Finished downloading and extracting CodeQL bundle to ${dest} (${(0, logging_1.formatDuration)(combinedDurationMs)}).`);
-        return {
-            compressionMethod,
-            toolsUrl: sanitizeUrlForStatusReport(codeqlURL),
-            ...makeStreamedToolsDownloadDurations(combinedDurationMs),
-        };
+    try {
+        if (compressionMethod === "zstd" && process.platform === "linux") {
+            logger.info(`Streaming the extraction of the CodeQL bundle.`);
+            const toolsInstallStart = perf_hooks_1.performance.now();
+            await downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger);
+            const combinedDurationMs = Math.round(perf_hooks_1.performance.now() - toolsInstallStart);
+            logger.info(`Finished downloading and extracting CodeQL bundle to ${dest} (${(0, logging_1.formatDuration)(combinedDurationMs)}).`);
+            return {
+                compressionMethod,
+                toolsUrl: sanitizeUrlForStatusReport(codeqlURL),
+                ...makeStreamedToolsDownloadDurations(combinedDurationMs),
+            };
+        }
+    }
+    catch (e) {
+        core.warning(`Failed to download and extract CodeQL bundle using streaming with error: ${(0, util_1.getErrorMessage)(e)}`);
+        core.warning(`Falling back to downloading the bundle before extracting.`);
+        // If we failed during processing, we want to clean up the destination directory
+        // before we try again.
+        await (0, util_1.cleanUpGlob)(dest, "CodeQL bundle", logger);
     }
     const toolsDownloadStart = perf_hooks_1.performance.now();
     const archivedBundlePath = await toolcache.downloadTool(codeqlURL, undefined, authorization, headers);
@@ -114,12 +121,16 @@ async function downloadAndExtract(codeqlURL, dest, authorization, headers, tarVe
 async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) {
     // Ensure destination exists
     fs.mkdirSync(dest, { recursive: true });
+    // Get HTTP Agent to use (respects proxy settings).
+    const agent = new http_client_1.HttpClient().getAgent(codeqlURL);
     // Add User-Agent header and Authorization header if provided.
     headers = Object.assign({ "User-Agent": "CodeQL Action" }, authorization ? { authorization } : {}, headers);
     const response = await new Promise((resolve) => follow_redirects_1.https.get(codeqlURL, {
         headers,
         // Increase the high water mark to improve performance.
         highWaterMark: exports.STREAMING_HIGH_WATERMARK_BYTES,
+        // Use the agent to respect proxy settings.
+        agent,
     }, (r) => resolve(r)));
     if (response.statusCode !== 200) {
         throw new Error(`Failed to download CodeQL bundle from ${codeqlURL}. HTTP status code: ${response.statusCode}.`);
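The hunks above drop the `ZstdBundleStreamingExtraction` feature-flag gate: streamed extraction of zstd bundles is now always attempted on Linux, with a catch-all fallback to the classic download-then-extract path. A minimal TypeScript sketch of that shape, not the action's actual code; the two callbacks and the `Logger` type are illustrative stand-ins for the real helpers:

import * as fs from "fs";

type Logger = { info(msg: string): void; warning(msg: string): void };

// Sketch of the new control flow: always try the streamed zstd path on
// Linux, and on any failure clean up and fall back to the classic
// download-then-extract path. Both callbacks are hypothetical stand-ins.
async function fetchBundle(
    url: string,
    dest: string,
    compression: "gzip" | "zstd",
    streamExtract: (url: string, dest: string) => Promise<void>,
    downloadThenExtract: (url: string, dest: string) => Promise<void>,
    logger: Logger,
): Promise<void> {
    try {
        if (compression === "zstd" && process.platform === "linux") {
            await streamExtract(url, dest);
            return;
        }
    } catch (e) {
        logger.warning(`Failed to stream-extract the bundle: ${String(e)}`);
        logger.warning("Falling back to downloading the bundle before extracting.");
        // A failed streamed extraction can leave partial output behind.
        await fs.promises.rm(dest, { recursive: true, force: true });
    }
    await downloadThenExtract(url, dest);
}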
[lib/tools-download.js.map — minified one-line source map updated; contents omitted]
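The other half of the change is visible in the `downloadAndExtractZstdWithStreaming` hunk above: the streamed request now goes through an agent from `@actions/http-client`, so the runner's proxy settings are honored, which is what made it safe to delete the old TODO about proxy support. A sketch of that request setup, assuming only the two libraries named in the diff; the 4 MiB buffer stands in for the real `STREAMING_HIGH_WATERMARK_BYTES` constant:

import { HttpClient } from "@actions/http-client";
import { https } from "follow-redirects";
import type { IncomingMessage } from "http";
import type { RequestOptions } from "https";

// The agent returned by @actions/http-client honors the runner's proxy
// environment variables; handing it to follow-redirects keeps redirected
// hops on the proxy as well.
async function openBundleStream(url: string): Promise<IncomingMessage> {
    const agent = new HttpClient().getAgent(url);
    const response = await new Promise<IncomingMessage>((resolve) =>
        https.get(
            url,
            {
                headers: { "User-Agent": "CodeQL Action" },
                // Larger buffer for throughput; the cast is needed because
                // highWaterMark is forwarded to the response stream rather
                // than being a standard request option.
                highWaterMark: 4 * 1024 * 1024,
                agent,
            } as RequestOptions,
            (r) => resolve(r)),
    );
    if (response.statusCode !== 200) {
        throw new Error(`Download failed with HTTP status ${response.statusCode}.`);
    }
    return response;
}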
lib/trap-caching.js (generated, 9 lines changed)
@@ -44,6 +44,7 @@ const actionsUtil = __importStar(require("./actions-util"));
 const apiClient = __importStar(require("./api-client"));
 const doc_url_1 = require("./doc-url");
 const feature_flags_1 = require("./feature-flags");
+const gitUtils = __importStar(require("./git-utils"));
 const util_1 = require("./util");
 // This constant should be bumped if we make a breaking change
 // to how the CodeQL Action stores or retrieves the TRAP cache,
@@ -80,7 +81,7 @@ async function downloadTrapCaches(codeql, languages, logger) {
         fs.mkdirSync(cacheDir, { recursive: true });
         result[language] = cacheDir;
     }
-    if (await actionsUtil.isAnalyzingDefaultBranch()) {
+    if (await gitUtils.isAnalyzingDefaultBranch()) {
         logger.info("Analyzing default branch. Skipping downloading of TRAP caches.");
         return result;
     }
@@ -122,7 +123,7 @@ async function downloadTrapCaches(codeql, languages, logger) {
  * @returns Whether the TRAP caches were uploaded.
  */
 async function uploadTrapCaches(codeql, config, logger) {
-    if (!(await actionsUtil.isAnalyzingDefaultBranch()))
+    if (!(await gitUtils.isAnalyzingDefaultBranch()))
         return false; // Only upload caches from the default branch
     for (const language of config.languages) {
         const cacheDir = config.trapCaches[language];
@@ -151,14 +152,14 @@ async function cleanupTrapCaches(config, features, logger) {
             trap_cache_cleanup_skipped_because: "feature disabled",
         };
     }
-    if (!(await actionsUtil.isAnalyzingDefaultBranch())) {
+    if (!(await gitUtils.isAnalyzingDefaultBranch())) {
         return {
             trap_cache_cleanup_skipped_because: "not analyzing default branch",
         };
     }
     try {
         let totalBytesCleanedUp = 0;
-        const allCaches = await apiClient.listActionsCaches(CODEQL_TRAP_CACHE_PREFIX, await actionsUtil.getRef());
+        const allCaches = await apiClient.listActionsCaches(CODEQL_TRAP_CACHE_PREFIX, await gitUtils.getRef());
         for (const language of config.languages) {
             if (config.trapCaches[language]) {
                 const cachesToRemove = await getTrapCachesForLanguage(allCaches, language, logger);
File diff suppressed because one or more lines are too long
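In the `lib/trap-caching.js` hunks above, the branch checks move from `./actions-util` to the new `./git-utils` module, while the gating policy itself is unchanged: downloads are skipped on the default branch and uploads happen only from it. A small sketch of that policy; the callback is a stand-in for the real `gitUtils.isAnalyzingDefaultBranch`:

// Sketch of the branch policy applied by trap-caching.js.
async function shouldTouchTrapCache(
    direction: "download" | "upload",
    isAnalyzingDefaultBranch: () => Promise<boolean>,
): Promise<boolean> {
    const onDefaultBranch = await isAnalyzingDefaultBranch();
    // Caches are produced on the default branch and consumed on other refs:
    // skip downloads on the default branch, and only upload from it.
    return direction === "download" ? !onDefaultBranch : onDefaultBranch;
}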
lib/trap-caching.test.js (generated, 13 lines changed)
@@ -45,6 +45,7 @@ const actionsUtil = __importStar(require("./actions-util"));
 const apiClient = __importStar(require("./api-client"));
 const codeql_1 = require("./codeql");
 const feature_flags_1 = require("./feature-flags");
+const gitUtils = __importStar(require("./git-utils"));
 const languages_1 = require("./languages");
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
@@ -111,7 +112,7 @@ function getTestConfigWithTempDir(tempDir) {
 (0, ava_1.default)("check flags for JS, analyzing default branch", async (t) => {
     await util.withTmpDir(async (tmpDir) => {
         const config = getTestConfigWithTempDir(tmpDir);
-        sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+        sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
         const result = await (0, codeql_1.getTrapCachingExtractorConfigArgsForLang)(config, languages_1.Language.javascript);
         t.deepEqual(result, [
             `-O=javascript.trap.cache.dir=${path.resolve(tmpDir, "jsCache")}`,
@@ -123,7 +124,7 @@ function getTestConfigWithTempDir(tempDir) {
 (0, ava_1.default)("check flags for all, not analyzing default branch", async (t) => {
     await util.withTmpDir(async (tmpDir) => {
         const config = getTestConfigWithTempDir(tmpDir);
-        sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
+        sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
         const result = await (0, codeql_1.getTrapCachingExtractorConfigArgs)(config);
         t.deepEqual(result, [
             `-O=javascript.trap.cache.dir=${path.resolve(tmpDir, "jsCache")}`,
@@ -144,7 +145,7 @@ function getTestConfigWithTempDir(tempDir) {
 (0, ava_1.default)("upload cache key contains right fields", async (t) => {
     const loggedMessages = [];
     const logger = (0, testing_utils_1.getRecordingLogger)(loggedMessages);
-    sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+    sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
     sinon.stub(util, "tryGetFolderBytes").resolves(999_999_999);
     const stubSave = sinon.stub(cache, "saveCache");
     process.env.GITHUB_SHA = "somesha";
@@ -159,7 +160,7 @@ function getTestConfigWithTempDir(tempDir) {
     const loggedMessages = [];
     const logger = (0, testing_utils_1.getRecordingLogger)(loggedMessages);
     sinon.stub(actionsUtil, "getTemporaryDirectory").returns(tmpDir);
-    sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
+    sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
     const stubRestore = sinon.stub(cache, "restoreCache").resolves("found");
     const eventFile = path.resolve(tmpDir, "event.json");
     process.env.GITHUB_EVENT_NAME = "pull_request";
@@ -185,8 +186,8 @@ function getTestConfigWithTempDir(tempDir) {
     await util.withTmpDir(async (tmpDir) => {
         // This config specifies that we are analyzing JavaScript and Ruby, but not Swift.
         const config = getTestConfigWithTempDir(tmpDir);
-        sinon.stub(actionsUtil, "getRef").resolves("refs/heads/main");
-        sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+        sinon.stub(gitUtils, "getRef").resolves("refs/heads/main");
+        sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
        const listStub = sinon.stub(apiClient, "listActionsCaches").resolves([
             // Should be kept, since it's not relevant to CodeQL. In reality, the API shouldn't return
             // this in the first place, but this is a defensive check.
File diff suppressed because one or more lines are too long
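The test changes above follow mechanically from the refactor: once the helpers live in `./git-utils`, sinon stubs must target that module instead of `./actions-util`. A self-contained sketch of the pattern with ava and sinon; the inline `gitUtils` object stands in for the real module:

import test from "ava";
import * as sinon from "sinon";

// Inline stand-in for the real ./git-utils module.
const gitUtils = {
    async isAnalyzingDefaultBranch(): Promise<boolean> {
        return false;
    },
    async getRef(): Promise<string> {
        return "refs/heads/feature";
    },
};

test("branch helpers are stubbed on the module that now defines them", async (t) => {
    // After the refactor the stubs must target git-utils; stubbing the old
    // actions-util module would leave the real implementation in play.
    sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
    sinon.stub(gitUtils, "getRef").resolves("refs/heads/main");
    t.true(await gitUtils.isAnalyzingDefaultBranch());
    t.is(await gitUtils.getRef(), "refs/heads/main");
    sinon.restore();
});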
lib/upload-lib.js (generated, 3 lines changed)
@@ -60,6 +60,7 @@ const codeql_1 = require("./codeql");
 const config_utils_1 = require("./config-utils");
 const environment_1 = require("./environment");
 const fingerprints = __importStar(require("./fingerprints"));
+const gitUtils = __importStar(require("./git-utils"));
 const init_1 = require("./init");
 const repository_1 = require("./repository");
 const tools_features_1 = require("./tools-features");
@@ -423,7 +424,7 @@ async function uploadFiles(sarifPath, checkoutPath, category, features, logger)
     logger.debug(`Compressing serialized SARIF`);
     const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
     const checkoutURI = (0, file_url_1.default)(checkoutPath);
-    const payload = buildPayload(await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), analysisKey, util.getRequiredEnvParam("GITHUB_WORKFLOW"), zippedSarif, actionsUtil.getWorkflowRunID(), actionsUtil.getWorkflowRunAttempt(), checkoutURI, environment, toolNames, await actionsUtil.determineBaseBranchHeadCommitOid());
+    const payload = buildPayload(await gitUtils.getCommitOid(checkoutPath), await gitUtils.getRef(), analysisKey, util.getRequiredEnvParam("GITHUB_WORKFLOW"), zippedSarif, actionsUtil.getWorkflowRunID(), actionsUtil.getWorkflowRunAttempt(), checkoutURI, environment, toolNames, await gitUtils.determineBaseBranchHeadCommitOid());
     // Log some useful debug info about the info
     const rawUploadSizeBytes = sarifPayload.length;
     logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
File diff suppressed because one or more lines are too long
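Aside from swapping the commit and ref helpers over to `gitUtils`, the `uploadFiles` hunk above shows the SARIF encoding the action uses: the serialized SARIF is gzipped and base64-encoded before being placed in the upload payload. A round-trip sketch of that encoding step, using only Node's zlib as the diff does:

import zlib from "zlib";

// The encoding step shown in uploadFiles: serialized SARIF is gzipped and
// base64-encoded before being placed in the upload payload.
function compressSarif(sarifPayload: string): string {
    return zlib.gzipSync(sarifPayload).toString("base64");
}

// Round trip to confirm the encoding is lossless.
const zipped = compressSarif(JSON.stringify({ version: "2.1.0", runs: [] }));
const restored = zlib.gunzipSync(Buffer.from(zipped, "base64")).toString();
// restored === '{"version":"2.1.0","runs":[]}'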
lib/upload-sarif-action-post.js (generated, 7 lines changed)
@@ -40,13 +40,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
 */
 const core = __importStar(require("@actions/core"));
 const actionsUtil = __importStar(require("./actions-util"));
-const actions_util_1 = require("./actions-util");
 const api_client_1 = require("./api-client");
 const debugArtifacts = __importStar(require("./debug-artifacts"));
 const environment_1 = require("./environment");
-const feature_flags_1 = require("./feature-flags");
 const logging_1 = require("./logging");
-const repository_1 = require("./repository");
 const util_1 = require("./util");
 async function runWrapper() {
     try {
@@ -55,8 +52,6 @@ async function runWrapper() {
         const logger = (0, logging_1.getActionsLogger)();
         const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
         (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
-        const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
-        const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
         // Upload SARIF artifacts if we determine that this is a third-party analysis run.
         // For first-party runs, this artifact will be uploaded in the `analyze-post` step.
         if (process.env[environment_1.EnvVar.INIT_ACTION_HAS_RUN] !== "true") {
@@ -64,7 +59,7 @@ async function runWrapper() {
             core.warning(`Did not upload debug artifacts because cannot determine the GitHub variant running.`);
             return;
         }
-        await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, gitHubVersion.type, features));
+        await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, gitHubVersion.type));
         }
     }
     catch (error) {
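The post-step change above removes the `Features` plumbing entirely: `uploadCombinedSarifArtifacts` no longer takes a features argument, and the step still only acts for third-party analyses, since first-party runs upload this artifact in `analyze-post`. A hedged sketch of the gating; the environment-variable literal and the uploader callback below are placeholders, as the real name is defined in `./environment` as `EnvVar.INIT_ACTION_HAS_RUN`:

// Placeholder name; the real constant lives in ./environment.
const INIT_ACTION_HAS_RUN = "CODEQL_ACTION_INIT_ACTION_HAS_RUN";

async function maybeUploadCombinedSarif(
    gitHubVariant: string | undefined,
    upload: (variant: string) => Promise<void>,
    warn: (msg: string) => void,
): Promise<void> {
    // First-party runs upload this artifact in the analyze-post step instead.
    if (process.env[INIT_ACTION_HAS_RUN] === "true") {
        return;
    }
    if (gitHubVariant === undefined) {
        warn("Did not upload debug artifacts because cannot determine the GitHub variant running.");
        return;
    }
    await upload(gitHubVariant);
}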
[lib/upload-sarif-action-post.js.map — minified one-line source map updated; contents omitted]
lib/util.js (generated, 4 lines changed)
@@ -86,7 +86,7 @@ const path = __importStar(require("path"));
 const util_1 = require("util");
 const core = __importStar(require("@actions/core"));
 const exec = __importStar(require("@actions/exec/lib/exec"));
-const safe_which_1 = require("@chrisgavin/safe-which");
+const io = __importStar(require("@actions/io"));
 const check_disk_space_1 = __importDefault(require("check-disk-space"));
 const del_1 = __importDefault(require("del"));
 const get_folder_size_1 = __importDefault(require("get-folder-size"));
@@ -940,7 +940,7 @@ async function cleanUpGlob(glob, name, logger) {
 }
 async function isBinaryAccessible(binary, logger) {
     try {
-        await (0, safe_which_1.safeWhich)(binary);
+        await io.which(binary, true);
         logger.debug(`Found ${binary}.`);
         return true;
     }
File diff suppressed because one or more lines are too long
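The `lib/util.js` hunks above replace `@chrisgavin/safe-which` with `@actions/io`: `io.which(binary, true)` resolves to the binary's full path and throws when the binary is not on PATH. A sketch of the resulting accessibility check; the catch branch is assumed from the function's name and surrounding code, which the diff truncates:

import * as io from "@actions/io";

type Logger = { debug(msg: string): void };

// io.which(binary, true) resolves to the binary's full path when it is on
// PATH and throws otherwise, replacing the previous safeWhich call.
async function isBinaryAccessible(binary: string, logger: Logger): Promise<boolean> {
    try {
        await io.which(binary, true);
        logger.debug(`Found ${binary}.`);
        return true;
    } catch {
        logger.debug(`Could not find ${binary}.`);
        return false;
    }
}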
node_modules/.package-lock.json (generated, vendored, 598 lines changed)
[Vendored lockfile diff collapsed. Notable changes between the two refs: the package "version" field moves from 3.27.7 to 3.28.1; @actions/cache is bumped 3.3.0 → 4.0.0 (adding @protobuf-ts/plugin and twirp-ts to its dependencies); @actions/http-client is bumped 2.1.1 → 2.2.3 (adding undici ^5.25.4, which brings in @fastify/busboy 2.1.1 and undici 5.28.4); @chrisgavin/safe-which 1.0.2 is removed; @eslint/js is bumped 9.16.0 → 9.17.0; eslint-plugin-github is bumped 5.1.3 → 5.1.4; and the @typescript-eslint packages (eslint-plugin, parser, type-utils, plus their nested scope-manager, types, typescript-estree, utils, and visitor-keys) are bumped 8.17.0 → 8.18.2, with 8.17.0 copies retained under typescript-eslint/node_modules.]
|
||||
"dependencies": {
|
||||
"@fastify/busboy": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.0"
|
||||
}
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
"version": "5.26.5",
|
||||
"license": "MIT"
|
||||
16 node_modules/@actions/cache/README.md generated vendored
@@ -6,6 +6,20 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac

Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB.

## ⚠️ Important changes

The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.

The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**.

**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.**

If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.

Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`.

Read more about the change & access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).

## Usage

This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache).

@@ -47,5 +61,3 @@ const cacheKey = await cache.restoreCache(paths, key, restoreKeys)

A cache gets downloaded in multiple segments of fixed sizes (now `128MB` to fail fast; previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner). Sometimes a segment download gets stuck, which leaves the workflow job stuck forever until it fails. Version `v3.0.4` of the cache package introduces a segment download timeout, which allows a stuck segment download to be aborted so the job can proceed with a cache miss.

The default value of this timeout is 10 minutes (starting with `v3.2.1`; it was 60 minutes in versions `v3.0.4` through `v3.2.0`, both inclusive) and can be customized by specifying an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` with the timeout value in minutes.
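To make the usage notes above concrete, here is a minimal sketch of the package's two entry points, assuming `@actions/cache` v4 is installed; the paths and keys are illustrative only:

```ts
import * as cache from "@actions/cache";

async function run(): Promise<void> {
  const paths = ["node_modules"];     // hypothetical paths to cache
  const key = "npm-deps-v1";          // hypothetical primary key
  const restoreKeys = ["npm-deps-"];  // prefix fallbacks, as described above

  // restoreCache resolves the matched key on a hit, or undefined on a miss.
  const hitKey = await cache.restoreCache(paths, key, restoreKeys);
  if (hitKey === undefined) {
    // ... do the expensive work here, then save the result for next time.
    await cache.saveCache(paths, key);
  }
}

void run();
```

The segment download timeout is tuned purely through the `SEGMENT_DOWNLOAD_TIMEOUT_MINS` environment variable described above; no code change is involved.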
4 node_modules/@actions/cache/lib/cache.d.ts generated vendored
@@ -15,8 +15,8 @@ export declare function isFeatureAvailable(): boolean;
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
229 node_modules/@actions/cache/lib/cache.js generated vendored
@@ -37,7 +37,10 @@ const core = __importStar(require("@actions/core"));
const path = __importStar(require("path"));
const utils = __importStar(require("./internal/cacheUtils"));
const cacheHttpClient = __importStar(require("./internal/cacheHttpClient"));
const cacheTwirpClient = __importStar(require("./internal/shared/cacheTwirpClient"));
const config_1 = require("./internal/config");
const tar_1 = require("./internal/tar");
const constants_1 = require("./internal/constants");
class ValidationError extends Error {
constructor(message) {
super(message);
@@ -81,15 +84,39 @@ exports.isFeatureAvailable = isFeatureAvailable;
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core.debug(`Cache service version: ${cacheServiceVersion}`);
checkPaths(paths);
switch (cacheServiceVersion) {
case 'v2':
return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
case 'v1':
default:
return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
}
});
}
exports.restoreCache = restoreCache;
/**
* Restores cache using the legacy Cache Service
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param options cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
restoreKeys = restoreKeys || [];
const keys = [primaryKey, ...restoreKeys];
core.debug('Resolved Keys:');
@@ -151,7 +178,85 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch
return undefined;
});
}
exports.restoreCache = restoreCache;
/**
* Restores cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
// Override UploadOptions to force the use of Azure
options = Object.assign(Object.assign({}, options), { useAzureSdk: true });
restoreKeys = restoreKeys || [];
const keys = [primaryKey, ...restoreKeys];
core.debug('Resolved Keys:');
core.debug(JSON.stringify(keys));
if (keys.length > 10) {
throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);
}
for (const key of keys) {
checkKey(key);
}
let archivePath = '';
try {
const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
const compressionMethod = yield utils.getCompressionMethod();
const request = {
key: primaryKey,
restoreKeys,
version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive)
};
const response = yield twirpClient.GetCacheEntryDownloadURL(request);
if (!response.ok) {
core.warning(`Cache not found for keys: ${keys.join(', ')}`);
return undefined;
}
core.info(`Cache hit for: ${request.key}`);
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
core.info('Lookup only - skipping download');
return response.matchedKey;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive path: ${archivePath}`);
core.debug(`Starting download of archive to: ${archivePath}`);
yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options);
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
if (core.isDebug()) {
yield (0, tar_1.listTar)(archivePath, compressionMethod);
}
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
core.info('Cache restored successfully');
return response.matchedKey;
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else {
// Suppress all non-validation cache related errors because caching should be optional
core.warning(`Failed to restore: ${error.message}`);
}
}
finally {
try {
if (archivePath) {
yield utils.unlinkFile(archivePath);
}
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
return undefined;
});
}
/**
* Saves a list of files with the specified key
*
@@ -162,10 +267,33 @@ exports.restoreCache = restoreCache;
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/
function saveCache(paths, key, options, enableCrossOsArchive = false) {
var _a, _b, _c, _d, _e;
return __awaiter(this, void 0, void 0, function* () {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core.debug(`Cache service version: ${cacheServiceVersion}`);
checkPaths(paths);
checkKey(key);
switch (cacheServiceVersion) {
case 'v2':
return yield saveCacheV2(paths, key, options, enableCrossOsArchive);
case 'v1':
default:
return yield saveCacheV1(paths, key, options, enableCrossOsArchive);
}
});
}
exports.saveCache = saveCache;
/**
* Save cache using the legacy Cache Service
*
* @param paths
* @param key
* @param options
* @param enableCrossOsArchive
* @returns
*/
function saveCacheV1(paths, key, options, enableCrossOsArchive = false) {
var _a, _b, _c, _d, _e;
return __awaiter(this, void 0, void 0, function* () {
const compressionMethod = yield utils.getCompressionMethod();
let cacheId = -1;
const cachePaths = yield utils.resolvePaths(paths);
@@ -186,7 +314,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
core.debug('Reserving Cache');
@@ -205,7 +333,95 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
yield cacheHttpClient.saveCache(cacheId, archivePath, '', options);
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`);
}
else {
core.warning(`Failed to save: ${typedError.message}`);
}
}
finally {
// Try to delete the archive to save space
try {
yield utils.unlinkFile(archivePath);
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
return cacheId;
});
}
/**
* Save cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param key an explicit key for restoring the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
* @returns
*/
function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
// Override UploadOptions to force the use of Azure
// ...options goes first because we want to override the default values
// set in UploadOptions with these specific figures
options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true });
const compressionMethod = yield utils.getCompressionMethod();
const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
let cacheId = -1;
const cachePaths = yield utils.resolvePaths(paths);
core.debug('Cache Paths:');
core.debug(`${JSON.stringify(cachePaths)}`);
if (cachePaths.length === 0) {
throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
}
const archiveFolder = yield utils.createTempDirectory();
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
try {
yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
if (core.isDebug()) {
yield (0, tar_1.listTar)(archivePath, compressionMethod);
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
// Set the archive size in the options, will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize;
core.debug('Reserving Cache');
const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
const request = {
key,
version
};
const response = yield twirpClient.CreateCacheEntry(request);
if (!response.ok) {
throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);
}
core.debug(`Attempting to upload cache located at: ${archivePath}`);
yield cacheHttpClient.saveCache(cacheId, archivePath, response.signedUploadUrl, options);
const finalizeRequest = {
key,
version,
sizeBytes: `${archiveFileSize}`
};
const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);
core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
if (!finalizeResponse.ok) {
throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`);
}
cacheId = parseInt(finalizeResponse.entryId);
}
catch (error) {
const typedError = error;
@@ -231,5 +447,4 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
return cacheId;
});
}
exports.saveCache = saveCache;
//# sourceMappingURL=cache.js.map
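The core of this diff is that `restoreCache`/`saveCache` now route to a v1 or v2 implementation based on `getCacheServiceVersion()`. A distilled sketch of that dispatch pattern follows; the two service-specific functions are stand-in stubs here, and the environment-variable lookup is an assumption (the real lookup lives in `./internal/config`, which this diff only imports):

```ts
// Distilled sketch of the version dispatch above -- not the vendored module.
type CacheServiceVersion = "v1" | "v2";

// Assumption: a truthy ACTIONS_CACHE_SERVICE_V2 selects the new backend;
// the actual mechanism is defined in ./internal/config, not shown in the diff.
function getCacheServiceVersion(): CacheServiceVersion {
  return process.env["ACTIONS_CACHE_SERVICE_V2"] ? "v2" : "v1";
}

// Stand-ins for the two implementations; both resolve the matched key,
// or undefined on a cache miss, matching the documented return contract.
async function restoreCacheV1(paths: string[], key: string): Promise<string | undefined> {
  return undefined; // the legacy HTTP cache client would run here
}
async function restoreCacheV2(paths: string[], key: string): Promise<string | undefined> {
  return undefined; // Twirp GetCacheEntryDownloadURL + signed download would run here
}

export async function restoreCache(paths: string[], primaryKey: string): Promise<string | undefined> {
  switch (getCacheServiceVersion()) {
    case "v2":
      return restoreCacheV2(paths, primaryKey);
    case "v1":
    default:
      return restoreCacheV1(paths, primaryKey);
  }
}
```

Keeping v1 as the `default` branch is what makes the rollout backward compatible: an unrecognized or unset version string falls through to the legacy service.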
2 node_modules/@actions/cache/lib/cache.js.map generated vendored
File diff suppressed because one or more lines are too long
158 node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.d.ts generated vendored Normal file
@@ -0,0 +1,158 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* A Timestamp represents a point in time independent of any time zone or local
* calendar, encoded as a count of seconds and fractions of seconds at
* nanosecond resolution. The count is relative to an epoch at UTC midnight on
* January 1, 1970, in the proleptic Gregorian calendar which extends the
* Gregorian calendar backwards to year one.
*
* All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
* second table is needed for interpretation, using a [24-hour linear
* smear](https://developers.google.com/time/smear).
*
* The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
* restricting to that range, we ensure that we can convert to and from [RFC
* 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
*
* # Examples
*
* Example 1: Compute Timestamp from POSIX `time()`.
*
* Timestamp timestamp;
* timestamp.set_seconds(time(NULL));
* timestamp.set_nanos(0);
*
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
*
* struct timeval tv;
* gettimeofday(&tv, NULL);
*
* Timestamp timestamp;
* timestamp.set_seconds(tv.tv_sec);
* timestamp.set_nanos(tv.tv_usec * 1000);
*
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
*
* FILETIME ft;
* GetSystemTimeAsFileTime(&ft);
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
*
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
* Timestamp timestamp;
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
*
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
*
* long millis = System.currentTimeMillis();
*
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
* .setNanos((int) ((millis % 1000) * 1000000)).build();
*
*
* Example 5: Compute Timestamp from Java `Instant.now()`.
*
* Instant now = Instant.now();
*
* Timestamp timestamp =
* Timestamp.newBuilder().setSeconds(now.getEpochSecond())
* .setNanos(now.getNano()).build();
*
*
* Example 6: Compute Timestamp from current time in Python.
*
* timestamp = Timestamp()
* timestamp.GetCurrentTime()
*
* # JSON Mapping
*
* In JSON format, the Timestamp type is encoded as a string in the
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
* where {year} is always expressed using four digits while {month}, {day},
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
* is required. A proto3 JSON serializer should always use UTC (as indicated by
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
* able to accept both UTC and other timezones (as indicated by an offset).
*
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
* 01:30 UTC on January 15, 2017.
*
* In JavaScript, one can convert a Date object to this format using the
* standard
* [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
* method. In Python, a standard `datetime.datetime` object can be converted
* to this format using
* [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
* the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
* the Joda Time's [`ISODateTimeFormat.dateTime()`](
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
* ) to obtain a formatter capable of generating timestamps in this format.
*
*
*
* @generated from protobuf message google.protobuf.Timestamp
*/
export interface Timestamp {
/**
* Represents seconds of UTC time since Unix epoch
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
* 9999-12-31T23:59:59Z inclusive.
*
* @generated from protobuf field: int64 seconds = 1;
*/
seconds: string;
/**
* Non-negative fractions of a second at nanosecond resolution. Negative
* second values with fractions must still have non-negative nanos values
* that count forward in time. Must be from 0 to 999,999,999
* inclusive.
*
* @generated from protobuf field: int32 nanos = 2;
*/
nanos: number;
}
declare class Timestamp$Type extends MessageType<Timestamp> {
constructor();
/**
* Creates a new `Timestamp` for the current time.
*/
now(): Timestamp;
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message: Timestamp): Date;
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date: Date): Timestamp;
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue;
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp;
create(value?: PartialMessage<Timestamp>): Timestamp;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp;
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
export declare const Timestamp: Timestamp$Type;
export {};
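Note that `seconds` above is an int64 carried as a decimal string (the package is generated with `long_type_string`), so converting to and from a JavaScript `Date` needs a string/number hop. A standalone sketch that mirrors the generated `fromDate`/`toDate`, using plain number arithmetic instead of the library's `PbLong` helper:

```ts
// Standalone mirror of Timestamp.fromDate/toDate from the generated code,
// safe for dates whose epoch seconds fit in a JS number.
interface Timestamp {
  seconds: string; // int64 carried as a decimal string (long_type_string)
  nanos: number;   // 0..999,999,999
}

function fromDate(date: Date): Timestamp {
  const ms = date.getTime();
  return {
    seconds: String(Math.floor(ms / 1000)),
    nanos: (ms % 1000) * 1000000,
  };
}

function toDate(ts: Timestamp): Date {
  return new Date(Number(ts.seconds) * 1000 + Math.ceil(ts.nanos / 1000000));
}

// Round trip: toDate(fromDate(d)) preserves millisecond precision.
```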
136 node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.js generated vendored Normal file
@@ -0,0 +1,136 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Timestamp = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const runtime_6 = require("@protobuf-ts/runtime");
const runtime_7 = require("@protobuf-ts/runtime");
// @generated message type with reflection information, may provide speed optimized methods
class Timestamp$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Timestamp", [
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Creates a new `Timestamp` for the current time.
*/
now() {
const msg = this.create();
const ms = Date.now();
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message) {
return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
}
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date) {
const msg = this.create();
const ms = date.getTime();
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message, options) {
let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1000;
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (message.nanos < 0)
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
let z = "Z";
if (message.nanos > 0) {
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
if (nanosStr.substring(3) === "000000")
z = "." + nanosStr.substring(0, 3) + "Z";
else if (nanosStr.substring(6) === "000")
z = "." + nanosStr.substring(0, 6) + "Z";
else
z = "." + nanosStr + "Z";
}
return new Date(ms).toISOString().replace(".000Z", z);
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json, options, target) {
if (typeof json !== "string")
throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + ".");
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
if (!matches)
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
if (Number.isNaN(ms))
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (!target)
target = this.create();
target.seconds = runtime_6.PbLong.from(ms / 1000).toString();
target.nanos = 0;
if (matches[7])
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
return target;
}
create(value) {
const message = { seconds: "0", nanos: 0 };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 seconds */ 1:
message.seconds = reader.int64().toString();
break;
case /* int32 nanos */ 2:
message.nanos = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int64 seconds = 1; */
if (message.seconds !== "0")
writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds);
/* int32 nanos = 2; */
if (message.nanos !== 0)
writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
exports.Timestamp = new Timestamp$Type();
//# sourceMappingURL=timestamp.js.map
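One detail worth calling out in `internalJsonWrite` above: the `(nanos + 1000000000).toString().substring(1)` trick is just zero-padding to nine digits, after which the fraction is trimmed to 3, 6, or 9 digits as RFC 3339 allows. The same trimming logic, extracted into a small standalone helper for clarity (not part of the generated code):

```ts
// Render nanos as an RFC 3339 fractional-second suffix, trimmed to
// 3, 6, or 9 digits -- the logic used by internalJsonWrite above.
function fractionSuffix(nanos: number): string {
  if (nanos <= 0) return "Z";
  const nine = nanos.toString().padStart(9, "0"); // same as the +1e9/substring(1) trick
  if (nine.substring(3) === "000000") return "." + nine.substring(0, 3) + "Z";
  if (nine.substring(6) === "000") return "." + nine.substring(0, 6) + "Z";
  return "." + nine + "Z";
}

// fractionSuffix(10000000) === ".010Z"
// fractionSuffix(500)      === ".000000500Z"
```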
1 node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
307 node_modules/@actions/cache/lib/generated/google/protobuf/wrappers.d.ts generated vendored Normal file
@@ -0,0 +1,307 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* Wrapper message for `double`.
*
* The JSON representation for `DoubleValue` is JSON number.
*
* @generated from protobuf message google.protobuf.DoubleValue
*/
export interface DoubleValue {
/**
* The double value.
*
* @generated from protobuf field: double value = 1;
*/
value: number;
}
/**
* Wrapper message for `float`.
*
* The JSON representation for `FloatValue` is JSON number.
*
* @generated from protobuf message google.protobuf.FloatValue
*/
export interface FloatValue {
/**
* The float value.
*
* @generated from protobuf field: float value = 1;
*/
value: number;
}
/**
* Wrapper message for `int64`.
*
* The JSON representation for `Int64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.Int64Value
*/
export interface Int64Value {
/**
* The int64 value.
*
* @generated from protobuf field: int64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `uint64`.
*
* The JSON representation for `UInt64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.UInt64Value
*/
export interface UInt64Value {
/**
* The uint64 value.
*
* @generated from protobuf field: uint64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `int32`.
*
* The JSON representation for `Int32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.Int32Value
*/
export interface Int32Value {
/**
* The int32 value.
*
* @generated from protobuf field: int32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `uint32`.
*
* The JSON representation for `UInt32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.UInt32Value
*/
export interface UInt32Value {
/**
* The uint32 value.
*
* @generated from protobuf field: uint32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `bool`.
*
* The JSON representation for `BoolValue` is JSON `true` and `false`.
*
* @generated from protobuf message google.protobuf.BoolValue
*/
export interface BoolValue {
/**
* The bool value.
*
* @generated from protobuf field: bool value = 1;
*/
value: boolean;
}
/**
* Wrapper message for `string`.
*
* The JSON representation for `StringValue` is JSON string.
*
* @generated from protobuf message google.protobuf.StringValue
*/
export interface StringValue {
/**
* The string value.
*
* @generated from protobuf field: string value = 1;
*/
value: string;
}
/**
* Wrapper message for `bytes`.
*
* The JSON representation for `BytesValue` is JSON string.
*
* @generated from protobuf message google.protobuf.BytesValue
*/
export interface BytesValue {
/**
* The bytes value.
*
* @generated from protobuf field: bytes value = 1;
*/
value: Uint8Array;
}
declare class DoubleValue$Type extends MessageType<DoubleValue> {
constructor();
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue;
create(value?: PartialMessage<DoubleValue>): DoubleValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue;
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
export declare const DoubleValue: DoubleValue$Type;
declare class FloatValue$Type extends MessageType<FloatValue> {
constructor();
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue;
create(value?: PartialMessage<FloatValue>): FloatValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue;
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
export declare const FloatValue: FloatValue$Type;
declare class Int64Value$Type extends MessageType<Int64Value> {
constructor();
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value;
create(value?: PartialMessage<Int64Value>): Int64Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value;
internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
export declare const Int64Value: Int64Value$Type;
declare class UInt64Value$Type extends MessageType<UInt64Value> {
constructor();
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value;
create(value?: PartialMessage<UInt64Value>): UInt64Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value;
internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt64Value
*/
export declare const UInt64Value: UInt64Value$Type;
declare class Int32Value$Type extends MessageType<Int32Value> {
constructor();
/**
* Encode `Int32Value` to JSON string.
*/
internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `Int32Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value;
create(value?: PartialMessage<Int32Value>): Int32Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value;
internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Int32Value
*/
export declare const Int32Value: Int32Value$Type;
declare class UInt32Value$Type extends MessageType<UInt32Value> {
constructor();
/**
* Encode `UInt32Value` to JSON string.
*/
internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `UInt32Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value;
create(value?: PartialMessage<UInt32Value>): UInt32Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value;
internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt32Value
*/
export declare const UInt32Value: UInt32Value$Type;
declare class BoolValue$Type extends MessageType<BoolValue> {
constructor();
/**
* Encode `BoolValue` to JSON bool.
*/
internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `BoolValue` from JSON bool.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue;
create(value?: PartialMessage<BoolValue>): BoolValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue;
internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.BoolValue
*/
export declare const BoolValue: BoolValue$Type;
declare class StringValue$Type extends MessageType<StringValue> {
constructor();
/**
* Encode `StringValue` to JSON string.
*/
internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `StringValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue;
create(value?: PartialMessage<StringValue>): StringValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue;
internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.StringValue
*/
export declare const StringValue: StringValue$Type;
declare class BytesValue$Type extends MessageType<BytesValue> {
constructor();
/**
* Encode `BytesValue` to JSON string.
*/
internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `BytesValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue;
create(value?: PartialMessage<BytesValue>): BytesValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue;
internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.BytesValue
*/
export declare const BytesValue: BytesValue$Type;
export {};
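The wrapper interfaces above exist because a plain proto3 scalar field cannot distinguish "unset" from its default value, while a message-typed field can simply be absent (the header comment in the wrappers.js diff below makes the same point). A small sketch of that idea; `ScanOptions` and `effectiveTimeout` are hypothetical and not part of the generated code:

```ts
// Why wrapper messages exist: a message-typed field can be absent, while a
// plain scalar field cannot. ScanOptions is a hypothetical example message.
interface Int32Value {
  value: number;
}

interface ScanOptions {
  timeoutSeconds?: Int32Value; // absent = "no opinion"; { value: 0 } = "explicitly zero"
}

function effectiveTimeout(opts: ScanOptions, defaultSeconds: number): number {
  return opts.timeoutSeconds !== undefined ? opts.timeoutSeconds.value : defaultSeconds;
}

// effectiveTimeout({}, 30) === 30
// effectiveTimeout({ timeoutSeconds: { value: 0 } }, 30) === 0
```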
614 node_modules/@actions/cache/lib/generated/google/protobuf/wrappers.js generated vendored Normal file
@@ -0,0 +1,614 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BytesValue = exports.StringValue = exports.BoolValue = exports.UInt32Value = exports.Int32Value = exports.UInt64Value = exports.Int64Value = exports.FloatValue = exports.DoubleValue = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//
// Wrappers for primitive (non-message) types. These types are useful
// for embedding primitives in the `google.protobuf.Any` type and for places
// where we need to distinguish between the absence of a primitive
// typed field and its default value.
//
// These wrappers have no meaningful use within repeated fields as they lack
// the ability to detect presence on individual elements.
// These wrappers have no meaningful use within a map or a oneof since
// individual entries of a map or fields of a oneof can already detect presence.
//
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const runtime_6 = require("@protobuf-ts/runtime");
const runtime_7 = require("@protobuf-ts/runtime");
// @generated message type with reflection information, may provide speed optimized methods
class DoubleValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.DoubleValue", [
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
]);
}
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
}
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* double value */ 1:
message.value = reader.double();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* double value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Bit64).double(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
exports.DoubleValue = new DoubleValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FloatValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.FloatValue", [
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
]);
}
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
}
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* float value */ 1:
message.value = reader.float();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* float value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Bit32).float(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
exports.FloatValue = new FloatValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int64Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Int64Value", [
{ no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(runtime_1.ScalarType.INT64, message.value, "value", false, true);
}
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.INT64, runtime_2.LongType.STRING, "value");
return target;
}
create(value) {
const message = { value: "0" };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 value */ 1:
message.value = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int64 value = 1; */
if (message.value !== "0")
writer.tag(1, runtime_3.WireType.Varint).int64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
exports.Int64Value = new Int64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt64Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.UInt64Value", [
{ no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
]);
}
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(runtime_1.ScalarType.UINT64, message.value, "value", false, true);
}
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.UINT64, runtime_2.LongType.STRING, "value");
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* uint64 value */ 1:
|
||||
message.value = reader.uint64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* uint64 value = 1; */
|
||||
if (message.value !== "0")
|
||||
writer.tag(1, runtime_3.WireType.Varint).uint64(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.UInt64Value
|
||||
*/
|
||||
exports.UInt64Value = new UInt64Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class Int32Value$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.Int32Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `Int32Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(5, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `Int32Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 5, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int32 value */ 1:
|
||||
message.value = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* int32 value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, runtime_3.WireType.Varint).int32(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Int32Value
|
||||
*/
|
||||
exports.Int32Value = new Int32Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class UInt32Value$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.UInt32Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `UInt32Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(13, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `UInt32Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 13, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* uint32 value */ 1:
|
||||
message.value = reader.uint32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* uint32 value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, runtime_3.WireType.Varint).uint32(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.UInt32Value
|
||||
*/
|
||||
exports.UInt32Value = new UInt32Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class BoolValue$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.BoolValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `BoolValue` to JSON bool.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return message.value;
|
||||
}
|
||||
/**
|
||||
* Decode `BoolValue` from JSON bool.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 8, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: false };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool value */ 1:
|
||||
message.value = reader.bool();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bool value = 1; */
|
||||
if (message.value !== false)
|
||||
writer.tag(1, runtime_3.WireType.Varint).bool(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.BoolValue
|
||||
*/
|
||||
exports.BoolValue = new BoolValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class StringValue$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.StringValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `StringValue` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return message.value;
|
||||
}
|
||||
/**
|
||||
* Decode `StringValue` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 9, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string value */ 1:
|
||||
message.value = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string value = 1; */
|
||||
if (message.value !== "")
|
||||
writer.tag(1, runtime_3.WireType.LengthDelimited).string(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.StringValue
|
||||
*/
|
||||
exports.StringValue = new StringValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class BytesValue$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.BytesValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `BytesValue` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(12, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `BytesValue` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 12, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: new Uint8Array(0) };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bytes value */ 1:
|
||||
message.value = reader.bytes();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bytes value = 1; */
|
||||
if (message.value.length)
|
||||
writer.tag(1, runtime_3.WireType.LengthDelimited).bytes(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.BytesValue
|
||||
*/
|
||||
exports.BytesValue = new BytesValue$Type();
|
||||
//# sourceMappingURL=wrappers.js.map
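All of the wrapper messages above share one shape: a single `value` field at field number 1, written with that scalar's natural wire type and omitted when it holds the proto3 default. A minimal round-trip sketch in TypeScript, assuming the standard `create`/`toBinary`/`fromBinary` helpers that `MessageType` from `@protobuf-ts/runtime` provides; the import path into the vendored package is hypothetical:

import { StringValue } from "@actions/cache/lib/generated/google/protobuf/wrappers"; // hypothetical path

// Wrap a plain string in google.protobuf.StringValue and round-trip it.
const msg = StringValue.create({ value: "hello" });
const bytes = StringValue.toBinary(msg);    // tag(1, LengthDelimited) followed by "hello"
const back = StringValue.fromBinary(bytes); // { value: "hello" }

Note that `StringValue.create({})` would serialize to zero bytes, since `""` is the default and `internalBinaryWrite` skips it.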
1
node_modules/@actions/cache/lib/generated/google/protobuf/wrappers.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
378
node_modules/@actions/cache/lib/generated/results/api/v1/cache.d.ts
generated
vendored
Normal file
@@ -0,0 +1,378 @@
import { ServiceType } from "@protobuf-ts/runtime-rpc";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheEntry } from "../../entities/v1/cacheentry";
import { CacheMetadata } from "../../entities/v1/cachemetadata";
/**
 * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
 */
export interface CreateCacheEntryRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
    /**
     * Hash of the compression tool, runner OS and paths cached
     *
     * @generated from protobuf field: string version = 3;
     */
    version: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
 */
export interface CreateCacheEntryResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * SAS URL to upload the cache archive
     *
     * @generated from protobuf field: string signed_upload_url = 2;
     */
    signedUploadUrl: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
 */
export interface FinalizeCacheEntryUploadRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
    /**
     * Size of the cache archive in Bytes
     *
     * @generated from protobuf field: int64 size_bytes = 3;
     */
    sizeBytes: string;
    /**
     * Hash of the compression tool, runner OS and paths cached
     *
     * @generated from protobuf field: string version = 4;
     */
    version: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
 */
export interface FinalizeCacheEntryUploadResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * Cache entry database ID
     *
     * @generated from protobuf field: int64 entry_id = 2;
     */
    entryId: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
 */
export interface GetCacheEntryDownloadURLRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
    /**
     * Restore keys used for prefix searching
     *
     * @generated from protobuf field: repeated string restore_keys = 3;
     */
    restoreKeys: string[];
    /**
     * Hash of the compression tool, runner OS and paths cached
     *
     * @generated from protobuf field: string version = 4;
     */
    version: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
 */
export interface GetCacheEntryDownloadURLResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * SAS URL to download the cache archive
     *
     * @generated from protobuf field: string signed_download_url = 2;
     */
    signedDownloadUrl: string;
    /**
     * Key or restore key that matches the lookup
     *
     * @generated from protobuf field: string matched_key = 3;
     */
    matchedKey: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
 */
export interface DeleteCacheEntryRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
 */
export interface DeleteCacheEntryResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * Cache entry database ID
     *
     * @generated from protobuf field: int64 entry_id = 2;
     */
    entryId: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
 */
export interface ListCacheEntriesRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
    /**
     * Restore keys used for prefix searching
     *
     * @generated from protobuf field: repeated string restore_keys = 3;
     */
    restoreKeys: string[];
}
/**
 * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
 */
export interface ListCacheEntriesResponse {
    /**
     * Cache entries in the defined scope
     *
     * @generated from protobuf field: repeated github.actions.results.entities.v1.CacheEntry entries = 1;
     */
    entries: CacheEntry[];
}
/**
 * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
 */
export interface LookupCacheEntryRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
    /**
     * Restore keys used for prefix searching
     *
     * @generated from protobuf field: repeated string restore_keys = 3;
     */
    restoreKeys: string[];
    /**
     * Hash of the compression tool, runner OS and paths cached
     *
     * @generated from protobuf field: string version = 4;
     */
    version: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
 */
export interface LookupCacheEntryResponse {
    /**
     * Indicates whether the cache entry exists or not
     *
     * @generated from protobuf field: bool exists = 1;
     */
    exists: boolean;
    /**
     * Matched cache entry metadata
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheEntry entry = 2;
     */
    entry?: CacheEntry;
}
declare class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest> {
    constructor();
    create(value?: PartialMessage<CreateCacheEntryRequest>): CreateCacheEntryRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryRequest): CreateCacheEntryRequest;
    internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
 */
export declare const CreateCacheEntryRequest: CreateCacheEntryRequest$Type;
declare class CreateCacheEntryResponse$Type extends MessageType<CreateCacheEntryResponse> {
    constructor();
    create(value?: PartialMessage<CreateCacheEntryResponse>): CreateCacheEntryResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryResponse): CreateCacheEntryResponse;
    internalBinaryWrite(message: CreateCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
 */
export declare const CreateCacheEntryResponse: CreateCacheEntryResponse$Type;
declare class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntryUploadRequest> {
    constructor();
    create(value?: PartialMessage<FinalizeCacheEntryUploadRequest>): FinalizeCacheEntryUploadRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadRequest): FinalizeCacheEntryUploadRequest;
    internalBinaryWrite(message: FinalizeCacheEntryUploadRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
 */
export declare const FinalizeCacheEntryUploadRequest: FinalizeCacheEntryUploadRequest$Type;
declare class FinalizeCacheEntryUploadResponse$Type extends MessageType<FinalizeCacheEntryUploadResponse> {
    constructor();
    create(value?: PartialMessage<FinalizeCacheEntryUploadResponse>): FinalizeCacheEntryUploadResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadResponse): FinalizeCacheEntryUploadResponse;
    internalBinaryWrite(message: FinalizeCacheEntryUploadResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
 */
export declare const FinalizeCacheEntryUploadResponse: FinalizeCacheEntryUploadResponse$Type;
declare class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDownloadURLRequest> {
    constructor();
    create(value?: PartialMessage<GetCacheEntryDownloadURLRequest>): GetCacheEntryDownloadURLRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLRequest): GetCacheEntryDownloadURLRequest;
    internalBinaryWrite(message: GetCacheEntryDownloadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
 */
export declare const GetCacheEntryDownloadURLRequest: GetCacheEntryDownloadURLRequest$Type;
declare class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDownloadURLResponse> {
    constructor();
    create(value?: PartialMessage<GetCacheEntryDownloadURLResponse>): GetCacheEntryDownloadURLResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLResponse): GetCacheEntryDownloadURLResponse;
    internalBinaryWrite(message: GetCacheEntryDownloadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
 */
export declare const GetCacheEntryDownloadURLResponse: GetCacheEntryDownloadURLResponse$Type;
declare class DeleteCacheEntryRequest$Type extends MessageType<DeleteCacheEntryRequest> {
    constructor();
    create(value?: PartialMessage<DeleteCacheEntryRequest>): DeleteCacheEntryRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryRequest): DeleteCacheEntryRequest;
    internalBinaryWrite(message: DeleteCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
 */
export declare const DeleteCacheEntryRequest: DeleteCacheEntryRequest$Type;
declare class DeleteCacheEntryResponse$Type extends MessageType<DeleteCacheEntryResponse> {
    constructor();
    create(value?: PartialMessage<DeleteCacheEntryResponse>): DeleteCacheEntryResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryResponse): DeleteCacheEntryResponse;
    internalBinaryWrite(message: DeleteCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
 */
export declare const DeleteCacheEntryResponse: DeleteCacheEntryResponse$Type;
declare class ListCacheEntriesRequest$Type extends MessageType<ListCacheEntriesRequest> {
    constructor();
    create(value?: PartialMessage<ListCacheEntriesRequest>): ListCacheEntriesRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesRequest): ListCacheEntriesRequest;
    internalBinaryWrite(message: ListCacheEntriesRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
 */
export declare const ListCacheEntriesRequest: ListCacheEntriesRequest$Type;
declare class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse> {
    constructor();
    create(value?: PartialMessage<ListCacheEntriesResponse>): ListCacheEntriesResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse): ListCacheEntriesResponse;
    internalBinaryWrite(message: ListCacheEntriesResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
 */
export declare const ListCacheEntriesResponse: ListCacheEntriesResponse$Type;
declare class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest> {
    constructor();
    create(value?: PartialMessage<LookupCacheEntryRequest>): LookupCacheEntryRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryRequest): LookupCacheEntryRequest;
    internalBinaryWrite(message: LookupCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
 */
export declare const LookupCacheEntryRequest: LookupCacheEntryRequest$Type;
declare class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse> {
    constructor();
    create(value?: PartialMessage<LookupCacheEntryResponse>): LookupCacheEntryResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse): LookupCacheEntryResponse;
    internalBinaryWrite(message: LookupCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
 */
export declare const LookupCacheEntryResponse: LookupCacheEntryResponse$Type;
/**
 * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
 */
export declare const CacheService: ServiceType;
export {};
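Taken together, these interfaces describe the v1 cache flow: create an entry (getting a signed upload URL back), upload the archive, then finalize with its size; lookups go through GetCacheEntryDownloadURL with a key plus restore keys. A minimal request-building sketch, assuming the `create`/`toBinary` helpers from `@protobuf-ts/runtime`'s `MessageType`; the import path, key and version values here are illustrative, not part of this diff:

import { CreateCacheEntryRequest } from "@actions/cache/lib/generated/results/api/v1/cache"; // hypothetical path

// key: the explicit cache key; version: hash of the compression tool,
// runner OS and cached paths, as documented on the interface above.
const req = CreateCacheEntryRequest.create({
    key: "node-cache-linux-x64",          // illustrative key
    version: "0123abcd",                  // illustrative version hash
});
const body = CreateCacheEntryRequest.toBinary(req); // Uint8Array request body for the RPC call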
730
node_modules/@actions/cache/lib/generated/results/api/v1/cache.js
generated
vendored
Normal file
@@ -0,0 +1,730 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheService = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
// tslint:disable
const runtime_rpc_1 = require("@protobuf-ts/runtime-rpc");
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const cacheentry_1 = require("../../entities/v1/cacheentry");
const cachemetadata_1 = require("../../entities/v1/cachemetadata");
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryRequest$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.CreateCacheEntryRequest", [
            { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
            { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { key: "", version: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
                    message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
                    break;
                case /* string key */ 2:
                    message.key = reader.string();
                    break;
                case /* string version */ 3:
                    message.version = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
        if (message.metadata)
            cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
        /* string key = 2; */
        if (message.key !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
        /* string version = 3; */
        if (message.version !== "")
            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
 */
exports.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryResponse$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.CreateCacheEntryResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { ok: false, signedUploadUrl: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* string signed_upload_url */ 2:
                    message.signedUploadUrl = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
        /* string signed_upload_url = 2; */
        if (message.signedUploadUrl !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
 */
exports.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadRequest$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [
            { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
            { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { key: "", sizeBytes: "0", version: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
                    message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
                    break;
                case /* string key */ 2:
                    message.key = reader.string();
                    break;
                case /* int64 size_bytes */ 3:
                    message.sizeBytes = reader.int64().toString();
                    break;
                case /* string version */ 4:
                    message.version = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
        if (message.metadata)
            cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
        /* string key = 2; */
        if (message.key !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
        /* int64 size_bytes = 3; */
        if (message.sizeBytes !== "0")
            writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
        /* string version = 4; */
        if (message.version !== "")
            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
 */
exports.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    create(value) {
        const message = { ok: false, entryId: "0" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* int64 entry_id */ 2:
                    message.entryId = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
        /* int64 entry_id = 2; */
        if (message.entryId !== "0")
            writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
 */
exports.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLRequest$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [
            { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
            { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
            { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { key: "", restoreKeys: [], version: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
                    message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
                    break;
                case /* string key */ 2:
                    message.key = reader.string();
                    break;
                case /* repeated string restore_keys */ 3:
                    message.restoreKeys.push(reader.string());
                    break;
                case /* string version */ 4:
                    message.version = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
        if (message.metadata)
            cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
        /* string key = 2; */
        if (message.key !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
        /* repeated string restore_keys = 3; */
        for (let i = 0; i < message.restoreKeys.length; i++)
            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
        /* string version = 4; */
        if (message.version !== "")
            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
 */
exports.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* string signed_download_url */ 2:
                    message.signedDownloadUrl = reader.string();
                    break;
                case /* string matched_key */ 3:
                    message.matchedKey = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
        /* string signed_download_url = 2; */
        if (message.signedDownloadUrl !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl);
        /* string matched_key = 3; */
        if (message.matchedKey !== "")
            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
 */
exports.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteCacheEntryRequest$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.DeleteCacheEntryRequest", [
            { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
            { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { key: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
                    message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
                    break;
                case /* string key */ 2:
                    message.key = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
        if (message.metadata)
            cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
        /* string key = 2; */
        if (message.key !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
 */
exports.DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteCacheEntryResponse$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.DeleteCacheEntryResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    create(value) {
        const message = { ok: false, entryId: "0" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* int64 entry_id */ 2:
                    message.entryId = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
        /* int64 entry_id = 2; */
        if (message.entryId !== "0")
            writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
 */
exports.DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListCacheEntriesRequest$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.ListCacheEntriesRequest", [
            { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
            { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { key: "", restoreKeys: [] };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
                    message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
                    break;
||||
case /* string key */ 2:
|
||||
message.key = reader.string();
|
||||
break;
|
||||
case /* repeated string restore_keys */ 3:
|
||||
message.restoreKeys.push(reader.string());
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
|
||||
if (message.metadata)
|
||||
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* string key = 2; */
|
||||
if (message.key !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
|
||||
/* repeated string restore_keys = 3; */
|
||||
for (let i = 0; i < message.restoreKeys.length; i++)
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
|
||||
*/
|
||||
exports.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListCacheEntriesResponse", [
|
||||
{ no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cacheentry_1.CacheEntry }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { entries: [] };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1:
|
||||
message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* repeated github.actions.results.entities.v1.CacheEntry entries = 1; */
|
||||
for (let i = 0; i < message.entries.length; i++)
|
||||
cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
|
||||
*/
|
||||
exports.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class LookupCacheEntryRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.LookupCacheEntryRequest", [
|
||||
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
|
||||
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { key: "", restoreKeys: [], version: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
|
||||
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
|
||||
break;
|
||||
case /* string key */ 2:
|
||||
message.key = reader.string();
|
||||
break;
|
||||
case /* repeated string restore_keys */ 3:
|
||||
message.restoreKeys.push(reader.string());
|
||||
break;
|
||||
case /* string version */ 4:
|
||||
message.version = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
|
||||
if (message.metadata)
|
||||
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* string key = 2; */
|
||||
if (message.key !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
|
||||
/* repeated string restore_keys = 3; */
|
||||
for (let i = 0; i < message.restoreKeys.length; i++)
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
|
||||
/* string version = 4; */
|
||||
if (message.version !== "")
|
||||
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
|
||||
*/
|
||||
exports.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.LookupCacheEntryResponse", [
|
||||
{ no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "entry", kind: "message", T: () => cacheentry_1.CacheEntry }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { exists: false };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool exists */ 1:
|
||||
message.exists = reader.bool();
|
||||
break;
|
||||
case /* github.actions.results.entities.v1.CacheEntry entry */ 2:
|
||||
message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bool exists = 1; */
|
||||
if (message.exists !== false)
|
||||
writer.tag(1, runtime_1.WireType.Varint).bool(message.exists);
|
||||
/* github.actions.results.entities.v1.CacheEntry entry = 2; */
|
||||
if (message.entry)
|
||||
cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
|
||||
*/
|
||||
exports.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type();
|
||||
/**
|
||||
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
|
||||
*/
|
||||
exports.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [
|
||||
{ name: "CreateCacheEntry", options: {}, I: exports.CreateCacheEntryRequest, O: exports.CreateCacheEntryResponse },
|
||||
{ name: "FinalizeCacheEntryUpload", options: {}, I: exports.FinalizeCacheEntryUploadRequest, O: exports.FinalizeCacheEntryUploadResponse },
|
||||
{ name: "GetCacheEntryDownloadURL", options: {}, I: exports.GetCacheEntryDownloadURLRequest, O: exports.GetCacheEntryDownloadURLResponse },
|
||||
{ name: "DeleteCacheEntry", options: {}, I: exports.DeleteCacheEntryRequest, O: exports.DeleteCacheEntryResponse },
|
||||
{ name: "ListCacheEntries", options: {}, I: exports.ListCacheEntriesRequest, O: exports.ListCacheEntriesResponse },
|
||||
{ name: "LookupCacheEntry", options: {}, I: exports.LookupCacheEntryRequest, O: exports.LookupCacheEntryResponse }
|
||||
]);
|
||||
//# sourceMappingURL=cache.js.map
|
||||
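The message types above all follow the same @protobuf-ts/runtime MessageType pattern: create() fills in proto3 defaults (note that int64 fields such as entry_id surface as decimal strings), and the inherited toBinary()/fromBinary() helpers drive the internalBinaryWrite/internalBinaryRead methods shown in the diff. A minimal round-trip sketch; the import path into the vendored module and all example values are assumptions, not taken from this diff:

// Sketch: binary round-trip through one of the generated message types.
import { LookupCacheEntryRequest } from "@actions/cache/lib/generated/results/api/v1/cache";

// create() applies proto3 defaults and merges the partial value.
const request = LookupCacheEntryRequest.create({
    key: "linux-node-abc123",      // hypothetical cache key
    restoreKeys: ["linux-node-"],  // hypothetical restore-key prefix
    version: "1",
});

// toBinary()/fromBinary() come from MessageType and delegate to the
// internalBinaryWrite/internalBinaryRead methods defined above.
const bytes = LookupCacheEntryRequest.toBinary(request);
const decoded = LookupCacheEntryRequest.fromBinary(bytes);
console.log(decoded.key); // "linux-node-abc123"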
1
node_modules/@actions/cache/lib/generated/results/api/v1/cache.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
53
node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.d.ts
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
/// <reference types="node" />
|
||||
import { TwirpContext, TwirpServer } from "twirp-ts";
|
||||
import { CreateCacheEntryRequest, CreateCacheEntryResponse, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse, DeleteCacheEntryRequest, DeleteCacheEntryResponse, ListCacheEntriesRequest, ListCacheEntriesResponse, LookupCacheEntryRequest, LookupCacheEntryResponse } from "./cache";
|
||||
interface Rpc {
|
||||
request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array>;
|
||||
}
|
||||
export interface CacheServiceClient {
|
||||
CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
|
||||
FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
|
||||
GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
|
||||
DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
|
||||
ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
|
||||
LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
|
||||
}
|
||||
export declare class CacheServiceClientJSON implements CacheServiceClient {
|
||||
private readonly rpc;
|
||||
constructor(rpc: Rpc);
|
||||
CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
|
||||
FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
|
||||
GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
|
||||
DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
|
||||
ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
|
||||
LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
|
||||
}
|
||||
export declare class CacheServiceClientProtobuf implements CacheServiceClient {
|
||||
private readonly rpc;
|
||||
constructor(rpc: Rpc);
|
||||
CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
|
||||
FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
|
||||
GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
|
||||
DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
|
||||
ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
|
||||
LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
|
||||
}
|
||||
export interface CacheServiceTwirp<T extends TwirpContext = TwirpContext> {
|
||||
CreateCacheEntry(ctx: T, request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
|
||||
FinalizeCacheEntryUpload(ctx: T, request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
|
||||
GetCacheEntryDownloadURL(ctx: T, request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
|
||||
DeleteCacheEntry(ctx: T, request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
|
||||
ListCacheEntries(ctx: T, request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
|
||||
LookupCacheEntry(ctx: T, request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
|
||||
}
|
||||
export declare enum CacheServiceMethod {
|
||||
CreateCacheEntry = "CreateCacheEntry",
|
||||
FinalizeCacheEntryUpload = "FinalizeCacheEntryUpload",
|
||||
GetCacheEntryDownloadURL = "GetCacheEntryDownloadURL",
|
||||
DeleteCacheEntry = "DeleteCacheEntry",
|
||||
ListCacheEntries = "ListCacheEntries",
|
||||
LookupCacheEntry = "LookupCacheEntry"
|
||||
}
|
||||
export declare const CacheServiceMethodList: CacheServiceMethod[];
|
||||
export declare function createCacheServiceServer<T extends TwirpContext = TwirpContext>(service: CacheServiceTwirp<T>): TwirpServer<CacheServiceTwirp<TwirpContext<import("http").IncomingMessage, import("http").ServerResponse<import("http").IncomingMessage>>>, T>;
|
||||
export {};
|
||||
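The Rpc interface above is the only transport hook the generated clients require: a single request function keyed by service name, method name, and content type. A sketch of a fetch-based JSON transport, assuming the conventional /twirp route prefix; the base URL, token, and example key are placeholders, not values from this diff:

// Sketch: a fetch-based Rpc transport for the generated JSON client.
import { CacheServiceClientJSON } from "@actions/cache/lib/generated/results/api/v1/cache.twirp";
import { LookupCacheEntryRequest } from "@actions/cache/lib/generated/results/api/v1/cache";

function makeRpc(baseUrl: string, token: string) {
    return {
        async request(
            service: string,
            method: string,
            contentType: "application/json" | "application/protobuf",
            data: object | Uint8Array
        ): Promise<object | Uint8Array> {
            // Twirp routes are POST <prefix>/<package.Service>/<Method>;
            // "/twirp" is the conventional prefix, assumed here.
            const response = await fetch(`${baseUrl}/twirp/${service}/${method}`, {
                method: "POST",
                headers: { "Content-Type": contentType, Authorization: `Bearer ${token}` },
                body: contentType === "application/json" ? JSON.stringify(data) : (data as Uint8Array),
            });
            if (!response.ok) throw new Error(`Twirp call failed: ${response.status}`);
            return (await response.json()) as object; // JSON client expects a plain object back
        },
    };
}

const client = new CacheServiceClientJSON(makeRpc("https://example.invalid", "PLACEHOLDER_TOKEN"));
client.LookupCacheEntry(LookupCacheEntryRequest.create({ key: "linux-node-abc123" }))
    .then((reply) => console.log(reply.exists));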
602
node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.js
generated
vendored
Normal file
@@ -0,0 +1,602 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createCacheServiceServer = exports.CacheServiceMethodList = exports.CacheServiceMethod = exports.CacheServiceClientProtobuf = exports.CacheServiceClientJSON = void 0;
|
||||
const twirp_ts_1 = require("twirp-ts");
|
||||
const cache_1 = require("./cache");
|
||||
class CacheServiceClientJSON {
|
||||
constructor(rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateCacheEntry.bind(this);
|
||||
this.FinalizeCacheEntryUpload.bind(this);
|
||||
this.GetCacheEntryDownloadURL.bind(this);
|
||||
this.DeleteCacheEntry.bind(this);
|
||||
this.ListCacheEntries.bind(this);
|
||||
this.LookupCacheEntry.bind(this);
|
||||
}
|
||||
CreateCacheEntry(request) {
|
||||
const data = cache_1.CreateCacheEntryRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data);
|
||||
return promise.then((data) => cache_1.CreateCacheEntryResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
FinalizeCacheEntryUpload(request) {
|
||||
const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data);
|
||||
return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
GetCacheEntryDownloadURL(request) {
|
||||
const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data);
|
||||
return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
DeleteCacheEntry(request) {
|
||||
const data = cache_1.DeleteCacheEntryRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/json", data);
|
||||
return promise.then((data) => cache_1.DeleteCacheEntryResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
ListCacheEntries(request) {
|
||||
const data = cache_1.ListCacheEntriesRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/json", data);
|
||||
return promise.then((data) => cache_1.ListCacheEntriesResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
LookupCacheEntry(request) {
|
||||
const data = cache_1.LookupCacheEntryRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/json", data);
|
||||
return promise.then((data) => cache_1.LookupCacheEntryResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
}
|
||||
exports.CacheServiceClientJSON = CacheServiceClientJSON;
|
||||
class CacheServiceClientProtobuf {
|
||||
constructor(rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateCacheEntry.bind(this);
|
||||
this.FinalizeCacheEntryUpload.bind(this);
|
||||
this.GetCacheEntryDownloadURL.bind(this);
|
||||
this.DeleteCacheEntry.bind(this);
|
||||
this.ListCacheEntries.bind(this);
|
||||
this.LookupCacheEntry.bind(this);
|
||||
}
|
||||
CreateCacheEntry(request) {
|
||||
const data = cache_1.CreateCacheEntryRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data);
|
||||
return promise.then((data) => cache_1.CreateCacheEntryResponse.fromBinary(data));
|
||||
}
|
||||
FinalizeCacheEntryUpload(request) {
|
||||
const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data);
|
||||
return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data));
|
||||
}
|
||||
GetCacheEntryDownloadURL(request) {
|
||||
const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data);
|
||||
return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data));
|
||||
}
|
||||
DeleteCacheEntry(request) {
|
||||
const data = cache_1.DeleteCacheEntryRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/protobuf", data);
|
||||
return promise.then((data) => cache_1.DeleteCacheEntryResponse.fromBinary(data));
|
||||
}
|
||||
ListCacheEntries(request) {
|
||||
const data = cache_1.ListCacheEntriesRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/protobuf", data);
|
||||
return promise.then((data) => cache_1.ListCacheEntriesResponse.fromBinary(data));
|
||||
}
|
||||
LookupCacheEntry(request) {
|
||||
const data = cache_1.LookupCacheEntryRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/protobuf", data);
|
||||
return promise.then((data) => cache_1.LookupCacheEntryResponse.fromBinary(data));
|
||||
}
|
||||
}
|
||||
exports.CacheServiceClientProtobuf = CacheServiceClientProtobuf;
|
||||
var CacheServiceMethod;
|
||||
(function (CacheServiceMethod) {
|
||||
CacheServiceMethod["CreateCacheEntry"] = "CreateCacheEntry";
|
||||
CacheServiceMethod["FinalizeCacheEntryUpload"] = "FinalizeCacheEntryUpload";
|
||||
CacheServiceMethod["GetCacheEntryDownloadURL"] = "GetCacheEntryDownloadURL";
|
||||
CacheServiceMethod["DeleteCacheEntry"] = "DeleteCacheEntry";
|
||||
CacheServiceMethod["ListCacheEntries"] = "ListCacheEntries";
|
||||
CacheServiceMethod["LookupCacheEntry"] = "LookupCacheEntry";
|
||||
})(CacheServiceMethod || (exports.CacheServiceMethod = CacheServiceMethod = {}));
|
||||
exports.CacheServiceMethodList = [
|
||||
CacheServiceMethod.CreateCacheEntry,
|
||||
CacheServiceMethod.FinalizeCacheEntryUpload,
|
||||
CacheServiceMethod.GetCacheEntryDownloadURL,
|
||||
CacheServiceMethod.DeleteCacheEntry,
|
||||
CacheServiceMethod.ListCacheEntries,
|
||||
CacheServiceMethod.LookupCacheEntry,
|
||||
];
|
||||
function createCacheServiceServer(service) {
|
||||
return new twirp_ts_1.TwirpServer({
|
||||
service,
|
||||
packageName: "github.actions.results.api.v1",
|
||||
serviceName: "CacheService",
|
||||
methodList: exports.CacheServiceMethodList,
|
||||
matchRoute: matchCacheServiceRoute,
|
||||
});
|
||||
}
|
||||
exports.createCacheServiceServer = createCacheServiceServer;
|
||||
function matchCacheServiceRoute(method, events) {
|
||||
switch (method) {
|
||||
case "CreateCacheEntry":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateCacheEntry" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "FinalizeCacheEntryUpload":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeCacheEntryUpload" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "GetCacheEntryDownloadURL":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "GetCacheEntryDownloadURL" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "DeleteCacheEntry":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteCacheEntry" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "ListCacheEntries":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListCacheEntries" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "LookupCacheEntry":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "LookupCacheEntry" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
default:
|
||||
events.onNotFound();
|
||||
const msg = `no handler found`;
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = cache_1.CreateCacheEntryRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.CreateCacheEntry(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.CreateCacheEntry(ctx, request);
|
||||
}
|
||||
return JSON.stringify(cache_1.CreateCacheEntryResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = cache_1.FinalizeCacheEntryUploadRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.FinalizeCacheEntryUpload(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.FinalizeCacheEntryUpload(ctx, request);
|
||||
}
|
||||
return JSON.stringify(cache_1.FinalizeCacheEntryUploadResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = cache_1.GetCacheEntryDownloadURLRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.GetCacheEntryDownloadURL(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.GetCacheEntryDownloadURL(ctx, request);
|
||||
}
|
||||
return JSON.stringify(cache_1.GetCacheEntryDownloadURLResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = cache_1.DeleteCacheEntryRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.DeleteCacheEntry(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.DeleteCacheEntry(ctx, request);
|
||||
}
|
||||
return JSON.stringify(cache_1.DeleteCacheEntryResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = cache_1.ListCacheEntriesRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.ListCacheEntries(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.ListCacheEntries(ctx, request);
|
||||
}
|
||||
return JSON.stringify(cache_1.ListCacheEntriesResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = cache_1.LookupCacheEntryRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.LookupCacheEntry(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.LookupCacheEntry(ctx, request);
|
||||
}
|
||||
return JSON.stringify(cache_1.LookupCacheEntryResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = cache_1.CreateCacheEntryRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.CreateCacheEntry(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.CreateCacheEntry(ctx, request);
|
||||
}
|
||||
return Buffer.from(cache_1.CreateCacheEntryResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = cache_1.FinalizeCacheEntryUploadRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.FinalizeCacheEntryUpload(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.FinalizeCacheEntryUpload(ctx, request);
|
||||
}
|
||||
return Buffer.from(cache_1.FinalizeCacheEntryUploadResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = cache_1.GetCacheEntryDownloadURLRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.GetCacheEntryDownloadURL(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.GetCacheEntryDownloadURL(ctx, request);
|
||||
}
|
||||
return Buffer.from(cache_1.GetCacheEntryDownloadURLResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = cache_1.DeleteCacheEntryRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.DeleteCacheEntry(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.DeleteCacheEntry(ctx, request);
|
||||
}
|
||||
return Buffer.from(cache_1.DeleteCacheEntryResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = cache_1.ListCacheEntriesRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.ListCacheEntries(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.ListCacheEntries(ctx, request);
|
||||
}
|
||||
return Buffer.from(cache_1.ListCacheEntriesResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = cache_1.LookupCacheEntryRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.LookupCacheEntry(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.LookupCacheEntry(ctx, request);
|
||||
}
|
||||
return Buffer.from(cache_1.LookupCacheEntryResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=cache.twirp.js.map
|
||||
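createCacheServiceServer wraps a CacheServiceTwirp implementation in a twirp-ts TwirpServer; matchCacheServiceRoute dispatches on method name, and the per-method handlers branch on Content-Type, as shown above. A sketch of hosting it on Node's http module; the httpHandler() call follows the documented twirp-ts pattern, the port is arbitrary, and only one of the six methods is stubbed:

// Sketch: serving the generated CacheService (stub implementation; assumed twirp-ts API).
import * as http from "http";
import { createCacheServiceServer } from "@actions/cache/lib/generated/results/api/v1/cache.twirp";
import { LookupCacheEntryResponse } from "@actions/cache/lib/generated/results/api/v1/cache";

const service = {
    // Only LookupCacheEntry is stubbed here; a real server must implement
    // all six methods of CacheServiceTwirp, hence the cast below.
    async LookupCacheEntry(ctx: any, request: any) {
        return LookupCacheEntryResponse.create({ exists: false }); // always a cache miss
    },
};

const server = createCacheServiceServer(service as any);
// httpHandler() is the twirp-ts entry point for Node's http module.
http.createServer(server.httpHandler()).listen(8080);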
1
node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
71
node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.d.ts
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
import { Timestamp } from "../../../google/protobuf/timestamp";
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.entities.v1.CacheEntry
|
||||
*/
|
||||
export interface CacheEntry {
|
||||
/**
|
||||
* An explicit key for a cache entry
|
||||
*
|
||||
* @generated from protobuf field: string key = 1;
|
||||
*/
|
||||
key: string;
|
||||
/**
|
||||
* SHA256 hex digest of the cache archive
|
||||
*
|
||||
* @generated from protobuf field: string hash = 2;
|
||||
*/
|
||||
hash: string;
|
||||
/**
|
||||
* Cache entry size in bytes
|
||||
*
|
||||
* @generated from protobuf field: int64 size_bytes = 3;
|
||||
*/
|
||||
sizeBytes: string;
|
||||
/**
|
||||
* Access scope
|
||||
*
|
||||
* @generated from protobuf field: string scope = 4;
|
||||
*/
|
||||
scope: string;
|
||||
/**
|
||||
* Version SHA256 hex digest
|
||||
*
|
||||
* @generated from protobuf field: string version = 5;
|
||||
*/
|
||||
version: string;
|
||||
/**
|
||||
* When the cache entry was created
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
|
||||
*/
|
||||
createdAt?: Timestamp;
|
||||
/**
|
||||
* When the cache entry was last accessed
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
|
||||
*/
|
||||
lastAccessedAt?: Timestamp;
|
||||
/**
|
||||
* When the cache entry is set to expire
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
|
||||
*/
|
||||
expiresAt?: Timestamp;
|
||||
}
|
||||
declare class CacheEntry$Type extends MessageType<CacheEntry> {
|
||||
constructor();
|
||||
create(value?: PartialMessage<CacheEntry>): CacheEntry;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheEntry): CacheEntry;
|
||||
internalBinaryWrite(message: CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
|
||||
*/
|
||||
export declare const CacheEntry: CacheEntry$Type;
|
||||
export {};
|
||||
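In the entity above, the proto int64 size_bytes surfaces in TypeScript as a string (protobuf-ts's default 64-bit representation), and the timestamps are optional google.protobuf.Timestamp messages. A construction sketch; Timestamp.now() is the well-known-type helper protobuf-ts generates, and every value shown is a placeholder:

// Sketch: building a CacheEntry (all values are placeholders).
import { CacheEntry } from "@actions/cache/lib/generated/results/entities/v1/cacheentry";
import { Timestamp } from "@actions/cache/lib/generated/google/protobuf/timestamp";

const entry: CacheEntry = CacheEntry.create({
    key: "linux-node-abc123",
    hash: "placeholder-sha256-hex-digest",
    sizeBytes: "1048576",        // int64 travels as a decimal string
    scope: "refs/heads/main",
    version: "1",
    createdAt: Timestamp.now(), // generated well-known-type helper
});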
106
node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CacheEntry = void 0;
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
const timestamp_1 = require("../../../google/protobuf/timestamp");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CacheEntry$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.entities.v1.CacheEntry", [
|
||||
{ no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
|
||||
{ no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
|
||||
{ no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string key */ 1:
|
||||
message.key = reader.string();
|
||||
break;
|
||||
case /* string hash */ 2:
|
||||
message.hash = reader.string();
|
||||
break;
|
||||
case /* int64 size_bytes */ 3:
|
||||
message.sizeBytes = reader.int64().toString();
|
||||
break;
|
||||
case /* string scope */ 4:
|
||||
message.scope = reader.string();
|
||||
break;
|
||||
case /* string version */ 5:
|
||||
message.version = reader.string();
|
||||
break;
|
||||
case /* google.protobuf.Timestamp created_at */ 6:
|
||||
message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
|
||||
break;
|
||||
case /* google.protobuf.Timestamp last_accessed_at */ 7:
|
||||
message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
|
||||
break;
|
||||
case /* google.protobuf.Timestamp expires_at */ 8:
|
||||
message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string key = 1; */
|
||||
if (message.key !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
|
||||
/* string hash = 2; */
|
||||
if (message.hash !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
|
||||
/* int64 size_bytes = 3; */
|
||||
if (message.sizeBytes !== "0")
|
||||
writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
|
||||
/* string scope = 4; */
|
||||
if (message.scope !== "")
|
||||
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
|
||||
/* string version = 5; */
|
||||
if (message.version !== "")
|
||||
writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
|
||||
/* google.protobuf.Timestamp created_at = 6; */
|
||||
if (message.createdAt)
|
||||
timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* google.protobuf.Timestamp last_accessed_at = 7; */
|
||||
if (message.lastAccessedAt)
|
||||
timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* google.protobuf.Timestamp expires_at = 8; */
|
||||
if (message.expiresAt)
|
||||
timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
|
||||
*/
|
||||
exports.CacheEntry = new CacheEntry$Type();
|
||||
//# sourceMappingURL=cacheentry.js.map
|
||||
1
node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"cacheentry.js","sourceRoot":"","sources":["../../../../../src/generated/results/entities/v1/cacheentry.ts"],"names":[],"mappings":";;;AAKA,kDAAgD;AAGhD,kDAA2D;AAE3D,kDAA8D;AAC9D,kDAAoD;AACpD,kDAAmD;AACnD,kEAA+D;AAsD/D,2FAA2F;AAC3F,MAAM,eAAgB,SAAQ,qBAAuB;IACjD;QACI,KAAK,CAAC,+CAA+C,EAAE;YACnD,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YAClE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACnE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,oBAAoB,EAAE;YACxE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACpE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACtE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;YAClE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,kBAAkB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;YACxE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;SACrE,CAAC,CAAC;IACP,CAAC;IACD,MAAM,CAAC,KAAkC;QACrC,MAAM,OAAO,GAAG,EAAE,GAAG,EAAE,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,SAAS,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC;QAC9E,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,sBAAY,EAAE,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5F,IAAI,KAAK,KAAK,SAAS;YACnB,IAAA,gCAAsB,EAAa,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;QAC7D,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,kBAAkB,CAAC,MAAqB,EAAE,MAAc,EAAE,OAA0B,EAAE,MAAmB;QACrG,IAAI,OAAO,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,IAAI,CAAC,MAAM,EAAE,EAAE,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC;QACjE,OAAO,MAAM,CAAC,GAAG,GAAG,GAAG,EAAE;YACrB,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,EAAE,CAAC;YACvC,QAAQ,OAAO,EAAE;gBACb,KAAK,gBAAgB,CAAC,CAAC;oBACnB,OAAO,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAC9B,MAAM;gBACV,KAAK,iBAAiB,CAAC,CAAC;oBACpB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAC/B,MAAM;gBACV,KAAK,sBAAsB,CAAC,CAAC;oBACzB,OAAO,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;oBAC9C,MAAM;gBACV,KAAK,kBAAkB,CAAC,CAAC;oBACrB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAChC,MAAM;gBACV,KAAK,oBAAoB,CAAC,CAAC;oBACvB,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAClC,MAAM;gBACV,KAAK,0CAA0C,CAAC,CAAC;oBAC7C,OAAO,CAAC,SAAS,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;oBACtG,MAAM;gBACV,KAAK,gDAAgD,CAAC,CAAC;oBACnD,OAAO,CAAC,cAAc,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,cAAc,CAAC,CAAC;oBAChH,MAAM;gBACV,KAAK,0CAA0C,CAAC,CAAC;oBAC7C,OAAO,CAAC,SAAS,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;oBACtG,MAAM;gBACV;oBACI,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC;oBACjC,IAAI,CAAC,KAAK,OAAO;wBACb,MAAM,IAAI,UAAU,CAAC,KAAK,CAAC,iBAAiB,OAAO,eAAe,QAAQ,SAAS,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBACxG,IAAI,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC9B,IAAI,CAAC,KAAK,KAAK;wBACX,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;aACvG;SACJ;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,mBAAmB,CAAC,OAAmB,EAAE,MAAqB,EAAE,OAA2B;QACvF,qBAAqB;QACrB,IAAI,OAAO,CAAC,GAAG,KAAK,EAAE;YAClB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAChE,sBAAsB;QACtB,IAAI,OAAO,CAAC,IAAI,KAAK,EAAE;YACnB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACjE,2BAA2B;QAC3B,IAAI,OAAO,CAAC,SAAS,KAAK,GAAG;YACzB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;QAC5D,uBAAuB;QACvB,IAAI,OAAO,CAAC,KAAK,KAAK,EAAE;YACpB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAClE,yBAAyB;QACzB,IAAI,OAAO,CAAC,OAAO,KAAK,EAAE;YACtB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QACpE,+CAA+C;QAC/C,IAAI,OAAO,CAAC,SAAS;YACjB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,qDAAqD;QACrD,IAAI,OAAO,CAAC,cAAc;YACtB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,cAAc,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QAC1H,+CAA+C;QAC/C,IAAI,OAAO,CAAC,SAAS;YACjB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,IAAI,CAAC,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACnC,IAAI,CAAC,KAAK,KAAK;YACX,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AACD;;GAEG;AACU,QAAA,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC"}
35
node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.d.ts
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
import { CacheScope } from "./cachescope";
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.entities.v1.CacheMetadata
|
||||
*/
|
||||
export interface CacheMetadata {
|
||||
/**
|
||||
* Backend repository id
|
||||
*
|
||||
* @generated from protobuf field: int64 repository_id = 1;
|
||||
*/
|
||||
repositoryId: string;
|
||||
/**
|
||||
* Scopes for the cache entry
|
||||
*
|
||||
* @generated from protobuf field: repeated github.actions.results.entities.v1.CacheScope scope = 2;
|
||||
*/
|
||||
scope: CacheScope[];
|
||||
}
|
||||
declare class CacheMetadata$Type extends MessageType<CacheMetadata> {
|
||||
constructor();
|
||||
create(value?: PartialMessage<CacheMetadata>): CacheMetadata;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheMetadata): CacheMetadata;
|
||||
internalBinaryWrite(message: CacheMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
|
||||
*/
|
||||
export declare const CacheMetadata: CacheMetadata$Type;
|
||||
export {};
|
||||
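CacheMetadata pairs an int64 repository_id (again a string in TypeScript) with a repeated CacheScope; the CacheScope message itself lives in ./cachescope and is not part of this diff, so the sketch below leaves the list empty rather than guess its fields:

// Sketch: CacheMetadata with placeholder values; scope left empty because
// CacheScope's definition is not shown in this diff.
import { CacheMetadata } from "@actions/cache/lib/generated/results/entities/v1/cachemetadata";

const metadata: CacheMetadata = CacheMetadata.create({
    repositoryId: "123456789",
    scope: [],
});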
Some files were not shown because too many files have changed in this diff