Mirror of https://github.com/github/codeql-action.git (synced 2025-12-11 10:14:33 +08:00)
Compare commits: readme_min...fix-tslint
153 commits
SHA1:
0f9b63c0d7
74f864bee1
38c231113e
34c941dc31
5eccb79587
11a9af0387
6d036cef6f
f9768ac4ba
3ff198f23b
ff8fe44e0c
3f2a60be8a
4c6749115a
608ed15968
14f179f70b
dc4009c7ed
14d602cced
24096a1cb3
74d434c5ca
fff3de9938
1aae76b906
013c02758e
0b53ebbc36
6de3e1cde4
c9d0312cb7
0cdf645694
d00417a341
7928587bdf
87ecd0d0cc
7e2e297e07
b97097aaed
8a8a49d3c5
fcb696ec59
c2d2dfdcdd
042ab541fd
19faafba94
476c8a44ba
f9ef310b75
6bd7f17e0e
582fd14a81
8425341ae0
1f2cca021a
fa9e0ac2a6
a9de5b50d7
af4edf6546
5a97f7e980
d4fb7fc762
00ebedc522
840dc5ee9a
c1add46efa
e35c90f53d
6db8182349
202704856d
5ea736059a
b4610ac367
a0d60d5d9e
f18fffbea8
655c4497ce
d7a2025f2d
22501fd7c8
07e22b1f4a
3c2191ffdd
28abced8ca
50dcaaf00d
30f7117e6a
28a878efc3
d518039a6b
855f965205
2909e97a32
4997c3ff4d
0bd4da3a6c
98ad2fc49d
3ca3147cd4
96da037d49
da1e237d1e
054f867322
1e600686e7
cd1625a162
8788e5aa59
8fb9090674
10a2fd615f
8b71cf3e5f
ae301902e1
ddee374101
080dc8c3f0
6d1f969b1c
ff40939f66
7b32c3c950
90c07ef21d
852b9186d6
63f52e71c0
3a883af8a6
886b7d3e6e
4e12efc7c3
5c5f422edb
97ef91227e
25e5256866
5ec6b7524f
b366432cb3
fa0a733046
0e6df42024
58c1abf92e
6507fba7ec
aa54af7018
74c9991849
f49335fc3b
d7b9f5a097
572c8bbc0c
0347b72305
27cc8b23fe
584df475ca
cd95d34497
88c1b7fb89
51b42fcf78
015ead73d9
c351304778
96901ac7d8
cc471c2014
c88fb695ab
ec4d38a9a5
15bd158ded
256c63a715
a76042ab4a
1477a43cc8
f17ebc80bd
c0d9de18c0
52cd1f2261
3455736978
f668f5fc74
3aa3d6a2b6
538cbdd614
49575f87c4
5a800ccbfa
cc2c18d6a8
4c11b3d9bf
a511aca9f1
c3847056c5
189a899282
c5ecb82753
4dc964d906
dc27ff90bd
cf266cbf27
1f29db50bb
c979850d28
baa9c9e0df
d966ea2f52
6bab450a9a
583f8a923c
ab918b676b
290b34d5df
dcd81b5847
d90fca396a
546d5a8843
43de3a9949
.github/ISSUE_TEMPLATE/config.yml (vendored, new file: 5 lines)
@@ -0,0 +1,5 @@
blank_issues_enabled: true
contact_links:
  - name: Contact GitHub Support
    url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
    about: Contact Support about code scanning
.github/codeql/codeql-config.yml (vendored: 11 lines changed)
@@ -1,4 +1,13 @@
-me: "CodeQL config"
+name: "CodeQL config"
 queries:
   - name: Run custom queries
     uses: ./queries
+  # Run all extra query suites, both because we want to
+  # and because it'll act as extra testing. This is why
+  # we include both even though one is a superset of the
+  # other, because we're testing the parsing logic and
+  # that the suites exist in the codeql bundle.
+  - uses: security-extended
+  - uses: security-and-quality
+paths-ignore:
+  - tests
.github/update-release-branch.py (vendored, new file: 178 lines)
@@ -0,0 +1,178 @@
import datetime
from github import Github
import random
import requests
import subprocess
import sys

# The branch being merged from.
# This is the one that contains day-to-day development work.
MASTER_BRANCH = 'master'
# The branch being merged into.
# This is the release branch that users reference.
LATEST_RELEASE_BRANCH = 'v1'
# Name of the remote
ORIGIN = 'origin'

# Runs git with the given args and returns the stdout.
# Raises an error if git does not exit successfully.
def run_git(*args):
  cmd = ['git', *args]
  p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  if (p.returncode != 0):
    raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
  return p.stdout.decode('ascii')

# Returns true if the given branch exists on the origin remote
def branch_exists_on_remote(branch_name):
  return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''

# Opens a PR from the given branch to the release branch
def open_pr(repo, all_commits, short_master_sha, branch_name):
  # Sort the commits into the pull requests that introduced them,
  # and any commits that don't have a pull request
  pull_requests = []
  commits_without_pull_requests = []
  for commit in all_commits:
    pr = get_pr_for_commit(repo, commit)

    if pr is None:
      commits_without_pull_requests.append(commit)
    elif not any(p for p in pull_requests if p.number == pr.number):
      pull_requests.append(pr)

  print('Found ' + str(len(pull_requests)) + ' pull requests')
  print('Found ' + str(len(commits_without_pull_requests)) + ' commits not in a pull request')

  # Sort PRs and commits by age
  pull_requests.sort(key=lambda pr: pr.number)
  commits_without_pull_requests.sort(key=lambda c: c.commit.author.date)

  # Start constructing the body text
  body = 'Merging ' + short_master_sha + ' into ' + LATEST_RELEASE_BRANCH

  conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
  body += '\n\nConductor for this PR is @' + conductor

  # List all PRs merged
  if len(pull_requests) > 0:
    body += '\n\nContains the following pull requests:'
    for pr in pull_requests:
      merger = get_merger_of_pr(repo, pr)
      body += '\n- #' + str(pr.number)
      body += ' - ' + pr.title
      body += ' (@' + merger + ')'

  # List all commits not part of a PR
  if len(commits_without_pull_requests) > 0:
    body += '\n\nContains the following commits not from a pull request:'
    for commit in commits_without_pull_requests:
      body += '\n- ' + commit.sha
      body += ' - ' + get_truncated_commit_message(commit)
      body += ' (@' + commit.author.login + ')'

  title = 'Merge ' + MASTER_BRANCH + ' into ' + LATEST_RELEASE_BRANCH

  # Create the pull request
  pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
  print('Created PR #' + str(pr.number))

  # Assign the conductor
  pr.add_to_assignees(conductor)
  print('Assigned PR to ' + conductor)

# Gets the person who should be in charge of the mergeback PR
def get_conductor(repo, pull_requests, other_commits):
  # If there are any PRs then use whoever merged the last one
  if len(pull_requests) > 0:
    return get_merger_of_pr(repo, pull_requests[-1])

  # Otherwise take the author of the latest commit
  return other_commits[-1].author.login

# Gets a list of the SHAs of all commits that have happened on master
# since the release branched off.
# This will not include any commits that exist on the release branch
# that aren't on master.
def get_commit_difference(repo):
  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MASTER_BRANCH).strip().split('\n')

  # Convert to full-fledged commit objects
  commits = [repo.get_commit(c) for c in commits]

  # Filter out merge commits for PRs
  return list(filter(lambda c: not is_pr_merge_commit(c), commits))

# Is the given commit the automatic merge commit from when merging a PR
def is_pr_merge_commit(commit):
  return commit.committer.login == 'web-flow' and len(commit.parents) > 1

# Gets a copy of the commit message that should display nicely
def get_truncated_commit_message(commit):
  message = commit.commit.message.split('\n')[0]
  if len(message) > 60:
    return message[:57] + '...'
  else:
    return message

# Converts a commit into the PR that introduced it to the master branch.
# Returns the PR object, or None if no PR could be found.
def get_pr_for_commit(repo, commit):
  prs = commit.get_pulls()

  if prs.totalCount > 0:
    # In the case that there are multiple PRs, return the earliest one
    prs = list(prs)
    prs.sort(key=lambda pr: int(pr.number))
    return prs[0]
  else:
    return None

# Get the person who merged the pull request.
# For most cases this will be the same as the author, but for PRs opened
# by external contributors getting the merger will get us the GitHub
# employee who reviewed and merged the PR.
def get_merger_of_pr(repo, pr):
  return repo.get_commit(pr.merge_commit_sha).author.login

def main():
  if len(sys.argv) != 3:
    raise Exception('Usage: update-release-branch.py <github token> <repository nwo>')
  github_token = sys.argv[1]
  repository_nwo = sys.argv[2]

  repo = Github(github_token).get_repo(repository_nwo)

  # Print what we intend to do
  print('Considering difference between ' + MASTER_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
  short_master_sha = run_git('rev-parse', '--short', MASTER_BRANCH).strip()
  print('Current head of ' + MASTER_BRANCH + ' is ' + short_master_sha)

  # See if there are any commits to merge in
  commits = get_commit_difference(repo)
  if len(commits) == 0:
    print('No commits to merge from ' + MASTER_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
    return

  # The branch name is based off of the name of branch being merged into
  # and the SHA of the branch being merged from. Thus if the branch already
  # exists we can assume we don't need to recreate it.
  new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_master_sha
  print('Branch name is ' + new_branch_name)

  # Check if the branch already exists. If so we can abort as this script
  # has already run on this combination of branches.
  if branch_exists_on_remote(new_branch_name):
    print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
    return

  # Create the new branch and push it to the remote
  print('Creating branch ' + new_branch_name)
  run_git('checkout', '-b', new_branch_name, MASTER_BRANCH)
  run_git('push', ORIGIN, new_branch_name)

  # Open a PR to update the branch
  open_pr(repo, commits, short_master_sha, new_branch_name)

if __name__ == '__main__':
  main()
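The workflow further down (`update-release-branch.yml`) drives this script with a token and the repository's owner/name ("nwo"), matching the usage check in `main()` above. A minimal sketch of an equivalent local invocation, assuming PyGithub is installed and a token is available in a hypothetical `GITHUB_TOKEN` environment variable:

```python
import os
import subprocess

# Hypothetical local run of the script above; assumes PyGithub is
# installed and GITHUB_TOKEN holds a token with access to the repo.
subprocess.run(
    ["python", ".github/update-release-branch.py",
     os.environ["GITHUB_TOKEN"],   # <github token>
     "github/codeql-action"],      # <repository nwo>
    check=True,
)
```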
.github/workflows/codeql.yml (vendored: 15 lines changed)
@@ -1,6 +1,6 @@
 name: "CodeQL action"

-on: [push]
+on: [push, pull_request]

 jobs:
   build:
@@ -11,7 +11,18 @@ jobs:

     steps:
     - uses: actions/checkout@v1
+      with:
+        # Must fetch at least the immediate parents so that if this is
+        # a pull request then we can checkout the head of the pull request.
+        fetch-depth: 2
+
+    # If this run was triggered by a pull request event then checkout
+    # the head of the pull request instead of the merge commit.
+    - run: git checkout HEAD^2
+      if: ${{ github.event_name == 'pull_request' }}
+
     - uses: ./init
       with:
-        config-file: ./.github/codeql/codeql-config.yml
+        languages: javascript
+        config-file: ./.github/codeql/codeql-config.yml
     - uses: ./analyze
.github/workflows/integration-testing.yml (vendored: 136 lines changed)
@@ -1,22 +1,126 @@
 name: "Integration Testing"

-on: [push]
+on: [push, pull_request]

 jobs:
-  dispatch-events:
-    if: github.event.repository.full_name == 'github/codeql-action'
+  multi-language-repo_test-autodetect-languages:
     runs-on: ubuntu-latest
-    steps:
-    - name: Send repository dispatch events
-      run: |
-        curl -X POST \
-        -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
-        -H "Accept: application/vnd.github.everest-preview+json" \
-        https://api.github.com/repos/Anthophila/amazon-cognito-js-copy/dispatches \
-        -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
-
-        curl -X POST \
-        -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
-        -H "Accept: application/vnd.github.everest-preview+json" \
-        https://api.github.com/repos/Anthophila/electron-test-action/dispatches \
-        -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+    - run: |
+        cd "$CODEQL_ACTION_DATABASE_DIR"
+        # List all directories as there will be precisely one directory per database
+        # but there may be other files in this directory such as query suites.
+        if [ "$(ls -d */ | wc -l)" != 6 ] || \
+           [[ ! -d cpp ]] || \
+           [[ ! -d csharp ]] || \
+           [[ ! -d go ]] || \
+           [[ ! -d java ]] || \
+           [[ ! -d javascript ]] || \
+           [[ ! -d python ]]; then
+          echo "Did not find expected number of databases. Database dir contains: $(ls)"
+          exit 1
+        fi
+
+  multi-language-repo_test-custom-queries:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest, macos-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+      with:
+        languages: cpp,csharp,java,javascript,python
+        config-file: ./.github/codeql/custom-queries.yml
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+
+  # Currently it is not possible to analyze Go in conjunction with other languages on macos
+  multi-language-repo_test-go-custom-queries:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest, macos-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+    - uses: actions/setup-go@v2
+      if: ${{ matrix.os == 'macos-latest' }}
+      with:
+        go-version: '^1.13.1'
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+      with:
+        languages: go
+        config-file: ./.github/codeql/custom-queries.yml
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+
+
+  multi-language-repo_rubocop:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - name: Set up Ruby
+      uses: ruby/setup-ruby@v1
+      with:
+        ruby-version: 2.6
+    - name: Install Code Scanning integration
+      run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
+    - name: Install dependencies
+      run: bundle install
+    - name: Rubocop run
+      run: |
+        bash -c "
+        bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
+        [[ $? -ne 2 ]]
+        "
+    - uses: ./../action/upload-sarif
+      with:
+        sarif_file: rubocop.sarif
+      env:
+        TEST_MODE: true
.github/workflows/js-uptodate-check.yml (vendored, deleted: 27 lines)
@@ -1,27 +0,0 @@
name: "Check generated JavaScript"

on: [pull_request]

jobs:
  check-js:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: Check generated JavaScript
      run: |
        # Sanity check that repo is clean to start with
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then this workflow needs attention...
          >&2 echo "Failed: Repo should be clean before testing!"
          exit 1
        fi
        # Generate the JavaScript files
        npm run-script build
        # Check that repo is still clean
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then the PR needs attention
          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
          exit 1
        fi
        echo "Success: JavaScript files are up to date"
.github/workflows/npm-test.yml (vendored, deleted: 12 lines)
@@ -1,12 +0,0 @@
name: "npm run-script test"

on: [push]

jobs:
  npm-test:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: npm run-script test
      run: npm run-script test
.github/workflows/pr-checks.yml (vendored, new file: 71 lines)
@@ -0,0 +1,71 @@
name: "PR checks"

on: [push, pull_request]

jobs:
  tslint:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: tslint
      run: npm run-script lint

  check-js:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: Check generated JavaScript
      run: |
        # Sanity check that repo is clean to start with
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then this workflow needs attention...
          >&2 echo "Failed: Repo should be clean before testing!"
          exit 1
        fi
        # Generate the JavaScript files
        npm run-script build
        # Check that repo is still clean
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then the PR needs attention
          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
          git status
          exit 1
        fi
        echo "Success: JavaScript files are up to date"

  check-node-modules:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: Check node modules up to date
      run: |
        # Sanity check that repo is clean to start with
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then this workflow needs attention...
          >&2 echo "Failed: Repo should be clean before testing!"
          exit 1
        fi

        # Reinstall modules and then clean to remove absolute paths
        # Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
        npm ci
        npm run removeNPMAbsolutePaths
        # Check that repo is still clean
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then the PR needs attention
          >&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
          git status
          exit 1
        fi
        echo "Success: node_modules are up to date"

  npm-test:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: npm run-script test
      run: npm run-script test
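The `check-js` and `check-node-modules` jobs above share one idea: regenerate the checked-in artifacts, then fail if `git status --porcelain` reports any difference. A minimal sketch of that check, assuming it runs from the repository root:

```python
import subprocess

def repo_is_clean() -> bool:
    # `git status --porcelain` prints one line per changed file,
    # so empty output means the working tree matches HEAD.
    out = subprocess.run(
        ["git", "status", "--porcelain"],
        stdout=subprocess.PIPE,
        check=True,
    ).stdout
    return out.strip() == b""

assert repo_is_clean(), "generated files are not up to date"
```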
.github/workflows/ts-lint.yml (vendored, deleted: 12 lines)
@@ -1,12 +0,0 @@
name: "TSLint"

on: [push]

jobs:
  tslint:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: tslint
      run: npm run-script lint
.github/workflows/update-release-branch.yml (vendored, new file: 31 lines)
@@ -0,0 +1,31 @@
name: Update release branch
on:
  schedule:
    - cron: 0 9 * * 1
  repository_dispatch:
    # Example of how to trigger this:
    # curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
    # Replace <token> with a personal access token from this page: https://github.com/settings/tokens
    types: [update-release-branch]

jobs:
  update:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
      with:
        # Need full history so we can calculate diffs
        fetch-depth: 0

    - name: Set up Python
      uses: actions/setup-python@v2
      with:
        python-version: 3.5

    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install PyGithub==1.51 requests

    - name: Update release branch
      run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
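The comment in the workflow above shows how to fire the `repository_dispatch` trigger with curl. The same call can be sketched with the `requests` library the workflow already installs; the token is assumed to be a personal access token with access to the repository:

```python
import requests

# Hypothetical manual trigger of the repository_dispatch event above.
token = "<token>"  # personal access token from https://github.com/settings/tokens
resp = requests.post(
    "https://api.github.com/repos/github/codeql-action/dispatches",
    headers={"Authorization": "Bearer " + token},
    json={"event_type": "update-release-branch"},
)
resp.raise_for_status()  # GitHub answers 204 No Content on success
```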
.vscode/settings.json (vendored, new file: 10 lines)
@@ -0,0 +1,10 @@
{
    "files.exclude": {
        // include the defaults from VS Code
        "**/.git": true,
        "**/.DS_Store": true,

        // transpiled JavaScript
        "lib": true,
    }
}
README.md (99 lines changed)
@@ -1,6 +1,6 @@
 # CodeQL Action

-This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/semmle/ql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
+This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.

 ## License
@@ -10,6 +10,8 @@ The underlying CodeQL CLI, used in this action, is licensed under the [GitHub Co

 ## Usage

+This is a short walkthrough, but for more information read [configuring code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning).
+
 To get code scanning results from CodeQL analysis on your repo you can use the following workflow as a template:

 ```yaml
@@ -18,6 +20,7 @@ name: "Code Scanning - Action"

 on:
   push:
+  pull_request:
   schedule:
     - cron: '0 0 * * 0'

@@ -33,6 +36,17 @@ jobs:
     steps:
     - name: Checkout repository
       uses: actions/checkout@v2
+      with:
+        # Must fetch at least the immediate parents so that if this is
+        # a pull request then we can checkout the head of the pull request.
+        # Only include this option if you are running this workflow on pull requests.
+        fetch-depth: 2
+
+    # If this run was triggered by a pull request event then checkout
+    # the head of the pull request instead of the merge commit.
+    # Only include this step if you are running this workflow on pull requests.
+    - run: git checkout HEAD^2
+      if: ${{ github.event_name == 'pull_request' }}

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
@@ -78,24 +92,9 @@ If you prefer to integrate this within an existing CI workflow, it should end up
         uses: github/codeql-action/analyze@v1
 ```

-### Actions triggers
+### Configuration file

-The CodeQL action should be run on `push` events, and on a `schedule`. `Push` events allow us to do a detailed analysis of the delta in a pull request, while the `schedule` event ensures that GitHub regularly scans the repository for the latest vulnerabilities, even if the repository becomes inactive. This action does not support the `pull_request` event.
-
-### Configuration
-
-You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses).
-
-You can disable the default queries using `disable-default-queries: true`.
-
-You can choose to ignore some files or folders from the analysis, or include additional files/folders for analysis. This *only* works for Javascript and Python analysis.
-Identifying potential files for extraction:
-
-- Scans each folder that's defined as `paths` in turn, traversing subfolders, and looking for relevant files.
-- If it finds a subfolder that's defined as `paths-ignore`, stop traversing.
-- If a file or folder is both in `paths` and `paths-ignore`, the `paths-ignore` is ignored.
-
-Use the `config-file` parameter of the init action to enable the configuration file. For example:
+Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.

 ```yaml
 - uses: github/codeql-action/init@v1
@@ -103,68 +102,8 @@ Use the `config-file` parameter of the init action to enable the configuration f
     config-file: ./.github/codeql/codeql-config.yml
 ```

-A config file looks like this:
-
-```yaml
-name: "My CodeQL config"
-
-disable-default-queries: true
-
-queries:
-  - name: In-repo queries (Runs the queries located in the my-queries folder of the repo)
-    uses: ./my-queries
-  - name: External Javascript QL pack (Runs a QL pack located in an external repo)
-    uses: /Semmle/ql/javascript/ql/src/Electron@master
-  - name: External query (Runs a single query located in an external QL pack)
-    uses: Semmle/ql/javascript/ql/src/AngularJS/DeadAngularJSEventListener.ql@master
-  - name: Select query suite (Runs a query suites)
-    uses: ./codeql-querypacks/complex-python-querypack/rootAndBar.qls
-
-paths:
-  - src/util.ts
-
-paths-ignore:
-  - src
-  - lib
-```
+The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."

 ## Troubleshooting

-### Trouble with Go dependencies
-
-#### If you use a vendor directory
-
-Try passing
-
-```yaml
-env:
-  GOFLAGS: "-mod=vendor"
-```
-
-to `github/codeql-action/analyze`.
-
-### If you do not use a vendor directory
-
-Dependencies on public repositories should just work. If you have dependencies on private repositories, one option is to use `git config` and a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) to authenticate when downloading dependencies. Add a section like
-
-```yaml
-steps:
-- name: Configure git private repo access
-  env:
-    TOKEN: ${{ secrets.GITHUB_PAT }}
-  run: |
-    git config --global url."https://${TOKEN}@github.com/foo/bar".insteadOf "https://github.com/foo/bar"
-    git config --global url."https://${TOKEN}@github.com/foo/baz".insteadOf "https://github.com/foo/baz"
-```
-
-before any codeql actions. A similar thing can also be done with an SSH key or deploy key.
-
-### C# using dotnet version 2 on linux
-
-This currently requires invoking `dotnet` with the `/p:UseSharedCompilation=false` flag. For example:
-
-```shell
-dotnet build /p:UseSharedCompilation=false
-```
-
-Version 3 does not require the additional flag.
+Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
@@ -12,6 +12,9 @@ inputs:
     description: Upload the SARIF file
     required: false
     default: true
+  ram:
+    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
+    required: false
   token:
     default: ${{ github.token }}
   matrix:
@@ -5,12 +5,14 @@ inputs:
   tools:
     description: URL of CodeQL tools
     required: false
-    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
+    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz
   languages:
     description: The languages to be analysed
     required: false
   token:
     default: ${{ github.token }}
   matrix:
     default: ${{ toJson(matrix) }}
+  config-file:
+    description: Path of the config file to use
+    required: false
@@ -1,11 +0,0 @@
module.exports = {
  clearMocks: true,
  moduleFileExtensions: ['js', 'ts'],
  testEnvironment: 'node',
  testMatch: ['**/*.test.ts'],
  testRunner: 'jest-circus/runner',
  transform: {
    '^.+\\.ts$': 'ts-jest'
  },
  verbose: true
}
lib/analysis-paths.js (generated: 3 lines changed)
@@ -16,7 +16,7 @@ function includeAndExcludeAnalysisPaths(config, languages) {
         core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
     }
     function isInterpretedLanguage(language) {
-        return language === 'javascript' && language === 'python';
+        return language === 'javascript' || language === 'python';
     }
     // Index include/exclude only work in javascript and python
     // If some other language is detected/configured show a warning
@@ -25,3 +25,4 @@ function includeAndExcludeAnalysisPaths(config, languages) {
     }
 }
 exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
+//# sourceMappingURL=analysis-paths.js.map
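The one-line fix above replaces `&&` with `||`: a single language string can never equal both 'javascript' and 'python' at once, so the old predicate was always false and the warning logic never saw an interpreted language. A minimal restatement of the corrected check:

```python
def is_interpreted_language(language: str) -> bool:
    # With `and` (the old version) this could never be true
    # for any single language value.
    return language == "javascript" or language == "python"

assert is_interpreted_language("python")
assert not is_interpreted_language("cpp")
```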
lib/analysis-paths.js.map (new file: 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAgB,8BAA8B,CAAC,MAA0B,EAAE,SAAmB;IAC1F,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KACtE;IAED,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACjC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC5E;IAED,SAAS,qBAAqB,CAAC,QAAQ;QACnC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;IAC9D,CAAC;IAED,2DAA2D;IAC3D,+DAA+D;IAC/D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAC3G,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC9G;AACL,CAAC;AAlBD,wEAkBC"}
lib/analysis-paths.test.js (generated, new file: 30 lines)
@@ -0,0 +1,30 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const analysisPaths = __importStar(require("./analysis-paths"));
const configUtils = __importStar(require("./config-utils"));
ava_1.default("emptyPaths", async (t) => {
    let config = new configUtils.Config();
    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
    t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
    t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
});
ava_1.default("nonEmptyPaths", async (t) => {
    let config = new configUtils.Config();
    config.paths.push('path1', 'path2');
    config.pathsIgnore.push('path3', 'path4');
    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
    t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
    t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path3\npath4');
});
//# sourceMappingURL=analysis-paths.test.js.map
lib/analysis-paths.test.js.map (new file: 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,4DAA8C;AAE9C,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACzB,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC5B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IACpC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC1C,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;AAC5D,CAAC,CAAC,CAAC"}
lib/autobuild.js (generated: 1 line changed)
@@ -58,3 +58,4 @@ run().catch(e => {
     core.setFailed("autobuild action failed. " + e);
     console.log(e);
 });
+//# sourceMappingURL=autobuild.js.map
lib/autobuild.js.map (new file: 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,2CAA6B;AAE7B,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,8DAA8D;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC;QAExE,MAAM,OAAO,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,cAAc,CAAC;QAChF,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;QAGpF,+DAA+D;QAC/D,0FAA0F;QAC1F,qDAAqD;QACrD,8EAA8E;QAC9E,gHAAgH;QAChH,IAAI,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,wBAAwB,EAAE,+BAA+B,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAE1I,MAAM,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC9B,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
lib/config-utils.js (generated: 208 lines changed)
@@ -12,6 +12,13 @@ const io = __importStar(require("@actions/io"));
 const fs = __importStar(require("fs"));
 const yaml = __importStar(require("js-yaml"));
 const path = __importStar(require("path"));
+const util = __importStar(require("./util"));
+const NAME_PROPERTY = 'name';
+const DISPLAY_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
+const QUERIES_PROPERTY = 'queries';
+const QUERIES_USES_PROPERTY = 'uses';
+const PATHS_IGNORE_PROPERTY = 'paths-ignore';
+const PATHS_PROPERTY = 'paths';
 class ExternalQuery {
     constructor(repository, ref) {
         this.path = '';
@@ -20,39 +27,74 @@
     }
 }
 exports.ExternalQuery = ExternalQuery;
+// The set of acceptable values for built-in suites from the codeql bundle
+const builtinSuites = ['security-extended', 'security-and-quality'];
 class Config {
     constructor() {
         this.name = "";
         this.disableDefaultQueries = false;
         this.additionalQueries = [];
         this.externalQueries = [];
+        this.additionalSuites = [];
         this.pathsIgnore = [];
         this.paths = [];
     }
-    addQuery(queryUses) {
+    addQuery(configFile, queryUses) {
         // The logic for parsing the string is based on what actions does for
         // parsing the 'uses' actions in the workflow file
         queryUses = queryUses.trim();
         if (queryUses === "") {
-            throw '"uses" value for queries cannot be blank';
+            throw new Error(getQueryUsesInvalid(configFile));
         }
         // Check for the local path case before we start trying to parse the repository name
         if (queryUses.startsWith("./")) {
-            this.additionalQueries.push(queryUses.slice(2));
+            const localQueryPath = queryUses.slice(2);
+            // Resolve the local path against the workspace so that when this is
+            // passed to codeql it resolves to exactly the path we expect it to resolve to.
+            const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
+            let absoluteQueryPath = path.join(workspacePath, localQueryPath);
+            // Check the file exists
+            if (!fs.existsSync(absoluteQueryPath)) {
+                throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
+            }
+            // Call this after checking file exists, because it'll fail if file doesn't exist
+            absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
+            // Check the local path doesn't jump outside the repo using '..' or symlinks
+            if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
+                throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
+            }
+            this.additionalQueries.push(absoluteQueryPath);
             return;
         }
+        // Check for one of the builtin suites
+        if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
+            const suite = builtinSuites.find((suite) => suite === queryUses);
+            if (suite) {
+                this.additionalSuites.push(suite);
+                return;
+            }
+            else {
+                throw new Error(getQueryUsesInvalid(configFile, queryUses));
+            }
+        }
         let tok = queryUses.split('@');
         if (tok.length !== 2) {
-            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
+            throw new Error(getQueryUsesInvalid(configFile, queryUses));
        }
         const ref = tok[1];
         tok = tok[0].split('/');
         // The first token is the owner
         // The second token is the repo
         // The rest is a path, if there is more than one token combine them to form the full path
+        if (tok.length < 2) {
+            throw new Error(getQueryUsesInvalid(configFile, queryUses));
+        }
         if (tok.length > 3) {
             tok = [tok[0], tok[1], tok.slice(2).join('/')];
         }
-        if (tok.length < 2) {
-            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
+        // Check none of the parts of the repository name are empty
+        if (tok[0].trim() === '' || tok[1].trim() === '') {
+            throw new Error(getQueryUsesInvalid(configFile, queryUses));
         }
         let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
         if (tok.length === 3) {
@@ -62,62 +104,137 @@
     }
 }
 exports.Config = Config;
-const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
+function getNameInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
+}
+exports.getNameInvalid = getNameInvalid;
+function getDisableDefaultQueriesInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, DISPLAY_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
+}
+exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
+function getQueriesInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
+}
+exports.getQueriesInvalid = getQueriesInvalid;
+function getQueryUsesInvalid(configFile, queryUses) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
+        '), a relative path, or be of the form "owner/repo[/path]@ref"' +
+        (queryUses !== undefined ? '\n Found: ' + queryUses : ''));
+}
+exports.getQueryUsesInvalid = getQueryUsesInvalid;
+function getPathsIgnoreInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
+}
+exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
+function getPathsInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
+}
+exports.getPathsInvalid = getPathsInvalid;
+function getLocalPathOutsideOfRepository(configFile, localPath) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
+}
+exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
+function getLocalPathDoesNotExist(configFile, localPath) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
+}
+exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
+function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
+    return 'The configuration file "' + configFile + '" is outside of the workspace';
+}
+exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
+function getConfigFileDoesNotExistErrorMessage(configFile) {
+    return 'The configuration file "' + configFile + '" does not exist';
+}
+exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
+function getConfigFilePropertyError(configFile, property, error) {
+    return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
+}
 function initConfig() {
-    const configFile = core.getInput('config-file');
+    let configFile = core.getInput('config-file');
     const config = new Config();
     // If no config file was provided create an empty one
     if (configFile === '') {
         core.debug('No configuration file was provided');
         return config;
     }
-    try {
-        const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
-        if (parsedYAML.name && typeof parsedYAML.name === "string") {
-            config.name = parsedYAML.name;
-        }
-        if (parsedYAML['disable-default-queries'] && typeof parsedYAML['disable-default-queries'] === "boolean") {
-            config.disableDefaultQueries = parsedYAML['disable-default-queries'];
-        }
-        const queries = parsedYAML.queries;
-        if (queries && queries instanceof Array) {
-            queries.forEach(query => {
-                if (query.uses && typeof query.uses === "string") {
-                    config.addQuery(query.uses);
-                }
-            });
-        }
-        const pathsIgnore = parsedYAML['paths-ignore'];
-        if (pathsIgnore && pathsIgnore instanceof Array) {
-            pathsIgnore.forEach(path => {
-                if (typeof path === "string") {
-                    config.pathsIgnore.push(path);
-                }
-            });
-        }
-        const paths = parsedYAML.paths;
-        if (paths && paths instanceof Array) {
-            paths.forEach(path => {
-                if (typeof path === "string") {
-                    config.paths.push(path);
-                }
-            });
-        }
+    // Treat the config file as relative to the workspace
+    const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
+    configFile = path.resolve(workspacePath, configFile);
+    // Error if the config file is now outside of the workspace
+    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
+        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
     }
-    catch (err) {
-        core.setFailed(err);
+    // Error if the file does not exist
+    if (!fs.existsSync(configFile)) {
+        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
     }
+    const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
+    if (NAME_PROPERTY in parsedYAML) {
+        if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
+            throw new Error(getNameInvalid(configFile));
+        }
+        if (parsedYAML[NAME_PROPERTY].length === 0) {
+            throw new Error(getNameInvalid(configFile));
+        }
+        config.name = parsedYAML[NAME_PROPERTY];
+    }
+    if (DISPLAY_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
+        if (typeof parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
+            throw new Error(getDisableDefaultQueriesInvalid(configFile));
+        }
+        config.disableDefaultQueries = parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY];
+    }
+    if (QUERIES_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
+            throw new Error(getQueriesInvalid(configFile));
+        }
+        parsedYAML[QUERIES_PROPERTY].forEach(query => {
+            if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
+                throw new Error(getQueryUsesInvalid(configFile));
+            }
+            config.addQuery(configFile, query[QUERIES_USES_PROPERTY]);
+        });
+    }
+    if (PATHS_IGNORE_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
+            throw new Error(getPathsIgnoreInvalid(configFile));
+        }
+        parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
+            if (typeof path !== "string" || path === '') {
+                throw new Error(getPathsIgnoreInvalid(configFile));
+            }
+            config.pathsIgnore.push(path);
+        });
+    }
+    if (PATHS_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
+            throw new Error(getPathsInvalid(configFile));
+        }
+        parsedYAML[PATHS_PROPERTY].forEach(path => {
+            if (typeof path !== "string" || path === '') {
+                throw new Error(getPathsInvalid(configFile));
+            }
+            config.paths.push(path);
+        });
+    }
     return config;
 }
+function getConfigFolder() {
+    return util.getRequiredEnvParam('RUNNER_TEMP');
+}
+function getConfigFile() {
+    return path.join(getConfigFolder(), 'config');
+}
+exports.getConfigFile = getConfigFile;
 async function saveConfig(config) {
     const configString = JSON.stringify(config);
-    await io.mkdirP(configFolder);
-    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
+    await io.mkdirP(getConfigFolder());
+    fs.writeFileSync(getConfigFile(), configString, 'utf8');
     core.debug('Saved config:');
     core.debug(configString);
 }
 async function loadConfig() {
-    const configFile = path.join(configFolder, 'config');
+    const configFile = getConfigFile();
     if (fs.existsSync(configFile)) {
         const configString = fs.readFileSync(configFile, 'utf8');
         core.debug('Loaded config:');
@@ -133,3 +250,4 @@ async function loadConfig() {
     }
 }
 exports.loadConfig = loadConfig;
+//# sourceMappingURL=config-utils.js.map
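The new `addQuery` logic above resolves a local `uses: ./...` path against `GITHUB_WORKSPACE`, requires it to exist, and rejects paths that escape the workspace via `..` or symlinks. A minimal sketch of the same containment check (the function name here is hypothetical):

```python
import os

def resolve_local_query_path(workspace: str, local_path: str) -> str:
    # Mirrors the validation in addQuery above: the path must exist,
    # and after resolving symlinks it must stay inside the workspace.
    workspace = os.path.realpath(workspace)
    absolute = os.path.join(workspace, local_path)
    if not os.path.exists(absolute):
        raise ValueError(f'local path "{local_path}" does not exist in the repository')
    absolute = os.path.realpath(absolute)
    if not (absolute + os.sep).startswith(workspace + os.sep):
        raise ValueError(f'local path "{local_path}" is outside of the repository')
    return absolute
```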
lib/config-utils.js.map (new file: 1 line; file diff suppressed because one or more lines are too long)
162
lib/config-utils.test.js
generated
Normal file
162
lib/config-utils.test.js
generated
Normal file
@@ -0,0 +1,162 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const util = __importStar(require("./util"));
|
||||
function setInput(name, value) {
|
||||
// Transformation copied from
|
||||
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
if (value !== undefined) {
process.env[envVar] = value;
}
else {
delete process.env[envVar];
}
}
ava_1.default("load empty config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
const config = await configUtils.loadConfig();
t.deepEqual(config, new configUtils.Config());
});
});
ava_1.default("loading config saves config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const configFile = configUtils.getConfigFile();
// Sanity check the saved config file does not already exist
t.false(fs.existsSync(configFile));
const config = await configUtils.loadConfig();
// The saved config file should now exist
t.true(fs.existsSync(configFile));
// And the contents should parse correctly to the config that was returned
t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
});
});
ava_1.default("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', '../input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
}
});
});
ava_1.default("load non-existent input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
t.false(fs.existsSync(path.join(tmpDir, 'input')));
setInput('config-file', 'input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
}
});
});
ava_1.default("load non-empty input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Just create a generic config object with non-default values for all fields
const inputFileContents = `
name: my config
disable-default-queries: true
queries:
- uses: ./
- uses: ./foo
- uses: foo/bar@dev
paths-ignore:
- a
- b
paths:
- c/d`;
fs.mkdirSync(path.join(tmpDir, 'foo'));
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.name = 'my config';
expectedConfig.disableDefaultQueries = true;
expectedConfig.additionalQueries.push(fs.realpathSync(tmpDir));
expectedConfig.additionalQueries.push(fs.realpathSync(path.join(tmpDir, 'foo')));
expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
expectedConfig.pathsIgnore = ['a', 'b'];
expectedConfig.paths = ['c/d'];
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
ava_1.default("load invalid input - " + testName, async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const inputFile = path.join(tmpDir, 'input');
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
setInput('config-file', 'input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
}
});
});
}
doInvalidInputTest('name invalid type', `
name:
- foo: bar`, configUtils.getNameInvalid);
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest('queries uses invalid type', `
queries:
- uses:
- hello: world`, configUtils.getQueryUsesInvalid);
function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
// Invalid contents of a "queries.uses" field.
// Should fail with the expected error message
const inputFileContents = `
name: my config
queries:
- name: foo
uses: ` + input;
doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
}
// Various "uses" fields, and the errors they should produce
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
//# sourceMappingURL=config-utils.test.js.map
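The setInput helper above works because @actions/core reads each action input from an environment variable named INPUT_<name>, with spaces replaced by underscores and the name upper-cased. A minimal sketch of that round trip, assuming @actions/core is installed:

    // Minimal sketch: faking a workflow input for @actions/core.
    const core = require('@actions/core');
    // Equivalent to `with: config-file: ./codeql-config.yml` in a workflow file.
    process.env['INPUT_CONFIG-FILE'] = './codeql-config.yml';
    console.log(core.getInput('config-file')); // "./codeql-config.yml"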
1 lib/config-utils.test.js.map Normal file
File diff suppressed because one or more lines are too long
4 lib/external-queries.js generated
@@ -11,8 +11,9 @@ const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const util = __importStar(require("./util"));
async function checkoutExternalQueries(config) {
const folder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
const folder = util.getRequiredEnvParam('RUNNER_TEMP');
for (const externalQuery of config.externalQueries) {
core.info('Checking out ' + externalQuery.repository);
const checkoutLocation = path.join(folder, externalQuery.repository);
@@ -29,3 +30,4 @@ async function checkoutExternalQueries(config) {
}
}
exports.checkoutExternalQueries = checkoutExternalQueries;
//# sourceMappingURL=external-queries.js.map
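checkoutExternalQueries now places each external query repository under RUNNER_TEMP and checks it out at the ref pinned in the config. A minimal sketch of that clone-and-pin pattern, using the same @actions/exec module the file imports (checkoutAtRef is a hypothetical name, not part of the action):

    const exec = require('@actions/exec');
    const path = require('path');

    // Hypothetical helper mirroring the checkout pattern used above.
    async function checkoutAtRef(repository, ref, folder) {
        const checkoutLocation = path.join(folder, repository);
        // A full clone is needed: the pinned ref may be an arbitrary commit SHA.
        await exec.exec('git', ['clone', 'https://github.com/' + repository + '.git', checkoutLocation]);
        await exec.exec('git', ['-C', checkoutLocation, 'checkout', ref]);
        return checkoutLocation;
    }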
1 lib/external-queries.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}
31 lib/external-queries.test.js generated Normal file
@@ -0,0 +1,31 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const externalQueries = __importStar(require("./external-queries"));
const util = __importStar(require("./util"));
ava_1.default("checkoutExternalQueries", async (t) => {
let config = new configUtils.Config();
config.externalQueries = [
new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
];
await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
await externalQueries.checkoutExternalQueries(config);
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
});
});
//# sourceMappingURL=external-queries.test.js.map
1 lib/external-queries.test.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,6CAA+B;AAE/B,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACtC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACrB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAChG,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACjC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IACjF,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"}
112 lib/finalize-db.js generated
@@ -11,12 +11,49 @@ const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const externalQueries = __importStar(require("./external-queries"));
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
/**
* A list of queries from https://github.com/github/codeql that
* we don't want to run. Disabling them here is a quicker alternative to
* disabling them in the code scanning query suites. Queries should also
* be disabled in the suites, and removed from this list here once the
* bundle is updated to make those suite changes live.
*
* Format is a map from language to an array of path suffixes of .ql files.
*/
const DISABLED_BUILTIN_QUERIES = {
'csharp': [
'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
]
};
function queryIsDisabled(language, query) {
return (DISABLED_BUILTIN_QUERIES[language] || [])
.some(disabledQuery => query.endsWith(disabledQuery));
}
function getMemoryFlag() {
let memoryToUseMegaBytes;
const memoryToUseString = core.getInput("ram");
if (memoryToUseString) {
memoryToUseMegaBytes = Number(memoryToUseString);
if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
throw new Error("Invalid RAM setting \"" + memoryToUseString + "\" specified.");
}
}
else {
const totalMemoryBytes = os.totalmem();
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const systemReservedMemoryMegaBytes = 256;
memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
}
return "--ram=" + Math.floor(memoryToUseMegaBytes);
}
async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
if (scannedLanguages) {
@@ -49,26 +86,50 @@ async function finalizeDatabaseCreation(codeqlCmd, databaseFolder) {
core.endGroup();
}
}
async function runResolveQueries(codeqlCmd, queries) {
let output = '';
const options = {
listeners: {
stdout: (data) => {
output += data.toString();
}
}
};
await exec.exec(codeqlCmd, [
'resolve',
'queries',
...queries,
'--format=bylanguage'
], options);
return JSON.parse(output);
}
async function resolveQueryLanguages(codeqlCmd, config) {
let res = new Map();
if (config.additionalQueries.length !== 0) {
let resolveQueriesOutput = '';
const options = {
listeners: {
stdout: (data) => {
resolveQueriesOutput += data.toString();
}
if (!config.disableDefaultQueries || config.additionalSuites.length !== 0) {
const suites = [];
for (const language of await util.getLanguages()) {
if (!config.disableDefaultQueries) {
suites.push(language + '-code-scanning.qls');
}
};
await exec.exec(codeqlCmd, [
'resolve',
'queries',
...config.additionalQueries,
'--format=bylanguage'
], options);
const resolveQueriesOutputObject = JSON.parse(resolveQueriesOutput);
for (const additionalSuite of config.additionalSuites) {
suites.push(language + '-' + additionalSuite + '.qls');
}
}
const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, suites);
for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
res[language] = Object.keys(queries);
if (res[language] === undefined) {
res[language] = [];
}
res[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
}
}
if (config.additionalQueries.length !== 0) {
const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, config.additionalQueries);
for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
if (res[language] === undefined) {
res[language] = [];
}
res[language].push(...Object.keys(queries));
}
const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
@@ -88,20 +149,26 @@ async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
const queriesPerLanguage = await resolveQueryLanguages(codeqlCmd, config);
for (let database of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + database);
const queries = [];
if (!config.disableDefaultQueries) {
queries.push(database + '-code-scanning.qls');
const queries = queriesPerLanguage[database] || [];
if (queries.length === 0) {
throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
}
queries.push(...(queriesPerLanguage[database] || []));
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = path.join(databaseFolder, database + '-queries.qls');
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync(querySuite, querySuiteContents);
core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
const sarifFile = path.join(sarifFolder, database + '.sarif');
await exec.exec(codeqlCmd, [
'database',
'analyze',
getMemoryFlag(),
path.join(databaseFolder, database),
'--format=sarif-latest',
'--output=' + sarifFile,
'--no-sarif-add-snippets',
...queries
querySuite
]);
core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
core.endGroup();
@@ -126,7 +193,7 @@ async function run() {
await runQueries(codeqlCmd, databaseFolder, sarifFolder, config);
if ('true' === core.getInput('upload')) {
if (!await upload_lib.upload(sarifFolder)) {
await util.reportActionFailed('failed', 'upload');
await util.reportActionFailed('finish', 'upload');
return;
}
}
@@ -142,3 +209,4 @@ run().catch(e => {
core.setFailed("analyze action failed: " + e);
console.log(e);
});
//# sourceMappingURL=finalize-db.js.map
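getMemoryFlag above either takes the "ram" input verbatim (in megabytes) or derives a default from the machine's total memory minus a 256 MB system reserve. A worked sketch of the default path, assuming only Node's os module:

    const os = require('os');
    // Default --ram value: total memory in MB minus a 256 MB system reserve.
    const totalMemoryMegaBytes = os.totalmem() / (1024 * 1024);
    console.log('--ram=' + Math.floor(totalMemoryMegaBytes - 256));
    // e.g. a runner with 7168 MB of RAM produces "--ram=6912"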
1 lib/finalize-db.js.map Normal file
File diff suppressed because one or more lines are too long
1 lib/fingerprints.js generated
@@ -245,3 +245,4 @@ function addFingerprints(sarifContents) {
return JSON.stringify(sarif);
}
exports.addFingerprints = addFingerprints;
//# sourceMappingURL=fingerprints.js.map
1 lib/fingerprints.js.map Normal file
File diff suppressed because one or more lines are too long
157 lib/fingerprints.test.js generated Normal file
@@ -0,0 +1,157 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const fingerprints = __importStar(require("./fingerprints"));
function testHash(t, input, expectedHashes) {
let index = 0;
let callback = function (lineNumber, hash) {
t.is(lineNumber, index + 1);
t.is(hash, expectedHashes[index]);
index++;
};
fingerprints.hash(callback, input);
t.is(index, input.split(/\r\n|\r|\n/).length);
}
ava_1.default('hash', (t) => {
// Try empty file
testHash(t, "", ["c129715d7a2bc9a3:1"]);
// Try various combinations of newline characters
testHash(t, " a\nb\n \t\tc\n d", [
"271789c17abda88f:1",
"54703d4cd895b18:1",
"180aee12dab6264:1",
"a23a3dc5e078b07b:1"
]);
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
"8b7cf3e952e7aeb2:1",
"b1ae1287ec4718d9:1",
"bff680108adb0fcc:1",
"c6805c5e1288b612:1",
"b86d3392aea1be30:1",
"e6ceba753e1a442:1",
]);
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
// Try repeating line that will generate identical hashes
testHash(t, "Lorem ipsum dolor sit amet.\n".repeat(10), [
"a7f2ff13bc495cf2:1",
"a7f2ff13bc495cf2:2",
"a7f2ff13bc495cf2:3",
"a7f2ff13bc495cf2:4",
"a7f2ff13bc495cf2:5",
"a7f2ff13bc495cf2:6",
"a7f2ff1481e87703:1",
"a9cf91f7bbf1862b:1",
"55ec222b86bcae53:1",
"cc97dc7b1d7d8f7b:1",
"c129715d7a2bc9a3:1"
]);
});
function testResolveUriToFile(uri, index, artifactsURIs) {
const location = { "uri": uri, "index": index };
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
return fingerprints.resolveUriToFile(location, artifacts);
}
ava_1.default('resolveUriToFile', t => {
// The resolveUriToFile method checks that the file exists and is in the right directory
// so we need to give it real files to look at. We will use this file as an example.
// For this to work we require the current working directory to be a parent, but this
// should generally always be the case so this is fine.
const cwd = process.cwd();
const filepath = __filename;
t.true(filepath.startsWith(cwd + '/'));
const relativeFilepath = filepath.substring(cwd.length + 1);
process.env['GITHUB_WORKSPACE'] = cwd;
// Absolute paths are unmodified
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
// Relative paths are made absolute
t.is(testResolveUriToFile(relativeFilepath, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + relativeFilepath, undefined, []), filepath);
// Absolute paths outside the src root are discarded
t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
// Other schemes are discarded
t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
// Invalid URIs are discarded
t.is(testResolveUriToFile(1, undefined, []), undefined);
t.is(testResolveUriToFile(undefined, undefined, []), undefined);
// Non-existent files are discarded
t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
// Index is resolved
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
// Invalid indexes are discarded
t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
});
ava_1.default('addFingerprints', t => {
// Run an end-to-end test on a test file
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input), expected);
});
ava_1.default('missingRegions', t => {
// Run an end-to-end test on a test file
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input), expected);
});
//# sourceMappingURL=fingerprints.test.js.map
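The expected values in these tests show the fingerprint format: a 64-bit context hash in hex, a colon, and an occurrence counter that disambiguates lines whose hashes collide (hence the ":1" through ":6" run in the repeated-line case). A minimal sketch of driving the callback API, assuming lib/fingerprints.js is on the require path:

    const fingerprints = require('./fingerprints');
    // Collect one "<hex hash>:<occurrence>" fingerprint per input line.
    const collected = [];
    fingerprints.hash((lineNumber, hash) => collected.push(lineNumber + ' -> ' + hash), 'foo\nbar\n');
    console.log(collected);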
1 lib/fingerprints.test.js.map Normal file
File diff suppressed because one or more lines are too long
45 lib/setup-tools.js generated
@@ -10,6 +10,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const toolcache = __importStar(require("@actions/tool-cache"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
class CodeQLSetup {
constructor(codeqlDist) {
this.dist = codeqlDist;
@@ -35,17 +36,41 @@ class CodeQLSetup {
}
exports.CodeQLSetup = CodeQLSetup;
async function setupCodeQL() {
const version = '1.0.0';
const codeqlURL = core.getInput('tools', { required: true });
let codeqlFolder = toolcache.find('CodeQL', version);
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
try {
const codeqlURL = core.getInput('tools', { required: true });
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
}
else {
const codeqlPath = await toolcache.downloadTool(codeqlURL);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
}
return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
}
else {
const codeqlPath = await toolcache.downloadTool(codeqlURL);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
catch (e) {
core.error(e);
throw new Error("Unable to download and extract CodeQL CLI");
}
return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url) {
const match = url.match(/\/codeql-bundle-(.*)\//);
if (match === null || match.length < 2) {
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
}
let version = match[1];
if (!semver.valid(version)) {
core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = '0.0.0-' + version;
}
const s = semver.clean(version);
if (!s) {
throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but got ${version} instead`);
}
return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
//# sourceMappingURL=setup-tools.js.map
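getCodeQLURLVersion extracts the bundle tag from the download URL and coerces non-SemVer tags into a 0.0.0 pre-release so the tool cache can key on them. A usage sketch (the URL is illustrative, matching the example.com URLs used in the tests below):

    const setupTools = require('./setup-tools');
    // Date-stamped bundle tags are not valid SemVer, so they are cached
    // under a 0.0.0 pre-release version instead.
    const url = 'https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz';
    console.log(setupTools.getCodeQLURLVersion(url)); // "0.0.0-20200601"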
1 lib/setup-tools.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAC7B,+CAAiC;AAEjC,MAAa,WAAW;IAMpB,YAAY,UAAkB;QAC1B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAC9B,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC7B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACtB;SACJ;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACrC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC7B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACtC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SAC3B;aAAM;YACH,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC/D;IACL,CAAC;CACJ;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC7B,IAAI;QACA,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAExD,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAC9D,IAAI,YAAY,EAAE;YACd,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACvD;aAAM;YACH,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,gBAAgB,CAAC,CAAC;SACxF;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE7D;IAAC,OAAO,CAAC,EAAE;QACR,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAChE;AACL,CAAC;AAnBD,kCAmBC;AAED,SAAgB,mBAAmB,CAAC,GAAW;IAE3C,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;IAClD,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACpC,MAAM,IAAI,KAAK,CAAC,wBAAwB,GAAG,iCAAiC,CAAC,CAAC;KACjF;IAED,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAEvB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;QACxB,IAAI,CAAC,KAAK,CAAC,kBAAkB,OAAO,gEAAgE,OAAO,GAAG,CAAC,CAAC;QAChH,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC;KAChC;IAED,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,CAAC,CAAC,EAAE;QACJ,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,iDAAiD,OAAO,UAAU,CAAC,CAAC;KACjH;IAED,OAAO,CAAC,CAAC;AACb,CAAC;AApBD,kDAoBC"}
58 lib/setup-tools.test.js generated Normal file
@@ -0,0 +1,58 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const setupTools = __importStar(require("./setup-tools"));
const util = __importStar(require("./util"));
ava_1.default('download codeql bundle cache', async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env['GITHUB_WORKSPACE'] = tmpDir;
process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
const versions = ['20200601', '20200610'];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
nock_1.default('https://example.com')
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
await setupTools.setupCodeQL();
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
}
const cachedVersions = toolcache.findAllVersions('CodeQL');
t.is(cachedVersions.length, 2);
});
});
ava_1.default('parse codeql bundle url version', t => {
const tests = {
'20200601': '0.0.0-20200601',
'20200601.0': '0.0.0-20200601.0',
'20200601.0.0': '20200601.0.0',
'1.2.3': '1.2.3',
'1.2.3-alpha': '1.2.3-alpha',
'1.2.3-beta.1': '1.2.3-beta.1',
};
for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`;
try {
const parsedVersion = setupTools.getCodeQLURLVersion(url);
t.deepEqual(parsedVersion, expectedVersion);
}
catch (e) {
t.fail(e.message);
}
}
});
//# sourceMappingURL=setup-tools.test.js.map
1 lib/setup-tools.test.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.test.js","sourceRoot":"","sources":["../src/setup-tools.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,0DAA4C;AAC5C,6CAA+B;AAE/B,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE3C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEjC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACtC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACtB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGvF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,UAAU,CAAC,WAAW,EAAE,CAAC;YAE/B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SAC1D;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACnC,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAExC,MAAM,KAAK,GAAG;QACV,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KACjC,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC5D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACA,MAAM,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YAC1D,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC/C;QAAC,OAAO,CAAC,EAAE;YACR,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACrB;KACJ;AACL,CAAC,CAAC,CAAC"}
14 lib/setup-tracer.js generated
@@ -100,12 +100,13 @@ function concatTracerConfigs(configs) {
totalCount += count;
totalLines.push(...lines.slice(2));
}
const newLogFilePath = path.resolve(util.workspaceFolder(), 'compound-build-tracer.log');
const spec = path.resolve(util.workspaceFolder(), 'compound-spec');
const tempFolder = path.resolve(util.workspaceFolder(), 'compound-temp');
const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
const spec = path.resolve(tempFolder, 'compound-spec');
const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
if (copyExecutables) {
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = tempFolder;
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
envSize += 1;
}
fs.writeFileSync(spec, newSpecContent.join('\n'));
@@ -156,7 +157,7 @@ async function run() {
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
core.exportVariable('CODEQL_RAM', codeqlRam);
const databaseFolder = path.resolve(util.workspaceFolder(), 'codeql_databases');
const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
await io.mkdirP(databaseFolder);
let tracedLanguages = {};
let scannedLanguages = [];
@@ -205,10 +206,11 @@ async function run() {
await util.reportActionFailed('init', error.message, error.stack);
return;
}
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
await util.reportActionSucceeded('init');
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
}
run().catch(e => {
core.setFailed("init action failed: " + e);
console.log(e);
});
//# sourceMappingURL=setup-tracer.js.map
1 lib/setup-tracer.js.map Normal file
File diff suppressed because one or more lines are too long
2 lib/shared-environment.js generated
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_ACTION_CMD = 'CODEQL_ACTION_CMD';
exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
@@ -14,3 +15,4 @@ exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
// Populated when the init action completes successfully
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
//# sourceMappingURL=shared-environment.js.map
1 lib/shared-environment.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,iBAAiB,GAAG,mBAAmB,CAAC;AACxC,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AACpD,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}
1 lib/tracer-env.js generated
@@ -18,3 +18,4 @@ for (let entry of Object.entries(process.env)) {
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
//# sourceMappingURL=tracer-env.js.map
1 lib/tracer-env.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC3C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACpF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KACpB;CACJ;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}
148 lib/upload-lib.js generated
@@ -13,26 +13,14 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const http = __importStar(require("@actions/http-client"));
const auth = __importStar(require("@actions/http-client/auth"));
const io = __importStar(require("@actions/io"));
const file_url_1 = __importDefault(require("file-url"));
const fs = __importStar(require("fs"));
const jsonschema = __importStar(require("jsonschema"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
const fingerprints = __importStar(require("./fingerprints"));
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
// Construct the location of the sentinel file for detecting multiple uploads.
// The returned location should be writable.
async function getSentinelFilePath() {
// Use the temp dir instead of placing next to the sarif file because of
// issues with docker actions. The directory containing the sarif file
// may not be writable by us.
const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
await io.mkdirP(uploadsTmpDir);
// Hash the absolute path so we'll behave correctly in the unlikely
// scenario a file is referenced twice with different paths.
return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
}
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
@@ -58,6 +46,11 @@ exports.combineSarifFiles = combineSarifFiles;
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload) {
core.info('Uploading results');
// If in test mode we don't want to upload the results
const testMode = process.env['TEST_MODE'] === 'true' || false;
if (testMode) {
return true;
}
const githubToken = core.getInput('token');
const ph = new auth.BearerCredentialHandler(githubToken);
const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
@@ -120,60 +113,97 @@ async function upload(input) {
}
}
exports.upload = upload;
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) {
let numResults = 0;
for (const run of JSON.parse(sarif).runs) {
numResults += run.results.length;
}
return numResults;
}
exports.countResultsInSarif = countResultsInSarif;
// Validates that the given file path refers to a valid SARIF file.
// Returns true if the file is valid. If the file is invalid, reports
// the validation errors via core.setFailed and returns false.
function validateSarifFileSchema(sarifFilePath) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
const result = new jsonschema.Validator().validate(sarif, schema);
if (result.valid) {
return true;
}
else {
// Set the failure message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
const errorMessages = result.errors.map(e => "- " + e.stack);
core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));
// Also output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
core.startGroup("Error details: " + error.stack);
core.info(JSON.stringify(error, null, 2));
core.endGroup();
}
return false;
}
}
exports.validateSarifFileSchema = validateSarifFileSchema;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles) {
core.startGroup("Uploading results");
let succeeded = false;
try {
// Check if an upload has happened before. If so then abort.
// This is intended to catch when the finish and upload-sarif actions
// are used together, and then the upload-sarif action is invoked twice.
const sentinelFile = await getSentinelFilePath();
if (fs.existsSync(sentinelFile)) {
core.info("Aborting as an upload has already happened from this job");
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
return false;
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
if (!validateSarifFileSchema(file)) {
return false;
}
const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRequiredEnvParam('GITHUB_REF'); // it's in the form "refs/heads/master"
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = file_url_1.default(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
return false;
}
let matrix = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const toolNames = util.getToolNames(sarifPayload);
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt,
"tool_names": toolNames,
});
// Make the upload
succeeded = await uploadPayload(payload);
// Mark that we have made an upload
fs.writeFileSync(sentinelFile, '');
}
catch (error) {
core.setFailed(error.message);
const commitOid = await util.getCommitOid();
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRef();
const analysisKey = await util.getAnalysisKey();
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = file_url_1.default(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
return false;
}
let matrix = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const toolNames = util.getToolNames(sarifPayload);
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt,
"tool_names": toolNames,
});
// Log some useful debug info about the upload
core.debug("Raw upload size: " + sarifPayload.length + " bytes");
core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
// Make the upload
const succeeded = await uploadPayload(payload);
core.endGroup();
return succeeded;
}
//# sourceMappingURL=upload-lib.js.map
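The rewritten guard in uploadFiles swaps the sentinel file for an exported environment variable: core.exportVariable makes CODEQL_UPLOAD_SARIF visible to every later step in the same job, so a second analyze/upload-sarif invocation bails out before uploading. A minimal sketch of the pattern, assuming @actions/core:

    const core = require('@actions/core');
    const sentinelEnvVar = 'CODEQL_UPLOAD_SARIF';
    if (process.env[sentinelEnvVar]) {
        // A previous step in this job already uploaded; refuse to do it twice.
        throw new Error('Only one SARIF upload is allowed per job');
    }
    core.exportVariable(sentinelEnvVar, sentinelEnvVar);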
1 lib/upload-lib.js.map Normal file
File diff suppressed because one or more lines are too long
25 lib/upload-lib.test.js generated Normal file
@@ -0,0 +1,25 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const uploadLib = __importStar(require("./upload-lib"));
ava_1.default('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
t.true(uploadLib.validateSarifFileSchema(inputFile));
});
ava_1.default('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
t.false(uploadLib.validateSarifFileSchema(inputFile));
// validateSarifFileSchema calls core.setFailed which sets the exit code on error
process.exitCode = 0;
});
//# sourceMappingURL=upload-lib.test.js.map
1 lib/upload-lib.test.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,wDAA0C;AAE1C,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;IACtD,iFAAiF;IACjF,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}
1 lib/upload-sarif.js generated
@@ -32,3 +32,4 @@ run().catch(e => {
core.setFailed("codeql/upload-sarif action failed: " + e);
console.log(e);
});
//# sourceMappingURL=upload-sarif.js.map
1 lib/upload-sarif.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,GAAG;IACd,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,EAAE;QAC9F,OAAO;KACV;IAED,IAAI;QACA,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE;YACtD,MAAM,IAAI,CAAC,qBAAqB,CAAC,cAAc,CAAC,CAAC;SACpD;aAAM;YACH,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;SAC3D;KACJ;IAAC,OAAO,KAAK,EAAE;QACZ,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1E,OAAO;KACV;AACL,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACZ,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACnB,CAAC,CAAC,CAAC"}
107 lib/util.js generated
@@ -11,10 +11,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const exec = __importStar(require("@actions/exec"));
|
||||
const http = __importStar(require("@actions/http-client"));
|
||||
const auth = __importStar(require("@actions/http-client/auth"));
|
||||
const octokit = __importStar(require("@octokit/rest"));
|
||||
const console_log_level_1 = __importDefault(require("console-log-level"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
/**
|
||||
@@ -31,12 +34,6 @@ function should_abort(actionName, requireInitActionHasRun) {
|
||||
core.setFailed('GITHUB_REF must be set.');
|
||||
return true;
|
||||
}
|
||||
// Should abort if called on a merge commit for a pull request.
|
||||
if (ref.startsWith('refs/pull/')) {
|
||||
core.warning('The CodeQL ' + actionName + ' action is intended for workflows triggered on `push` events, '
|
||||
+ 'but the current workflow is running on a pull request. Aborting.');
|
||||
return true;
|
||||
}
|
||||
// If the init action is required, then check the it completed successfully.
|
||||
if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
|
||||
core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
|
||||
@@ -45,16 +42,6 @@ function should_abort(actionName, requireInitActionHasRun) {
|
||||
return false;
|
||||
}
|
||||
exports.should_abort = should_abort;
|
||||
/**
|
||||
* Resolve the path to the workspace folder.
|
||||
*/
|
||||
function workspaceFolder() {
|
||||
let workspaceFolder = process.env['RUNNER_WORKSPACE'];
|
||||
if (!workspaceFolder)
|
||||
workspaceFolder = path.resolve('..');
|
||||
return workspaceFolder;
|
||||
}
|
||||
exports.workspaceFolder = workspaceFolder;
|
||||
/**
|
||||
* Get an environment parameter, but throw an error if it is not set.
|
||||
*/
|
||||
@@ -149,6 +136,82 @@ async function getLanguages() {
|
||||
return languages;
|
||||
}
|
||||
exports.getLanguages = getLanguages;
|
||||
/**
|
||||
* Gets the SHA of the commit that is currently checked out.
|
||||
*/
|
||||
async function getCommitOid() {
|
||||
let commitOid = '';
|
||||
await exec.exec('git', ['rev-parse', 'HEAD'], {
|
||||
silent: true,
|
||||
listeners: {
|
||||
stdout: (data) => { commitOid += data.toString(); },
|
||||
stderr: (data) => { process.stderr.write(data); }
|
||||
}
|
||||
});
|
||||
return commitOid.trim();
|
||||
}
|
||||
exports.getCommitOid = getCommitOid;
|
||||
/**
|
||||
* Get the path of the currently executing workflow.
|
||||
*/
|
||||
async function getWorkflowPath() {
|
||||
const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
|
||||
const owner = repo_nwo[0];
|
||||
const repo = repo_nwo[1];
|
||||
const run_id = getRequiredEnvParam('GITHUB_RUN_ID');
|
||||
const ok = new octokit.Octokit({
|
||||
auth: core.getInput('token'),
|
||||
userAgent: "CodeQL Action",
|
||||
log: console_log_level_1.default({ level: 'debug' })
|
||||
});
|
||||
const runsResponse = await ok.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
|
||||
owner,
|
||||
repo,
|
||||
run_id
|
||||
});
|
||||
const workflowUrl = runsResponse.data.workflow_url;
|
||||
const workflowResponse = await ok.request('GET ' + workflowUrl);
|
||||
return workflowResponse.data.path;
|
||||
}
|
||||
/**
|
||||
* Get the analysis key paramter for the current job.
|
||||
*
|
||||
* This will combine the workflow path and current job name.
|
||||
* Computing this the first time requires making requests to
|
||||
* the github API, but after that the result will be cached.
|
||||
*/
|
||||
async function getAnalysisKey() {
|
||||
let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
|
||||
if (analysisKey !== undefined) {
|
||||
return analysisKey;
|
||||
}
|
||||
const workflowPath = await getWorkflowPath();
|
||||
const jobName = getRequiredEnvParam('GITHUB_JOB');
|
||||
analysisKey = workflowPath + ':' + jobName;
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
|
||||
return analysisKey;
|
||||
}
|
||||
exports.getAnalysisKey = getAnalysisKey;
|
||||
/**
|
||||
* Get the ref currently being analyzed.
|
||||
*/
|
||||
function getRef() {
|
||||
// Will be in the form "refs/heads/master" on a push event
|
||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||
const ref = getRequiredEnvParam('GITHUB_REF');
|
||||
// For pull request refs we want to convert from the 'merge' ref
|
||||
// to the 'head' ref, as that is what we want to analyse.
|
||||
// There should have been some code earlier in the workflow to do
|
||||
// the checkout, but we have no way of verifying that here.
|
||||
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
|
||||
if (pull_ref_regex.test(ref)) {
|
||||
return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
|
||||
}
|
||||
else {
|
||||
return ref;
|
||||
}
|
||||
}
|
||||
exports.getRef = getRef;
/**
 * Compose a StatusReport.
 *
@@ -159,6 +222,7 @@ exports.getLanguages = getLanguages;
 */
async function createStatusReport(actionName, status, cause, exception) {
    const commitOid = process.env['GITHUB_SHA'] || '';
    const ref = getRef();
    const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
    let workflowRunID = -1;
    if (workflowRunIDStr) {
@@ -175,6 +239,7 @@ async function createStatusReport(actionName, status, cause, exception) {
        job_name: jobName,
        languages: languages,
        commit_oid: commitOid,
        ref: ref,
        action_name: actionName,
        action_oid: "unknown",
        started_at: startedAt,
@@ -280,3 +345,13 @@ function getToolNames(sarifContents) {
    return Object.keys(toolNames);
}
exports.getToolNames = getToolNames;
// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
async function withTmpDir(body) {
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
    const result = await body(tmpDir);
    fs.rmdirSync(tmpDir, { recursive: true });
    return result;
}
exports.withTmpDir = withTmpDir;
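A usage sketch for the helper above (file name and return value are hypothetical):

```js
const util = require('./util');
const fs = require('fs');
const path = require('path');

async function demo() {
    // The directory exists only for the duration of the callback.
    const answer = await util.withTmpDir(async tmpDir => {
        fs.writeFileSync(path.join(tmpDir, 'scratch.txt'), 'hello');
        return 42;
    });
    console.log(answer); // 42; the temporary directory has been deleted by now
}
```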
//# sourceMappingURL=util.js.map
1 lib/util.js.map Normal file
File diff suppressed because one or more lines are too long
21 lib/util.test.js generated Normal file
@@ -0,0 +1,21 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const util = __importStar(require("./util"));
ava_1.default('getToolNames', t => {
    const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
    const toolNames = util.getToolNames(input);
    t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
//# sourceMappingURL=util.test.js.map
1 lib/util.test.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"util.test.js","sourceRoot":"","sources":["../src/util.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AAEzB,6CAA+B;AAE/B,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACvB,MAAM,KAAK,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,GAAG,mCAAmC,EAAE,MAAM,CAAC,CAAC;IACvF,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC"}
1 node_modules/.bin/atob generated vendored
@@ -1 +0,0 @@
../atob/bin/atob.js
1 node_modules/.bin/ava generated vendored Symbolic link
@@ -0,0 +1 @@
../ava/cli.js
1 node_modules/.bin/escodegen generated vendored
@@ -1 +0,0 @@
../escodegen/bin/escodegen.js
1 node_modules/.bin/esgenerate generated vendored
@@ -1 +0,0 @@
../escodegen/bin/esgenerate.js
1 node_modules/.bin/esparse generated vendored
@@ -1 +0,0 @@
../esprima/bin/esparse.js
1 node_modules/.bin/esvalidate generated vendored
@@ -1 +0,0 @@
../esprima/bin/esvalidate.js
1 node_modules/.bin/jest generated vendored
@@ -1 +0,0 @@
../jest/bin/jest.js
1 node_modules/.bin/jest-runtime generated vendored
@@ -1 +0,0 @@
../jest-runtime/bin/jest-runtime.js
1 node_modules/.bin/jsesc generated vendored
@@ -1 +0,0 @@
../jsesc/bin/jsesc
1 node_modules/.bin/json5 generated vendored
@@ -1 +0,0 @@
../json5/lib/cli.js
1 node_modules/.bin/parser generated vendored
@@ -1 +0,0 @@
../@babel/parser/bin/babel-parser.js
1 node_modules/.bin/rc generated vendored Symbolic link
@@ -0,0 +1 @@
../rc/cli.js
1 node_modules/.bin/removeNPMAbsolutePaths generated vendored Symbolic link
@@ -0,0 +1 @@
../removeNPMAbsolutePaths/bin/removeNPMAbsolutePaths
1 node_modules/.bin/sane generated vendored
@@ -1 +0,0 @@
../sane/src/cli.js
2 node_modules/.bin/semver generated vendored
@@ -1 +1 @@
../semver/bin/semver
../semver/bin/semver.js
1 node_modules/.bin/sshpk-conv generated vendored
@@ -1 +0,0 @@
../sshpk/bin/sshpk-conv
1 node_modules/.bin/sshpk-sign generated vendored
@@ -1 +0,0 @@
../sshpk/bin/sshpk-sign
1 node_modules/.bin/sshpk-verify generated vendored
@@ -1 +0,0 @@
../sshpk/bin/sshpk-verify
1 node_modules/.bin/ts-jest generated vendored
@@ -1 +0,0 @@
../ts-jest/cli.js
1 node_modules/.bin/watch generated vendored
@@ -1 +0,0 @@
../@cnakazawa/watch/cli.js
8 node_modules/@actions/http-client/README.md generated vendored
@@ -18,6 +18,8 @@ A lightweight HTTP client optimized for use with actions, TypeScript with generi
- Basic, Bearer and PAT Support out of the box. Extensible handlers for others.
- Redirects supported

Features and releases [here](./RELEASES.md)

## Install

```
@@ -49,7 +51,11 @@ export NODE_DEBUG=http

## Node support

The http-client is built using the latest LTS version of Node 12. We also support the latest LTS for Node 6, 8 and Node 10.
The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node12+.

## Support and Versioning

We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat).

## Contributing

16 node_modules/@actions/http-client/RELEASES.md generated vendored Normal file
@@ -0,0 +1,16 @@
## Releases

## 1.0.7
Update NPM dependencies and add 429 to the list of HttpCodes

## 1.0.6
Automatically sends Content-Type and Accept application/json headers for \<verb>Json() helper methods if not set in the client or parameters.

## 1.0.5
Adds \<verb>Json() helper methods for json over http scenarios.

## 1.0.4
Started to add \<verb>Json() helper methods. Do not use this release for that. Use >= 1.0.5 since there was an issue with types.

## 1.0.1 to 1.0.3
Adds proxy support.
7 node_modules/@actions/http-client/auth.js generated vendored
@@ -6,7 +6,9 @@ class BasicCredentialHandler {
        this.password = password;
    }
    prepareRequest(options) {
        options.headers['Authorization'] = 'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64');
        options.headers['Authorization'] =
            'Basic ' +
            Buffer.from(this.username + ':' + this.password).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
@@ -42,7 +44,8 @@ class PersonalAccessTokenCredentialHandler {
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
        options.headers['Authorization'] =
            'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {

24 node_modules/@actions/http-client/index.d.ts generated vendored
@@ -1,5 +1,5 @@
/// <reference types="node" />
import http = require("http");
import http = require('http');
import ifm = require('./interfaces');
export declare enum HttpCodes {
    OK = 200,
@@ -23,12 +23,20 @@ export declare enum HttpCodes {
    RequestTimeout = 408,
    Conflict = 409,
    Gone = 410,
    TooManyRequests = 429,
    InternalServerError = 500,
    NotImplemented = 501,
    BadGateway = 502,
    ServiceUnavailable = 503,
    GatewayTimeout = 504
}
export declare enum Headers {
    Accept = "accept",
    ContentType = "content-type"
}
export declare enum MediaTypes {
    ApplicationJson = "application/json"
}
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -39,11 +47,6 @@ export declare class HttpClientResponse implements ifm.IHttpClientResponse {
    message: http.IncomingMessage;
    readBody(): Promise<string>;
}
export interface ITypedResponse<T> {
    statusCode: number;
    result: T | null;
    headers: Object;
}
export declare function isHttps(requestUrl: string): boolean;
export declare class HttpClient {
    userAgent: string | undefined;
@@ -73,10 +76,10 @@ export declare class HttpClient {
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
    postJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
    putJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
    patchJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
    getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
    postJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
    putJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
    patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
@@ -108,6 +111,7 @@ export declare class HttpClient {
    getAgent(serverUrl: string): http.Agent;
    private _prepareRequest;
    private _mergeHeaders;
    private _getExistingOrDefaultHeader;
    private _getAgent;
    private _performExponentialBackoff;
    private static dateTimeDeserializer;

111 node_modules/@actions/http-client/index.js generated vendored
@@ -28,12 +28,22 @@ var HttpCodes;
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -43,8 +53,18 @@ function getProxyUrl(serverUrl) {
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
@@ -136,22 +156,29 @@ class HttpClient {
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    async getJson(requestUrl, additionalHeaders) {
    async getJson(requestUrl, additionalHeaders = {}) {
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        let res = await this.get(requestUrl, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async postJson(requestUrl, obj, additionalHeaders) {
    async postJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.post(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async putJson(requestUrl, obj, additionalHeaders) {
    async putJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.put(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async patchJson(requestUrl, obj, additionalHeaders) {
    async patchJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.patch(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
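A quick illustration of the \<verb>Json() helpers shown in this hunk; the endpoint URL is hypothetical:

```js
const httpm = require('@actions/http-client');

async function demo() {
    const client = new httpm.HttpClient('my-user-agent');
    // Accept (and, for writes, Content-Type) default to application/json
    // when the caller does not supply them.
    const res = await client.getJson('https://api.example.invalid/widgets/1');
    console.log(res.statusCode, res.result); // result is null for a 404
}
```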
@@ -162,18 +189,22 @@ class HttpClient {
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error("Client has already been disposed.");
            throw new Error('Client has already been disposed.');
        }
        let parsedUrl = url.parse(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
@@ -191,21 +222,32 @@ class HttpClient {
                }
            }
            let redirectsRemaining = this._maxRedirects;
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
                && this._allowRedirects
                && redirectsRemaining > 0) {
                const redirectUrl = response.message.headers["location"];
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
                this._allowRedirects &&
                redirectsRemaining > 0) {
                const redirectUrl = response.message.headers['location'];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                let parsedRedirectUrl = url.parse(redirectUrl);
                if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) {
                    throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.");
                if (parsedUrl.protocol == 'https:' &&
                    parsedUrl.protocol != parsedRedirectUrl.protocol &&
                    !this._allowRedirectDowngrade) {
                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // strip authorization header if redirected to a different hostname
                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                    for (let header in headers) {
                        // header names are case insensitive
                        if (header.toLowerCase() === 'authorization') {
                            delete headers[header];
                        }
                    }
                }
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
@@ -256,8 +298,8 @@ class HttpClient {
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof (data) === 'string') {
            info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
        if (typeof data === 'string') {
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        let callbackCalled = false;
        let handleResult = (err, res) => {
@@ -270,7 +312,7 @@ class HttpClient {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', (sock) => {
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
@@ -285,10 +327,10 @@ class HttpClient {
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof (data) === 'string') {
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof (data) !== 'string') {
        if (data && typeof data !== 'string') {
            data.on('close', function () {
                req.end();
            });
@@ -315,29 +357,40 @@ class HttpClient {
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
        info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers["user-agent"] = this.userAgent;
            info.options.headers['user-agent'] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            this.handlers.forEach((handler) => {
            this.handlers.forEach(handler => {
                handler.prepareRequest(info.options);
            });
        }
        return info;
    }
    _mergeHeaders(headers) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
        }
        return lowercaseKeys(headers || {});
    }
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    _getAgent(parsedUrl) {
        let agent;
        let proxyUrl = pm.getProxyUrl(parsedUrl);
@@ -369,7 +422,7 @@ class HttpClient {
                    proxyAuth: proxyUrl.auth,
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                },
                }
            };
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
@@ -396,7 +449,9 @@ class HttpClient {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
@@ -457,7 +512,7 @@ class HttpClient {
                msg = contents;
            }
            else {
                msg = "Failed request: (" + statusCode + ")";
                msg = 'Failed request: (' + statusCode + ')';
            }
            let err = new Error(msg);
            // attach statusCode and body obj (if available) to the error object

9 node_modules/@actions/http-client/interfaces.d.ts generated vendored
@@ -1,6 +1,6 @@
/// <reference types="node" />
import http = require("http");
import url = require("url");
import http = require('http');
import url = require('url');
export interface IHeaders {
    [key: string]: any;
}
@@ -43,3 +43,8 @@ export interface IRequestOptions {
    allowRetries?: boolean;
    maxRetries?: number;
}
export interface ITypedResponse<T> {
    statusCode: number;
    result: T | null;
    headers: Object;
}

1 node_modules/@actions/http-client/interfaces.js generated vendored
@@ -1,3 +1,2 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
;

39 node_modules/@actions/http-client/node_modules/tunnel/package.json generated vendored
@@ -1,25 +1,7 @@
{
  "author": {
    "name": "Koichi Kobayashi",
    "email": "koichik@improvement.jp"
  },
  "bugs": {
    "url": "https://github.com/koichik/node-tunnel/issues"
  },
  "bundleDependencies": false,
  "deprecated": false,
  "name": "tunnel",
  "version": "0.0.6",
  "description": "Node HTTP/HTTPS Agents for tunneling proxies",
  "devDependencies": {
    "mocha": "^5.2.0",
    "should": "^13.2.3"
  },
  "directories": {
    "lib": "./lib"
  },
  "engines": {
    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
  },
  "homepage": "https://github.com/koichik/node-tunnel/",
  "keywords": [
    "http",
    "https",
@@ -27,15 +9,26 @@
    "proxy",
    "tunnel"
  ],
  "homepage": "https://github.com/koichik/node-tunnel/",
  "bugs": "https://github.com/koichik/node-tunnel/issues",
  "license": "MIT",
  "author": "Koichi Kobayashi <koichik@improvement.jp>",
  "main": "./index.js",
  "name": "tunnel",
  "directories": {
    "lib": "./lib"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/koichik/node-tunnel.git"
    "url": "https://github.com/koichik/node-tunnel.git"
  },
  "scripts": {
    "test": "mocha"
  },
  "version": "0.0.6"
  "devDependencies": {
    "mocha": "^5.2.0",
    "should": "^13.2.3"
  },
  "engines": {
    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
  }
}
62 node_modules/@actions/http-client/package.json generated vendored
@@ -1,39 +1,39 @@
{
  "author": {
    "name": "GitHub, Inc."
  },
  "bugs": {
    "url": "https://github.com/actions/http-client/issues"
  },
  "bundleDependencies": false,
  "dependencies": {
    "tunnel": "0.0.6"
  },
  "deprecated": false,
  "description": "Actions Http Client",
  "devDependencies": {
    "@types/jest": "^24.0.25",
    "@types/node": "^12.12.24",
    "jest": "^24.9.0",
    "proxy": "^1.0.1",
    "ts-jest": "^24.3.0",
    "typescript": "^3.7.4"
  },
  "homepage": "https://github.com/actions/http-client#readme",
  "keywords": [
    "Actions",
    "Http"
  ],
  "license": "MIT",
  "main": "index.js",
  "name": "@actions/http-client",
  "version": "1.0.8",
  "description": "Actions Http Client",
  "main": "index.js",
  "scripts": {
    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
    "test": "jest",
    "format": "prettier --write *.ts && prettier --write **/*.ts",
    "format-check": "prettier --check *.ts && prettier --check **/*.ts",
    "audit-check": "npm audit --audit-level=moderate"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/http-client.git"
  },
  "scripts": {
    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
    "test": "jest"
  "keywords": [
    "Actions",
    "Http"
  ],
  "author": "GitHub, Inc.",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/actions/http-client/issues"
  },
  "version": "1.0.4"
  "homepage": "https://github.com/actions/http-client#readme",
  "devDependencies": {
    "@types/jest": "^25.1.4",
    "@types/node": "^12.12.31",
    "jest": "^25.1.0",
    "prettier": "^2.0.4",
    "proxy": "^1.0.1",
    "ts-jest": "^25.2.1",
    "typescript": "^3.8.3"
  },
  "dependencies": {
    "tunnel": "0.0.6"
  }
}
13 node_modules/@actions/http-client/proxy.js generated vendored
@@ -9,12 +9,10 @@ function getProxyUrl(reqUrl) {
    }
    let proxyVar;
    if (usingSsl) {
        proxyVar = process.env["https_proxy"] ||
            process.env["HTTPS_PROXY"];
        proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
    }
    else {
        proxyVar = process.env["http_proxy"] ||
            process.env["HTTP_PROXY"];
        proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
    }
    if (proxyVar) {
        proxyUrl = url.parse(proxyVar);
@@ -26,7 +24,7 @@ function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || '';
    let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
@@ -47,7 +45,10 @@ function checkBypass(reqUrl) {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Compare request host against noproxy
    for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) {
    for (let upperNoProxyItem of noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)) {
        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
            return true;
        }
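A sketch of the NO_PROXY matching shown in this hunk; the hostnames are hypothetical, and it assumes checkBypass is exported from the module as its other helpers are:

```js
const url = require('url');
const proxy = require('@actions/http-client/proxy');

// Hosts listed in NO_PROXY are compared case-insensitively against the
// request hostname (and hostname:port).
process.env['NO_PROXY'] = 'internal.example.invalid';
console.log(proxy.checkBypass(url.parse('https://internal.example.invalid/api'))); // true
console.log(proxy.checkBypass(url.parse('https://github.com')));                   // false
```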
8 node_modules/@actions/tool-cache/README.md generated vendored
@@ -22,11 +22,11 @@ These can then be extracted in platform specific ways:
const tc = require('@actions/tool-cache');

if (process.platform === 'win32') {
    const node12Path = tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.zip');
    const node12Path = await tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.zip');
    const node12ExtractedFolder = await tc.extractZip(node12Path, 'path/to/extract/to');

    // Or alternately
    const node12Path = tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.7z');
    const node12Path = await tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.7z');
    const node12ExtractedFolder = await tc.extract7z(node12Path, 'path/to/extract/to');
}
else {
@@ -37,7 +37,7 @@ else {

#### Cache

Finally, you can cache these directories in our tool-cache. This is useful if you want to switch back and forth between versions of a tool, or save a tool between runs for private runners (private runners are still in development but are on the roadmap).
Finally, you can cache these directories in our tool-cache. This is useful if you want to switch back and forth between versions of a tool, or save a tool between runs for self-hosted runners.

You'll often want to add it to the path as part of this step:

@@ -57,7 +57,7 @@ You can also cache files for reuse.

```js
const tc = require('@actions/tool-cache');

tc.cacheFile('path/to/exe', 'destFileName.exe', 'myExeName', '1.1.0');
const cachedPath = await tc.cacheFile('path/to/exe', 'destFileName.exe', 'myExeName', '1.1.0');
```

#### Find

16 node_modules/@actions/tool-cache/lib/manifest.d.ts generated vendored Normal file
@@ -0,0 +1,16 @@
export interface IToolReleaseFile {
    filename: string;
    platform: string;
    platform_version?: string;
    arch: string;
    download_url: string;
}
export interface IToolRelease {
    version: string;
    stable: boolean;
    release_url: string;
    files: IToolReleaseFile[];
}
export declare function _findMatch(versionSpec: string, stable: boolean, candidates: IToolRelease[], archFilter: string): Promise<IToolRelease | undefined>;
export declare function _getOsVersion(): string;
export declare function _readLinuxVersionFile(): string;
106 node_modules/@actions/tool-cache/lib/manifest.js generated vendored Normal file
@@ -0,0 +1,106 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const semver = __importStar(require("semver"));
const core_1 = require("@actions/core");
// needs to be require for core node modules to be mocked
/* eslint @typescript-eslint/no-require-imports: 0 */
const os = require("os");
const cp = require("child_process");
const fs = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
    return __awaiter(this, void 0, void 0, function* () {
        const platFilter = os.platform();
        let result;
        let match;
        let file;
        for (const candidate of candidates) {
            const version = candidate.version;
            core_1.debug(`check ${version} satisfies ${versionSpec}`);
            if (semver.satisfies(version, versionSpec) &&
                (!stable || candidate.stable === stable)) {
                file = candidate.files.find(item => {
                    core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
                    let chk = item.arch === archFilter && item.platform === platFilter;
                    if (chk && item.platform_version) {
                        const osVersion = module.exports._getOsVersion();
                        if (osVersion === item.platform_version) {
                            chk = true;
                        }
                        else {
                            chk = semver.satisfies(osVersion, item.platform_version);
                        }
                    }
                    return chk;
                });
                if (file) {
                    core_1.debug(`matched ${candidate.version}`);
                    match = candidate;
                    break;
                }
            }
        }
        if (match && file) {
            // clone since we're mutating the file list to be only the file that matches
            result = Object.assign({}, match);
            result.files = [file];
        }
        return result;
    });
}
exports._findMatch = _findMatch;
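A sketch of what _findMatch consumes and returns; the candidate list, version numbers, and URLs below are hypothetical:

```js
const mm = require('./manifest');

const candidates = [{
    version: '12.7.0',
    stable: true,
    release_url: 'https://example.invalid/releases/12.7.0',
    files: [{
        filename: 'node-12.7.0-linux-x64.tar.gz',
        platform: 'linux',
        arch: 'x64',
        download_url: 'https://example.invalid/node-12.7.0-linux-x64.tar.gz'
    }]
}];

async function demo() {
    // On a linux/x64 runner this resolves to the candidate, with `files`
    // narrowed to the single matching file; on other platforms, undefined.
    const match = await mm._findMatch('12.x', true, candidates, 'x64');
    console.log(match && match.files[0].download_url);
}
```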
function _getOsVersion() {
    // TODO: add windows and other linux, arm variants
    // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)
    const plat = os.platform();
    let version = '';
    if (plat === 'darwin') {
        version = cp.execSync('sw_vers -productVersion').toString();
    }
    else if (plat === 'linux') {
        // lsb_release process not in some containers, readfile
        // Run cat /etc/lsb-release
        // DISTRIB_ID=Ubuntu
        // DISTRIB_RELEASE=18.04
        // DISTRIB_CODENAME=bionic
        // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS"
        const lsbContents = module.exports._readLinuxVersionFile();
        if (lsbContents) {
            const lines = lsbContents.split('\n');
            for (const line of lines) {
                const parts = line.split('=');
                if (parts.length === 2 && parts[0].trim() === 'DISTRIB_RELEASE') {
                    version = parts[1].trim();
                    break;
                }
            }
        }
    }
    return version;
}
exports._getOsVersion = _getOsVersion;
function _readLinuxVersionFile() {
    const lsbFile = '/etc/lsb-release';
    let contents = '';
    if (fs.existsSync(lsbFile)) {
        contents = fs.readFileSync(lsbFile).toString();
    }
    return contents;
}
exports._readLinuxVersionFile = _readLinuxVersionFile;
//# sourceMappingURL=manifest.js.map
1 node_modules/@actions/tool-cache/lib/manifest.js.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"manifest.js","sourceRoot":"","sources":["../src/manifest.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,+CAAgC;AAChC,wCAAmC;AAEnC,yDAAyD;AACzD,qDAAqD;AAErD,yBAAyB;AACzB,oCAAoC;AACpC,yBAAyB;AAqDzB,SAAsB,UAAU,CAC9B,WAAmB,EACnB,MAAe,EACf,UAA0B,EAC1B,UAAkB;;QAElB,MAAM,UAAU,GAAG,EAAE,CAAC,QAAQ,EAAE,CAAA;QAEhC,IAAI,MAAgC,CAAA;QACpC,IAAI,KAA+B,CAAA;QAEnC,IAAI,IAAkC,CAAA;QACtC,KAAK,MAAM,SAAS,IAAI,UAAU,EAAE;YAClC,MAAM,OAAO,GAAG,SAAS,CAAC,OAAO,CAAA;YAEjC,YAAK,CAAC,SAAS,OAAO,cAAc,WAAW,EAAE,CAAC,CAAA;YAClD,IACE,MAAM,CAAC,SAAS,CAAC,OAAO,EAAE,WAAW,CAAC;gBACtC,CAAC,CAAC,MAAM,IAAI,SAAS,CAAC,MAAM,KAAK,MAAM,CAAC,EACxC;gBACA,IAAI,GAAG,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;oBACjC,YAAK,CACH,GAAG,IAAI,CAAC,IAAI,MAAM,UAAU,OAAO,IAAI,CAAC,QAAQ,MAAM,UAAU,EAAE,CACnE,CAAA;oBAED,IAAI,GAAG,GAAG,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,IAAI,CAAC,QAAQ,KAAK,UAAU,CAAA;oBAClE,IAAI,GAAG,IAAI,IAAI,CAAC,gBAAgB,EAAE;wBAChC,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,aAAa,EAAE,CAAA;wBAEhD,IAAI,SAAS,KAAK,IAAI,CAAC,gBAAgB,EAAE;4BACvC,GAAG,GAAG,IAAI,CAAA;yBACX;6BAAM;4BACL,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAA;yBACzD;qBACF;oBAED,OAAO,GAAG,CAAA;gBACZ,CAAC,CAAC,CAAA;gBAEF,IAAI,IAAI,EAAE;oBACR,YAAK,CAAC,WAAW,SAAS,CAAC,OAAO,EAAE,CAAC,CAAA;oBACrC,KAAK,GAAG,SAAS,CAAA;oBACjB,MAAK;iBACN;aACF;SACF;QAED,IAAI,KAAK,IAAI,IAAI,EAAE;YACjB,4EAA4E;YAC5E,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAA;YACjC,MAAM,CAAC,KAAK,GAAG,CAAC,IAAI,CAAC,CAAA;SACtB;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAtDD,gCAsDC;AAED,SAAgB,aAAa;IAC3B,kDAAkD;IAClD,6GAA6G;IAC7G,MAAM,IAAI,GAAG,EAAE,CAAC,QAAQ,EAAE,CAAA;IAC1B,IAAI,OAAO,GAAG,EAAE,CAAA;IAEhB,IAAI,IAAI,KAAK,QAAQ,EAAE;QACrB,OAAO,GAAG,EAAE,CAAC,QAAQ,CAAC,yBAAyB,CAAC,CAAC,QAAQ,EAAE,CAAA;KAC5D;SAAM,IAAI,IAAI,KAAK,OAAO,EAAE;QAC3B,uDAAuD;QACvD,2BAA2B;QAC3B,oBAAoB;QACpB,wBAAwB;QACxB,0BAA0B;QAC1B,2CAA2C;QAC3C,MAAM,WAAW,GAAG,MAAM,CAAC,OAAO,CAAC,qBAAqB,EAAE,CAAA;QAC1D,IAAI,WAAW,EAAE;YACf,MAAM,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;YACrC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;gBACxB,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;gBAC7B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,KAAK,iBAAiB,EAAE;oBAC/D,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAA;oBACzB,MAAK;iBACN;aACF;SACF;KACF;IAED,OAAO,OAAO,CAAA;AAChB,CAAC;AA7BD,sCA6BC;AAED,SAAgB,qBAAqB;IACnC,MAAM,OAAO,GAAG,kBAAkB,CAAA;IAClC,IAAI,QAAQ,GAAG,EAAE,CAAA;IAEjB,IAAI,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QAC1B,QAAQ,GAAG,EAAE,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC,QAAQ,EAAE,CAAA;KAC/C;IAED,OAAO,QAAQ,CAAA;AACjB,CAAC;AATD,sDASC"}
12 node_modules/@actions/tool-cache/lib/retry-helper.d.ts generated vendored Normal file
@@ -0,0 +1,12 @@
/**
 * Internal class for retries
 */
export declare class RetryHelper {
    private maxAttempts;
    private minSeconds;
    private maxSeconds;
    constructor(maxAttempts: number, minSeconds: number, maxSeconds: number);
    execute<T>(action: () => Promise<T>, isRetryable?: (e: Error) => boolean): Promise<T>;
    private getSleepAmount;
    private sleep;
}
70 node_modules/@actions/tool-cache/lib/retry-helper.js generated vendored Normal file
@@ -0,0 +1,70 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
/**
 * Internal class for retries
 */
class RetryHelper {
    constructor(maxAttempts, minSeconds, maxSeconds) {
        if (maxAttempts < 1) {
            throw new Error('max attempts should be greater than or equal to 1');
        }
        this.maxAttempts = maxAttempts;
        this.minSeconds = Math.floor(minSeconds);
        this.maxSeconds = Math.floor(maxSeconds);
        if (this.minSeconds > this.maxSeconds) {
            throw new Error('min seconds should be less than or equal to max seconds');
        }
    }
    execute(action, isRetryable) {
        return __awaiter(this, void 0, void 0, function* () {
            let attempt = 1;
            while (attempt < this.maxAttempts) {
                // Try
                try {
                    return yield action();
                }
                catch (err) {
                    if (isRetryable && !isRetryable(err)) {
                        throw err;
                    }
                    core.info(err.message);
                }
                // Sleep
                const seconds = this.getSleepAmount();
                core.info(`Waiting ${seconds} seconds before trying again`);
                yield this.sleep(seconds);
                attempt++;
            }
            // Last attempt
            return yield action();
        });
    }
    getSleepAmount() {
        return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
            this.minSeconds);
    }
    sleep(seconds) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise(resolve => setTimeout(resolve, seconds * 1000));
        });
    }
}
exports.RetryHelper = RetryHelper;
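A usage sketch for the class above, mirroring how downloadTool wires it up later in this diff; the action and retryability test are hypothetical placeholders:

```js
const { RetryHelper } = require('./retry-helper');

async function demo() {
    // Up to 3 attempts, sleeping a random 10-20 seconds between them.
    const retryHelper = new RetryHelper(3, 10, 20);
    return await retryHelper.execute(
        async () => doFlakyThing(),     // hypothetical action to retry
        err => err.message !== 'fatal'  // hypothetical: return false to stop retrying
    );
}
```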
//# sourceMappingURL=retry-helper.js.map
1 node_modules/@actions/tool-cache/lib/retry-helper.js.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"retry-helper.js","sourceRoot":"","sources":["../src/retry-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAErC;;GAEG;AACH,MAAa,WAAW;IAKtB,YAAY,WAAmB,EAAE,UAAkB,EAAE,UAAkB;QACrE,IAAI,WAAW,GAAG,CAAC,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAA;SACrE;QAED,IAAI,CAAC,WAAW,GAAG,WAAW,CAAA;QAC9B,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAA;QACxC,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAA;QACxC,IAAI,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE;YACrC,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;IACH,CAAC;IAEK,OAAO,CACX,MAAwB,EACxB,WAAmC;;YAEnC,IAAI,OAAO,GAAG,CAAC,CAAA;YACf,OAAO,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;gBACjC,MAAM;gBACN,IAAI;oBACF,OAAO,MAAM,MAAM,EAAE,CAAA;iBACtB;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,WAAW,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,EAAE;wBACpC,MAAM,GAAG,CAAA;qBACV;oBAED,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;iBACvB;gBAED,QAAQ;gBACR,MAAM,OAAO,GAAG,IAAI,CAAC,cAAc,EAAE,CAAA;gBACrC,IAAI,CAAC,IAAI,CAAC,WAAW,OAAO,8BAA8B,CAAC,CAAA;gBAC3D,MAAM,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;gBACzB,OAAO,EAAE,CAAA;aACV;YAED,eAAe;YACf,OAAO,MAAM,MAAM,EAAE,CAAA;QACvB,CAAC;KAAA;IAEO,cAAc;QACpB,OAAO,CACL,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC;YACnE,IAAI,CAAC,UAAU,CAChB,CAAA;IACH,CAAC;IAEa,KAAK,CAAC,OAAe;;YACjC,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,OAAO,GAAG,IAAI,CAAC,CAAC,CAAA;QACpE,CAAC;KAAA;CACF;AAxDD,kCAwDC"}
15 node_modules/@actions/tool-cache/lib/tool-cache.d.ts generated vendored
@@ -1,3 +1,4 @@
import * as mm from './manifest';
export declare class HTTPError extends Error {
    readonly httpStatusCode: number | undefined;
    constructor(httpStatusCode: number | undefined);
@@ -6,9 +7,11 @@ export declare class HTTPError extends Error {
 * Download a tool from an url and stream it into a file
 *
 * @param url url of tool to download
 * @param dest path to download tool
 * @param auth authorization header
 * @returns path to downloaded tool
 */
export declare function downloadTool(url: string): Promise<string>;
export declare function downloadTool(url: string, dest?: string, auth?: string): Promise<string>;
/**
 * Extract a .7z file
 *
@@ -26,14 +29,14 @@ export declare function downloadTool(url: string): Promise<string>;
 */
export declare function extract7z(file: string, dest?: string, _7zPath?: string): Promise<string>;
/**
 * Extract a tar
 * Extract a compressed tar archive
 *
 * @param file path to the tar
 * @param dest destination directory. Optional.
 * @param flags flags for the tar. Optional.
 * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
 * @returns path to the destination directory
 */
export declare function extractTar(file: string, dest?: string, flags?: string): Promise<string>;
export declare function extractTar(file: string, dest?: string, flags?: string | string[]): Promise<string>;
/**
 * Extract a zip
 *
@@ -77,3 +80,7 @@ export declare function find(toolName: string, versionSpec: string, arch?: strin
 * @param arch optional arch. defaults to arch of computer
 */
export declare function findAllVersions(toolName: string, arch?: string): string[];
export declare type IToolRelease = mm.IToolRelease;
export declare type IToolReleaseFile = mm.IToolReleaseFile;
export declare function getManifestFromRepo(owner: string, repo: string, auth?: string, branch?: string): Promise<IToolRelease[]>;
export declare function findFromManifest(versionSpec: string, stable: boolean, manifest: IToolRelease[], archFilter?: string): Promise<IToolRelease | undefined>;

312 node_modules/@actions/tool-cache/lib/tool-cache.js generated vendored
@@ -8,17 +8,31 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
    step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = require("@actions/core");
const io = require("@actions/io");
const fs = require("fs");
const os = require("os");
const path = require("path");
const httpm = require("typed-rest-client/HttpClient");
const semver = require("semver");
const uuidV4 = require("uuid/v4");
const core = __importStar(require("@actions/core"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const mm = __importStar(require("./manifest"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const httpm = __importStar(require("@actions/http-client"));
const semver = __importStar(require("semver"));
const stream = __importStar(require("stream"));
const util = __importStar(require("util"));
const v4_1 = __importDefault(require("uuid/v4"));
const exec_1 = require("@actions/exec/lib/exec");
const assert_1 = require("assert");
const retry_helper_1 = require("./retry-helper");
class HTTPError extends Error {
    constructor(httpStatusCode) {
        super(`Unexpected HTTP response: ${httpStatusCode}`);
@@ -29,85 +43,88 @@ class HTTPError extends Error {
exports.HTTPError = HTTPError;
const IS_WINDOWS = process.platform === 'win32';
const userAgent = 'actions/tool-cache';
// On load grab temp directory and cache directory and remove them from env (currently don't want to expose this)
let tempDirectory = process.env['RUNNER_TEMP'] || '';
let cacheRoot = process.env['RUNNER_TOOL_CACHE'] || '';
// If directories not found, place them in common temp locations
if (!tempDirectory || !cacheRoot) {
    let baseLocation;
    if (IS_WINDOWS) {
        // On windows use the USERPROFILE env variable
        baseLocation = process.env['USERPROFILE'] || 'C:\\';
    }
    else {
        if (process.platform === 'darwin') {
            baseLocation = '/Users';
        }
        else {
            baseLocation = '/home';
        }
    }
    if (!tempDirectory) {
        tempDirectory = path.join(baseLocation, 'actions', 'temp');
    }
    if (!cacheRoot) {
        cacheRoot = path.join(baseLocation, 'actions', 'cache');
    }
}
/**
 * Download a tool from an url and stream it into a file
 *
 * @param url url of tool to download
 * @param dest path to download tool
 * @param auth authorization header
 * @returns path to downloaded tool
 */
function downloadTool(url) {
function downloadTool(url, dest, auth) {
    return __awaiter(this, void 0, void 0, function* () {
        // Wrap in a promise so that we can resolve from within stream callbacks
        return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
            try {
                const http = new httpm.HttpClient(userAgent, [], {
                    allowRetries: true,
                    maxRetries: 3
                });
                const destPath = path.join(tempDirectory, uuidV4());
                yield io.mkdirP(tempDirectory);
                core.debug(`Downloading ${url}`);
                core.debug(`Downloading ${destPath}`);
                if (fs.existsSync(destPath)) {
                    throw new Error(`Destination file path ${destPath} already exists`);
        dest = dest || path.join(_getTempDirectory(), v4_1.default());
        yield io.mkdirP(path.dirname(dest));
        core.debug(`Downloading ${url}`);
        core.debug(`Destination ${dest}`);
        const maxAttempts = 3;
        const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
        const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
        const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
        return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
            return yield downloadToolAttempt(url, dest || '', auth);
        }), (err) => {
            if (err instanceof HTTPError && err.httpStatusCode) {
                // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
                if (err.httpStatusCode < 500 &&
                    err.httpStatusCode !== 408 &&
                    err.httpStatusCode !== 429) {
                    return false;
                }
                const response = yield http.get(url);
                if (response.message.statusCode !== 200) {
                    const err = new HTTPError(response.message.statusCode);
                    core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
                    throw err;
                }
                const file = fs.createWriteStream(destPath);
                file.on('open', () => __awaiter(this, void 0, void 0, function* () {
                    try {
                        const stream = response.message.pipe(file);
                        stream.on('close', () => {
                            core.debug('download complete');
                            resolve(destPath);
                        });
                    }
                    catch (err) {
                        core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
                        reject(err);
                    }
                }));
                file.on('error', err => {
                    file.end();
                    reject(err);
                });
            }
            catch (err) {
                reject(err);
            }
        }));
            // Otherwise retry
            return true;
        });
    });
}
exports.downloadTool = downloadTool;
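A caller-side sketch of the new downloadTool signature introduced above; the URL and token are hypothetical, and both trailing arguments are optional:

```js
const tc = require('@actions/tool-cache');

async function demo() {
    // dest defaults to a uuid-named file under the runner temp directory;
    // auth, when supplied, is sent as the authorization header.
    const toolPath = await tc.downloadTool(
        'https://example.invalid/tool.tar.gz',
        undefined,
        'token MY_HYPOTHETICAL_TOKEN');
    console.log(`downloaded to ${toolPath}`);
}
```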
function downloadToolAttempt(url, dest, auth) {
    return __awaiter(this, void 0, void 0, function* () {
        if (fs.existsSync(dest)) {
            throw new Error(`Destination file path ${dest} already exists`);
        }
        // Get the response headers
        const http = new httpm.HttpClient(userAgent, [], {
            allowRetries: false
        });
        let headers;
        if (auth) {
            core.debug('set auth');
            headers = {
                authorization: auth
            };
        }
        const response = yield http.get(url, headers);
        if (response.message.statusCode !== 200) {
            const err = new HTTPError(response.message.statusCode);
            core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
            throw err;
        }
        // Download the response body
        const pipeline = util.promisify(stream.pipeline);
        const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
        const readStream = responseMessageFactory();
        let succeeded = false;
        try {
            yield pipeline(readStream, fs.createWriteStream(dest));
            core.debug('download complete');
            succeeded = true;
            return dest;
        }
        finally {
            // Error, delete dest before retry
            if (!succeeded) {
                core.debug('download failed');
                try {
                    yield io.rmRF(dest);
                }
                catch (err) {
                    core.debug(`Failed to delete '${dest}'. ${err.message}`);
                }
            }
        }
    });
}
/**
 * Extract a .7z file
 *
@@ -127,14 +144,15 @@ function extract7z(file, dest, _7zPath) {
    return __awaiter(this, void 0, void 0, function* () {
        assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
        assert_1.ok(file, 'parameter "file" is required');
        dest = dest || (yield _createExtractFolder(dest));
        dest = yield _createExtractFolder(dest);
        const originalCwd = process.cwd();
        process.chdir(dest);
        if (_7zPath) {
            try {
                const logLevel = core.isDebug() ? '-bb1' : '-bb0';
                const args = [
                    'x',
                    '-bb1',
                    logLevel,
                    '-bd',
                    '-sccUTF-8',
                    file
@@ -182,11 +200,11 @@ function extract7z(file, dest, _7zPath) {
}
exports.extract7z = extract7z;
/**
 * Extract a tar
 * Extract a compressed tar archive
 *
 * @param file path to the tar
 * @param dest destination directory. Optional.
 * @param flags flags for the tar. Optional.
 * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
 * @returns path to the destination directory
 */
function extractTar(file, dest, flags = 'xz') {
@@ -194,9 +212,47 @@ function extractTar(file, dest, flags = 'xz') {
    if (!file) {
        throw new Error("parameter 'file' is required");
    }
    dest = dest || (yield _createExtractFolder(dest));
    const tarPath = yield io.which('tar', true);
    yield exec_1.exec(`"${tarPath}"`, [flags, '-C', dest, '-f', file]);
    // Create dest
    dest = yield _createExtractFolder(dest);
    // Determine whether GNU tar
    core.debug('Checking tar --version');
    let versionOutput = '';
    yield exec_1.exec('tar --version', [], {
        ignoreReturnCode: true,
        silent: true,
        listeners: {
            stdout: (data) => (versionOutput += data.toString()),
            stderr: (data) => (versionOutput += data.toString())
        }
    });
    core.debug(versionOutput.trim());
    const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
    // Initialize args
    let args;
    if (flags instanceof Array) {
        args = flags;
    }
    else {
        args = [flags];
    }
    if (core.isDebug() && !flags.includes('v')) {
        args.push('-v');
    }
    let destArg = dest;
    let fileArg = file;
    if (IS_WINDOWS && isGnuTar) {
        args.push('--force-local');
        destArg = dest.replace(/\\/g, '/');
        // Technically only the dest needs to have `/` but for aesthetic consistency
        // convert slashes in the file arg too.
        fileArg = file.replace(/\\/g, '/');
    }
    if (isGnuTar) {
        // Suppress warnings when using GNU tar to extract archives created by BSD tar
        args.push('--warning=no-unknown-keyword');
    }
    args.push('-C', destArg, '-f', fileArg);
    yield exec_1.exec(`tar`, args);
    return dest;
    });
}
|
||||
@@ -213,7 +269,7 @@ function extractZip(file, dest) {
|
||||
if (!file) {
|
||||
throw new Error("parameter 'file' is required");
|
||||
}
|
||||
dest = dest || (yield _createExtractFolder(dest));
|
||||
dest = yield _createExtractFolder(dest);
|
||||
if (IS_WINDOWS) {
|
||||
yield extractZipWin(file, dest);
|
||||
}
|
||||
@@ -231,7 +287,7 @@ function extractZipWin(file, dest) {
|
||||
const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
||||
const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
|
||||
// run powershell
|
||||
const powershellPath = yield io.which('powershell');
|
||||
const powershellPath = yield io.which('powershell', true);
|
||||
const args = [
|
||||
'-NoLogo',
|
||||
'-Sta',
|
||||
@@ -247,8 +303,12 @@ function extractZipWin(file, dest) {
|
||||
}
|
||||
function extractZipNix(file, dest) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const unzipPath = yield io.which('unzip');
|
||||
yield exec_1.exec(`"${unzipPath}"`, [file], { cwd: dest });
|
||||
const unzipPath = yield io.which('unzip', true);
|
||||
const args = [file];
|
||||
if (!core.isDebug()) {
|
||||
args.unshift('-q');
|
||||
}
|
||||
yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest });
|
||||
});
|
||||
}
|
||||
/**
|
||||
@@ -339,7 +399,7 @@ function find(toolName, versionSpec, arch) {
|
||||
let toolPath = '';
|
||||
if (versionSpec) {
|
||||
versionSpec = semver.clean(versionSpec) || '';
|
||||
const cachePath = path.join(cacheRoot, toolName, versionSpec, arch);
|
||||
const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);
|
||||
core.debug(`checking cache: ${cachePath}`);
|
||||
if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
|
||||
core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
|
||||
@@ -361,7 +421,7 @@ exports.find = find;
|
||||
function findAllVersions(toolName, arch) {
|
||||
const versions = [];
|
||||
arch = arch || os.arch();
|
||||
const toolPath = path.join(cacheRoot, toolName);
|
||||
const toolPath = path.join(_getCacheDirectory(), toolName);
|
||||
if (fs.existsSync(toolPath)) {
|
||||
const children = fs.readdirSync(toolPath);
|
||||
for (const child of children) {
|
||||
@@ -376,11 +436,56 @@ function findAllVersions(toolName, arch) {
|
||||
return versions;
|
||||
}
|
||||
exports.findAllVersions = findAllVersions;
|
||||
function getManifestFromRepo(owner, repo, auth, branch = 'master') {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let releases = [];
|
||||
const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`;
|
||||
const http = new httpm.HttpClient('tool-cache');
|
||||
const headers = {};
|
||||
if (auth) {
|
||||
core.debug('set auth');
|
||||
headers.authorization = auth;
|
||||
}
|
||||
const response = yield http.getJson(treeUrl, headers);
|
||||
if (!response.result) {
|
||||
return releases;
|
||||
}
|
||||
let manifestUrl = '';
|
||||
for (const item of response.result.tree) {
|
||||
if (item.path === 'versions-manifest.json') {
|
||||
manifestUrl = item.url;
|
||||
break;
|
||||
}
|
||||
}
|
||||
headers['accept'] = 'application/vnd.github.VERSION.raw';
|
||||
let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody();
|
||||
if (versionsRaw) {
|
||||
// shouldn't be needed but protects against invalid json saved with BOM
|
||||
versionsRaw = versionsRaw.replace(/^\uFEFF/, '');
|
||||
try {
|
||||
releases = JSON.parse(versionsRaw);
|
||||
}
|
||||
catch (_a) {
|
||||
core.debug('Invalid json');
|
||||
}
|
||||
}
|
||||
return releases;
|
||||
});
|
||||
}
|
||||
exports.getManifestFromRepo = getManifestFromRepo;
|
||||
function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// wrap the internal impl
|
||||
const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter);
|
||||
return match;
|
||||
});
|
||||
}
|
||||
exports.findFromManifest = findFromManifest;
|
||||
function _createExtractFolder(dest) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (!dest) {
|
||||
// create a temp dir
|
||||
dest = path.join(tempDirectory, uuidV4());
|
||||
dest = path.join(_getTempDirectory(), v4_1.default());
|
||||
}
|
||||
yield io.mkdirP(dest);
|
||||
return dest;
|
||||
@@ -388,7 +493,7 @@ function _createExtractFolder(dest) {
|
||||
}
|
||||
function _createToolPath(tool, version, arch) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const folderPath = path.join(cacheRoot, tool, semver.clean(version) || version, arch || '');
|
||||
const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
|
||||
core.debug(`destination ${folderPath}`);
|
||||
const markerPath = `${folderPath}.complete`;
|
||||
yield io.rmRF(folderPath);
|
||||
@@ -398,7 +503,7 @@ function _createToolPath(tool, version, arch) {
|
||||
});
|
||||
}
|
||||
function _completeToolPath(tool, version, arch) {
|
||||
const folderPath = path.join(cacheRoot, tool, semver.clean(version) || version, arch || '');
|
||||
const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
|
||||
const markerPath = `${folderPath}.complete`;
|
||||
fs.writeFileSync(markerPath, '');
|
||||
core.debug('finished caching tool');
|
||||
@@ -435,4 +540,29 @@ function _evaluateVersions(versions, versionSpec) {
|
||||
}
|
||||
return version;
|
||||
}
|
||||
/**
|
||||
* Gets RUNNER_TOOL_CACHE
|
||||
*/
|
||||
function _getCacheDirectory() {
|
||||
const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';
|
||||
assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');
|
||||
return cacheDirectory;
|
||||
}
|
||||
/**
|
||||
* Gets RUNNER_TEMP
|
||||
*/
|
||||
function _getTempDirectory() {
|
||||
const tempDirectory = process.env['RUNNER_TEMP'] || '';
|
||||
assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
|
||||
return tempDirectory;
|
||||
}
|
||||
/**
|
||||
* Gets a global variable
|
||||
*/
|
||||
function _getGlobal(key, defaultValue) {
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
const value = global[key];
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
return value !== undefined ? value : defaultValue;
|
||||
}
|
||||
//# sourceMappingURL=tool-cache.js.map
|
||||
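For orientation, here is a minimal usage sketch of the `downloadTool`/`extractTar` signatures updated in the diff above; the URL and archive name are placeholders, not part of the actual change:

```js
const tc = require('@actions/tool-cache');

async function setup() {
  // downloadTool(url, dest?, auth?) picks a temp destination when dest is omitted.
  const archivePath = await tc.downloadTool('https://example.com/mytool.tar.gz');
  // extractTar(file, dest?, flags = 'xz') creates the destination folder itself
  // and, per the diff, now detects GNU tar and accepts an array of flags.
  return await tc.extractTar(archivePath);
}

setup().then(dir => console.log(`extracted to ${dir}`));
```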
2 node_modules/@actions/tool-cache/lib/tool-cache.js.map generated vendored
File diff suppressed because one or more lines are too long
146 node_modules/@actions/tool-cache/node_modules/@actions/core/README.md generated vendored Normal file
@@ -0,0 +1,146 @@
# `@actions/core`

> Core functions for setting results, logging, registering secrets and exporting variables across actions

## Usage

### Import the package

```js
// javascript
const core = require('@actions/core');

// typescript
import * as core from '@actions/core';
```

#### Inputs/Outputs

Action inputs can be read with `getInput`. Outputs can be set with `setOutput`, which makes them available to be mapped into inputs of other actions to ensure they are decoupled.

```js
const myInput = core.getInput('inputName', { required: true });

core.setOutput('outputKey', 'outputVal');
```

#### Exporting variables

Since each step runs in a separate process, you can use `exportVariable` to add a variable to the environment blocks of this step and future steps.

```js
core.exportVariable('envVar', 'Val');
```

#### Setting a secret

Setting a secret registers the secret with the runner to ensure it is masked in logs.

```js
core.setSecret('myPassword');
```

#### PATH Manipulation

To make a tool's path available in the PATH for the remainder of the job (without altering the machine's or container's state), use `addPath`. The runner will prepend the given path to the job's PATH.

```js
core.addPath('/path/to/mytool');
```

#### Exit codes

You should use this library to set the failing exit code for your action. If no status is set and the script runs to completion, the action is treated as a success.

```js
const core = require('@actions/core');

try {
  // Do stuff
}
catch (err) {
  // setFailed logs the message and sets a failing exit code
  core.setFailed(`Action failed with error ${err}`);
}
```

Note that `setNeutral` is not yet implemented in actions V2 but equivalent functionality is being planned.

#### Logging

Finally, this library provides some utilities for logging. Note that debug logging is hidden from the logs by default. This behavior can be toggled by enabling the [Step Debug Logs](../../docs/action-debugging.md#step-debug-logs).

```js
const core = require('@actions/core');

const myInput = core.getInput('input');
try {
  core.debug('Inside try block');

  if (!myInput) {
    core.warning('myInput was not set');
  }

  if (core.isDebug()) {
    // curl -v https://github.com
  } else {
    // curl https://github.com
  }

  // Do stuff
}
catch (err) {
  core.error(`Error ${err}, action may still succeed though`);
}
```

This library can also wrap chunks of output in foldable groups.

```js
const core = require('@actions/core')

// Manually wrap output
core.startGroup('Do some function')
doSomeFunction()
core.endGroup()

// Wrap an asynchronous function call
const result = await core.group('Do something async', async () => {
  const response = await doSomeHTTPRequest()
  return response
})
```

#### Action state

You can use this library to save state and get state for sharing information between the main and post executions of a given wrapper action:

**action.yml**
```yaml
name: 'Wrapper action sample'
inputs:
  name:
    default: 'GitHub'
runs:
  using: 'node12'
  main: 'main.js'
  post: 'cleanup.js'
```

In action's `main.js`:

```js
const core = require('@actions/core');

core.saveState("pidToKill", 12345);
```

In action's `cleanup.js`:

```js
const core = require('@actions/core');

var pid = core.getState("pidToKill");

process.kill(pid);
```
21 node_modules/@actions/tool-cache/node_modules/@actions/core/lib/command.d.ts generated vendored Normal file
@@ -0,0 +1,21 @@
interface CommandProperties {
    [key: string]: any;
}
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-env name=MY_VAR::some value
 */
export declare function issueCommand(command: string, properties: CommandProperties, message: any): void;
export declare function issue(name: string, message?: string): void;
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
export declare function toCommandValue(input: any): string;
export {};
92 node_modules/@actions/tool-cache/node_modules/@actions/core/lib/command.js generated vendored Normal file
@@ -0,0 +1,92 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const os = __importStar(require("os"));
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-env name=MY_VAR::some value
 */
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
class Command {
    constructor(command, properties, message) {
        if (!command) {
            command = 'missing.command';
        }
        this.command = command;
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            cmdStr += ' ';
            let first = true;
            for (const key in this.properties) {
                if (this.properties.hasOwnProperty(key)) {
                    const val = this.properties[key];
                    if (val) {
                        if (first) {
                            first = false;
                        }
                        else {
                            cmdStr += ',';
                        }
                        cmdStr += `${key}=${escapeProperty(val)}`;
                    }
                }
            }
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
    if (input === null || input === undefined) {
        return '';
    }
    else if (typeof input === 'string' || input instanceof String) {
        return input;
    }
    return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
function escapeData(s) {
    return toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A');
}
function escapeProperty(s) {
    return toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A')
        .replace(/:/g, '%3A')
        .replace(/,/g, '%2C');
}
//# sourceMappingURL=command.js.map
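As a rough illustration of the `::name key=value::message` format implemented above (the property values here are made up, and the compiled module is required directly by its lib path), `issueCommand` and `issue` emit workflow commands on stdout:

```js
const { issueCommand, issue } = require('@actions/core/lib/command');

// Prints "::warning file=app.js,line=10::Something looks off"
issueCommand('warning', { file: 'app.js', line: 10 }, 'Something looks off');

// Prints "::endgroup::"
issue('endgroup');
```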
1 node_modules/@actions/tool-cache/node_modules/@actions/core/lib/command.js.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAwB;AAWxB;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAY;IAEZ,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,UAAkB,EAAE;IACtD,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED;;;GAGG;AACH,SAAgB,cAAc,CAAC,KAAU;IACvC,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,OAAO,EAAE,CAAA;KACV;SAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,YAAY,MAAM,EAAE;QAC/D,OAAO,KAAe,CAAA;KACvB;IACD,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAA;AAC9B,CAAC;AAPD,wCAOC;AAED,SAAS,UAAU,CAAC,CAAM;IACxB,OAAO,cAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAM;IAC5B,OAAO,cAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"}
122 node_modules/@actions/tool-cache/node_modules/@actions/core/lib/core.d.ts generated vendored Normal file
@@ -0,0 +1,122 @@
/**
 * Interface for getInput options
 */
export interface InputOptions {
    /** Optional. Whether the input is required. If required and not present, will throw. Defaults to false */
    required?: boolean;
}
/**
 * The code to exit an action
 */
export declare enum ExitCode {
    /**
     * A code indicating that the action was successful
     */
    Success = 0,
    /**
     * A code indicating that the action was a failure
     */
    Failure = 1
}
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
export declare function exportVariable(name: string, val: any): void;
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
export declare function setSecret(secret: string): void;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath
 */
export declare function addPath(inputPath: string): void;
/**
 * Gets the value of an input. The value is also trimmed.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
export declare function getInput(name: string, options?: InputOptions): string;
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
export declare function setOutput(name: string, value: any): void;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 *
 */
export declare function setCommandEcho(enabled: boolean): void;
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
export declare function setFailed(message: string | Error): void;
/**
 * Gets whether Actions Step Debug is on or not
 */
export declare function isDebug(): boolean;
/**
 * Writes debug message to user log
 * @param message debug message
 */
export declare function debug(message: string): void;
/**
 * Adds an error issue
 * @param message error issue message. Errors will be converted to string via toString()
 */
export declare function error(message: string | Error): void;
/**
 * Adds a warning issue
 * @param message warning issue message. Errors will be converted to string via toString()
 */
export declare function warning(message: string | Error): void;
/**
 * Writes info to log with console.log.
 * @param message info message
 */
export declare function info(message: string): void;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
export declare function startGroup(name: string): void;
/**
 * End an output group.
 */
export declare function endGroup(): void;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
export declare function group<T>(name: string, fn: () => Promise<T>): Promise<T>;
/**
 * Saves state for the current action; the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
export declare function saveState(name: string, value: any): void;
/**
 * Gets the value of a state set by this action's main execution.
 *
 * @param name name of the state to get
 * @returns string
 */
export declare function getState(name: string): string;
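A minimal sketch of how the `isDebug` and `setCommandEcho` declarations above can be combined; the exported variable name and value are illustrative only:

```js
const core = require('@actions/core');

// Echo workflow commands into the step log only when step debug logging is on;
// echoing is disabled by default unless ACTIONS_STEP_DEBUG is set.
if (core.isDebug()) {
  core.setCommandEcho(true);
}

core.exportVariable('TOOL_CONFIGURED', 'true');
```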
Some files were not shown because too many files have changed in this diff.