Mirror of https://github.com/github/codeql-action.git (synced 2025-12-08 00:38:30 +08:00)

Compare commits: pre...codeql-bun (239 commits)
| Author | SHA1 | Date |
|---|---|---|
| | e0299c3c04 | |
| | aad14bf2cb | |
| | a08742f199 | |
| | 151d531bd0 | |
| | 504c8cfc6f | |
| | a0d4330434 | |
| | bb9ed79f3d | |
| | 17548064f9 | |
| | ef507971e7 | |
| | 96d02d50f7 | |
| | 0fdc2c71e4 | |
| | 28944b580b | |
| | 50a2815790 | |
| | 6846c702da | |
| | 559e2600c1 | |
| | 5bb9e6e131 | |
| | 464ce1b43a | |
| | 8530f5b76a | |
| | a67896b792 | |
| | b3ffa760ab | |
| | 3871ca717b | |
| | a091618158 | |
| | 04adf2bf60 | |
| | 74c48f71fa | |
| | b8ac06a9c8 | |
| | 7581ac8b17 | |
| | af252d2f0d | |
| | cb384e776b | |
| | 403832b950 | |
| | 52e52435f7 | |
| | bc21c8f6f3 | |
| | 9a784b1f57 | |
| | b3c9d6f3a9 | |
| | f009c4c924 | |
| | 852d99d8e2 | |
| | 052d39e909 | |
| | 107d8ffc4c | |
| | 88231094bf | |
| | 211ad30f72 | |
| | 350bf488da | |
| | a1d945f14f | |
| | bd4042802d | |
| | 02776246bf | |
| | 66be268a09 | |
| | 56f06c77fd | |
| | 98f8945cfb | |
| | a30a5ba788 | |
| | 9133b2b54d | |
| | 8e098cbb87 | |
| | 7ae8c32cbe | |
| | beedd317d2 | |
| | 8a67191278 | |
| | 1ce7f98898 | |
| | 6d413dd723 | |
| | 31996935e6 | |
| | bcb5b28954 | |
| | 8622312249 | |
| | c0c67ce80f | |
| | bc9591a12b | |
| | dcba70915d | |
| | 2758bd30c8 | |
| | f4001a0790 | |
| | d55f711b71 | |
| | 2845a93f4c | |
| | 74f864bee1 | |
| | 38c231113e | |
| | 34c941dc31 | |
| | 5eccb79587 | |
| | 11a9af0387 | |
| | 6d036cef6f | |
| | f9768ac4ba | |
| | 3ff198f23b | |
| | ff8fe44e0c | |
| | 3f2a60be8a | |
| | 4c6749115a | |
| | 608ed15968 | |
| | 14f179f70b | |
| | dc4009c7ed | |
| | 14d602cced | |
| | 24096a1cb3 | |
| | 74d434c5ca | |
| | fff3de9938 | |
| | 1aae76b906 | |
| | 013c02758e | |
| | 0b53ebbc36 | |
| | 6de3e1cde4 | |
| | c9d0312cb7 | |
| | 0cdf645694 | |
| | d00417a341 | |
| | 7928587bdf | |
| | 87ecd0d0cc | |
| | 7e2e297e07 | |
| | b97097aaed | |
| | 8a8a49d3c5 | |
| | fcb696ec59 | |
| | c2d2dfdcdd | |
| | 042ab541fd | |
| | 19faafba94 | |
| | 476c8a44ba | |
| | f9ef310b75 | |
| | 6bd7f17e0e | |
| | 582fd14a81 | |
| | 8425341ae0 | |
| | 1f2cca021a | |
| | fa9e0ac2a6 | |
| | a9de5b50d7 | |
| | af4edf6546 | |
| | 5a97f7e980 | |
| | d4fb7fc762 | |
| | 00ebedc522 | |
| | 840dc5ee9a | |
| | c1add46efa | |
| | e35c90f53d | |
| | 6db8182349 | |
| | 202704856d | |
| | 5ea736059a | |
| | b4610ac367 | |
| | a0d60d5d9e | |
| | f18fffbea8 | |
| | 655c4497ce | |
| | d7a2025f2d | |
| | 22501fd7c8 | |
| | 07e22b1f4a | |
| | 3c2191ffdd | |
| | 28abced8ca | |
| | 50dcaaf00d | |
| | 30f7117e6a | |
| | 28a878efc3 | |
| | d518039a6b | |
| | 855f965205 | |
| | 2909e97a32 | |
| | 4997c3ff4d | |
| | 0bd4da3a6c | |
| | 98ad2fc49d | |
| | 3ca3147cd4 | |
| | 96da037d49 | |
| | da1e237d1e | |
| | 054f867322 | |
| | 1e600686e7 | |
| | cd1625a162 | |
| | 8788e5aa59 | |
| | 8fb9090674 | |
| | 10a2fd615f | |
| | 8b71cf3e5f | |
| | ae301902e1 | |
| | ddee374101 | |
| | 080dc8c3f0 | |
| | 6d1f969b1c | |
| | ff40939f66 | |
| | 7b32c3c950 | |
| | 90c07ef21d | |
| | 852b9186d6 | |
| | 63f52e71c0 | |
| | 3a883af8a6 | |
| | 886b7d3e6e | |
| | 4e12efc7c3 | |
| | 5c5f422edb | |
| | 97ef91227e | |
| | 25e5256866 | |
| | 5ec6b7524f | |
| | b366432cb3 | |
| | fa0a733046 | |
| | 0e6df42024 | |
| | 58c1abf92e | |
| | 6507fba7ec | |
| | aa54af7018 | |
| | 74c9991849 | |
| | f49335fc3b | |
| | d7b9f5a097 | |
| | 572c8bbc0c | |
| | 0347b72305 | |
| | 27cc8b23fe | |
| | 584df475ca | |
| | cd95d34497 | |
| | 88c1b7fb89 | |
| | 51b42fcf78 | |
| | 015ead73d9 | |
| | c351304778 | |
| | 96901ac7d8 | |
| | cc471c2014 | |
| | c88fb695ab | |
| | ec4d38a9a5 | |
| | 15bd158ded | |
| | 256c63a715 | |
| | a76042ab4a | |
| | 1477a43cc8 | |
| | f17ebc80bd | |
| | c0d9de18c0 | |
| | 52cd1f2261 | |
| | 3455736978 | |
| | f668f5fc74 | |
| | 3aa3d6a2b6 | |
| | 538cbdd614 | |
| | 49575f87c4 | |
| | 5a800ccbfa | |
| | cc2c18d6a8 | |
| | 4c11b3d9bf | |
| | a511aca9f1 | |
| | c3847056c5 | |
| | 189a899282 | |
| | c5ecb82753 | |
| | 4dc964d906 | |
| | dc27ff90bd | |
| | cf266cbf27 | |
| | 1f29db50bb | |
| | c979850d28 | |
| | baa9c9e0df | |
| | d966ea2f52 | |
| | 6bab450a9a | |
| | 583f8a923c | |
| | b73b259103 | |
| | 4fff14bba4 | |
| | ab918b676b | |
| | 290b34d5df | |
| | dcd81b5847 | |
| | d90fca396a | |
| | 5218f937b3 | |
| | 984552a36e | |
| | d46c1c7f29 | |
| | 43e27012da | |
| | 3d3dccf92d | |
| | 8ff10b4a6b | |
| | d68eb11bae | |
| | 4e9886ad2b | |
| | 1fe0932cc2 | |
| | 5bceb2be38 | |
| | 129ce28897 | |
| | a23cb1d61a | |
| | 0c4fc16b49 | |
| | b6a0306228 | |
| | e52e34ba17 | |
| | cffc0f7b4e | |
| | 5d2700f9cb | |
| | 1da651c219 | |
| | 26e955cfa3 | |
| | 546d5a8843 | |
| | 43de3a9949 | |
| | 7963db13d8 | |
| | f237316c5a | |
.editorconfig (new file, 10 lines)

```diff
@@ -0,0 +1,10 @@
+root = true
+
+[*]
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.ts]
+indent_style = space
+indent_size = 2
```
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 5 lines)

```diff
@@ -0,0 +1,5 @@
+blank_issues_enabled: true
+contact_links:
+  - name: Contact GitHub Support
+    url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
+    about: Contact Support about code scanning
```
.github/codeql/codeql-config.yml (vendored, 11 changes)

```diff
@@ -1,4 +1,13 @@
-me: "CodeQL config"
+name: "CodeQL config"
 queries:
   - name: Run custom queries
     uses: ./queries
+  # Run all extra query suites, both because we want to
+  # and because it'll act as extra testing. This is why
+  # we include both even though one is a superset of the
+  # other, because we're testing the parsing logic and
+  # that the suites exist in the codeql bundle.
+  - uses: security-extended
+  - uses: security-and-quality
+paths-ignore:
+  - tests
```
.github/update-release-branch.py (vendored, new file, 178 lines)

```diff
@@ -0,0 +1,178 @@
+import datetime
+from github import Github
+import random
+import requests
+import subprocess
+import sys
+
+# The branch being merged from.
+# This is the one that contains day-to-day development work.
+MAIN_BRANCH = 'main'
+# The branch being merged into.
+# This is the release branch that users reference.
+LATEST_RELEASE_BRANCH = 'v1'
+# Name of the remote
+ORIGIN = 'origin'
+
+# Runs git with the given args and returns the stdout.
+# Raises an error if git does not exit successfully.
+def run_git(*args):
+  cmd = ['git', *args]
+  p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  if (p.returncode != 0):
+    raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
+  return p.stdout.decode('ascii')
+
+# Returns true if the given branch exists on the origin remote
+def branch_exists_on_remote(branch_name):
+  return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
+
+# Opens a PR from the given branch to the release branch
+def open_pr(repo, all_commits, short_main_sha, branch_name):
+  # Sort the commits into the pull requests that introduced them,
+  # and any commits that don't have a pull request
+  pull_requests = []
+  commits_without_pull_requests = []
+  for commit in all_commits:
+    pr = get_pr_for_commit(repo, commit)
+
+    if pr is None:
+      commits_without_pull_requests.append(commit)
+    elif not any(p for p in pull_requests if p.number == pr.number):
+      pull_requests.append(pr)
+
+  print('Found ' + str(len(pull_requests)) + ' pull requests')
+  print('Found ' + str(len(commits_without_pull_requests)) + ' commits not in a pull request')
+
+  # Sort PRs and commits by age
+  pull_requests = sorted(pull_requests, key=lambda pr: pr.number)
+  commits_without_pull_requests = sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)
+
+  # Start constructing the body text
+  body = 'Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH
+
+  conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
+  body += '\n\nConductor for this PR is @' + conductor
+
+  # List all PRs merged
+  if len(pull_requests) > 0:
+    body += '\n\nContains the following pull requests:'
+    for pr in pull_requests:
+      merger = get_merger_of_pr(repo, pr)
+      body += '\n- #' + str(pr.number)
+      body += ' - ' + pr.title
+      body += ' (@' + merger + ')'
+
+  # List all commits not part of a PR
+  if len(commits_without_pull_requests) > 0:
+    body += '\n\nContains the following commits not from a pull request:'
+    for commit in commits_without_pull_requests:
+      body += '\n- ' + commit.sha
+      body += ' - ' + get_truncated_commit_message(commit)
+      body += ' (@' + commit.author.login + ')'
+
+  title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
+
+  # Create the pull request
+  pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
+  print('Created PR #' + str(pr.number))
+
+  # Assign the conductor
+  pr.add_to_assignees(conductor)
+  print('Assigned PR to ' + conductor)
+
+# Gets the person who should be in charge of the mergeback PR
+def get_conductor(repo, pull_requests, other_commits):
+  # If there are any PRs then use whoever merged the last one
+  if len(pull_requests) > 0:
+    return get_merger_of_pr(repo, pull_requests[-1])
+
+  # Otherwise take the author of the latest commit
+  return other_commits[-1].author.login
+
+# Gets a list of the SHAs of all commits that have happened on main
+# since the release branched off.
+# This will not include any commits that exist on the release branch
+# that aren't on main.
+def get_commit_difference(repo):
+  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MAIN_BRANCH).strip().split('\n')
+
+  # Convert to full-fledged commit objects
+  commits = [repo.get_commit(c) for c in commits]
+
+  # Filter out merge commits for PRs
+  return list(filter(lambda c: not is_pr_merge_commit(c), commits))
+
+# Is the given commit the automatic merge commit from when merging a PR
+def is_pr_merge_commit(commit):
+  return commit.committer.login == 'web-flow' and len(commit.parents) > 1
+
+# Gets a copy of the commit message that should display nicely
+def get_truncated_commit_message(commit):
+  message = commit.commit.message.split('\n')[0]
+  if len(message) > 60:
+    return message[:57] + '...'
+  else:
+    return message
+
+# Converts a commit into the PR that introduced it to the main branch.
+# Returns the PR object, or None if no PR could be found.
+def get_pr_for_commit(repo, commit):
+  prs = commit.get_pulls()
+
+  if prs.totalCount > 0:
+    # In the case that there are multiple PRs, return the earliest one
+    prs = list(prs)
+    sorted(prs, key=lambda pr: int(pr.number))
+    return prs[0]
+  else:
+    return None
+
+# Get the person who merged the pull request.
+# For most cases this will be the same as the author, but for PRs opened
+# by external contributors getting the merger will get us the GitHub
+# employee who reviewed and merged the PR.
+def get_merger_of_pr(repo, pr):
+  return repo.get_commit(pr.merge_commit_sha).author.login
+
+def main():
+  if len(sys.argv) != 3:
+    raise Exception('Usage: update-release.branch.py <github token> <repository nwo>')
+  github_token = sys.argv[1]
+  repository_nwo = sys.argv[2]
+
+  repo = Github(github_token).get_repo(repository_nwo)
+
+  # Print what we intend to go
+  print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
+  short_main_sha = run_git('rev-parse', '--short', MAIN_BRANCH).strip()
+  print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)
+
+  # See if there are any commits to merge in
+  commits = get_commit_difference(repo)
+  if len(commits) == 0:
+    print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
+    return
+
+  # The branch name is based off of the name of branch being merged into
+  # and the SHA of the branch being merged from. Thus if the branch already
+  # exists we can assume we don't need to recreate it.
+  new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_main_sha
+  print('Branch name is ' + new_branch_name)
+
+  # Check if the branch already exists. If so we can abort as this script
+  # has already run on this combination of branches.
+  if branch_exists_on_remote(new_branch_name):
+    print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
+    return
+
+  # Create the new branch and push it to the remote
+  print('Creating branch ' + new_branch_name)
+  run_git('checkout', '-b', new_branch_name, MAIN_BRANCH)
+  run_git('push', ORIGIN, new_branch_name)
+
+  # Open a PR to update the branch
+  open_pr(repo, commits, short_main_sha, new_branch_name)
+
+if __name__ == '__main__':
+  main()
```
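One detail of the script worth calling out: `get_truncated_commit_message` keeps only the first line of a commit message and caps it at 60 characters when listing stray commits in the PR body. A standalone restatement of that rule (a sketch for illustration; `truncate_subject` is a hypothetical name, not part of the diff):

```python
# Re-statement of the truncation rule: take the first line of the message,
# and cut anything longer than 60 characters down to 57 characters plus '...'.
def truncate_subject(message: str) -> str:
    subject = message.split('\n')[0]
    return subject[:57] + '...' if len(subject) > 60 else subject

assert truncate_subject('Fix autobuild on Windows') == 'Fix autobuild on Windows'
assert len(truncate_subject('x' * 100)) == 60  # 57 characters plus the ellipsis
```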
.github/workflows/codeql.yml (vendored, 15 changes)

```diff
@@ -1,6 +1,6 @@
 name: "CodeQL action"
 
-on: [push]
+on: [push, pull_request]
 
 jobs:
   build:
@@ -11,7 +11,18 @@ jobs:
 
     steps:
     - uses: actions/checkout@v1
+      with:
+        # Must fetch at least the immediate parents so that if this is
+        # a pull request then we can checkout the head of the pull request.
+        fetch-depth: 2
+
+    # If this run was triggered by a pull request event then checkout
+    # the head of the pull request instead of the merge commit.
+    - run: git checkout HEAD^2
+      if: ${{ github.event_name == 'pull_request' }}
+
     - uses: ./init
       with:
-        config-file: ./.github/codeql/codeql-config.yml
+        languages: javascript
+        config-file: ./.github/codeql/codeql-config.yml
     - uses: ./analyze
```
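The two added steps rely on merge-commit anatomy: in a `pull_request` run the checkout lands on an ephemeral merge commit whose first parent is the base branch tip and whose second parent is the pull request head, which is why `fetch-depth: 2` suffices and `git checkout HEAD^2` selects the PR head. A quick way to inspect both parents (a sketch; it assumes it runs inside such a merge commit):

```python
import subprocess

def rev_parse(ref: str) -> str:
    # Resolve a ref to a commit SHA using plain git plumbing.
    out = subprocess.run(['git', 'rev-parse', ref],
                         capture_output=True, text=True, check=True)
    return out.stdout.strip()

base_tip = rev_parse('HEAD^1')  # tip of the base branch
pr_head = rev_parse('HEAD^2')   # head of the pull request
```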
.github/workflows/integration-testing.yml (vendored, 136 changes)

```diff
@@ -1,22 +1,126 @@
 name: "Integration Testing"
 
-on: [push]
+on: [push, pull_request]
 
 jobs:
-  dispatch-events:
-    if: github.event.repository.full_name == 'github/codeql-action'
+  multi-language-repo_test-autodetect-languages:
     runs-on: ubuntu-latest
-    steps:
-    - name: Send repository dispatch events
-      run: |
-        curl -X POST \
-        -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
-        -H "Accept: application/vnd.github.everest-preview+json" \
-        https://api.github.com/repos/Anthophila/amazon-cognito-js-copy/dispatches \
-        -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
-
-        curl -X POST \
-        -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
-        -H "Accept: application/vnd.github.everest-preview+json" \
-        https://api.github.com/repos/Anthophila/electron-test-action/dispatches \
-        -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+    - run: |
+        cd "$CODEQL_ACTION_DATABASE_DIR"
+        # List all directories as there will be precisely one directory per database
+        # but there may be other files in this directory such as query suites.
+        if [ "$(ls -d */ | wc -l)" != 6 ] || \
+           [[ ! -d cpp ]] || \
+           [[ ! -d csharp ]] || \
+           [[ ! -d go ]] || \
+           [[ ! -d java ]] || \
+           [[ ! -d javascript ]] || \
+           [[ ! -d python ]]; then
+          echo "Did not find expected number of databases. Database dir contains: $(ls)"
+          exit 1
+        fi
+
+  multi-language-repo_test-custom-queries:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest, macos-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+      with:
+        languages: cpp,csharp,java,javascript,python
+        config-file: ./.github/codeql/custom-queries.yml
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+
+  # Currently is not possible to analyze Go in conjunction with other languages in macos
+  multi-language-repo_test-go-custom-queries:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest, macos-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+    - uses: actions/setup-go@v2
+      if: ${{ matrix.os == 'macos-latest' }}
+      with:
+        go-version: '^1.13.1'
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+      with:
+        languages: go
+        config-file: ./.github/codeql/custom-queries.yml
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+
+
+  multi-language-repo_rubocop:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - name: Set up Ruby
+      uses: ruby/setup-ruby@v1
+      with:
+        ruby-version: 2.6
+    - name: Install Code Scanning integration
+      run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
+    - name: Install dependencies
+      run: bundle install
+    - name: Rubocop run
+      run: |
+        bash -c "
+          bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
+          [[ $? -ne 2 ]]
+        "
+    - uses: ./../action/upload-sarif
+      with:
+        sarif_file: rubocop.sarif
+      env:
+        TEST_MODE: true
```
.github/workflows/js-uptodate-check.yml (vendored, deleted, 27 lines)

```diff
@@ -1,27 +0,0 @@
-name: "Check generated JavaScript"
-
-on: [pull_request]
-
-jobs:
-  check-js:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v1
-    - name: Check generated JavaScript
-      run: |
-        # Sanity check that repo is clean to start with
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then this workflow needs attention...
-          >&2 echo "Failed: Repo should be clean before testing!"
-          exit 1
-        fi
-        # Generate the JavaScript files
-        npm run-script build
-        # Check that repo is still clean
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then the PR needs attention
-          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
-          exit 1
-        fi
-        echo "Success: JavaScript files are up to date"
```
.github/workflows/npm-test.yml (vendored, deleted, 12 lines)

```diff
@@ -1,12 +0,0 @@
-name: "npm run-script test"
-
-on: [push]
-
-jobs:
-  npm-test:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v1
-    - name: npm run-script test
-      run: npm run-script test
```
.github/workflows/pr-checks.yml (vendored, new file, 71 lines)

```diff
@@ -0,0 +1,71 @@
+name: "PR checks"
+
+on: [push, pull_request]
+
+jobs:
+  tslint:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: tslint
+      run: npm run-script lint
+
+  check-js:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: Check generated JavaScript
+      run: |
+        # Sanity check that repo is clean to start with
+        if [ ! -z "$(git status --porcelain)" ]; then
+          # If we get a fail here then this workflow needs attention...
+          >&2 echo "Failed: Repo should be clean before testing!"
+          exit 1
+        fi
+        # Generate the JavaScript files
+        npm run-script build
+        # Check that repo is still clean
+        if [ ! -z "$(git status --porcelain)" ]; then
+          # If we get a fail here then the PR needs attention
+          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
+          git status
+          exit 1
+        fi
+        echo "Success: JavaScript files are up to date"
+
+  check-node-modules:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: Check node modules up to date
+      run: |
+        # Sanity check that repo is clean to start with
+        if [ ! -z "$(git status --porcelain)" ]; then
+          # If we get a fail here then this workflow needs attention...
+          >&2 echo "Failed: Repo should be clean before testing!"
+          exit 1
+        fi
+
+        # Reinstall modules and then clean to remove absolute paths
+        # Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
+        npm ci
+        npm run removeNPMAbsolutePaths
+        # Check that repo is still clean
+        if [ ! -z "$(git status --porcelain)" ]; then
+          # If we get a fail here then the PR needs attention
+          >&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
+          git status
+          exit 1
+        fi
+        echo "Success: node_modules are up to date"
+
+  npm-test:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: npm run-script test
+      run: npm run-script test
```
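All of these checks lean on the same idiom: `git status --porcelain` prints one line per modified or untracked file and nothing at all for a clean tree, so a non-empty result after rebuilding means the generated files were stale. The same check in script form (a sketch, not part of the workflow):

```python
import subprocess

def working_tree_is_clean() -> bool:
    # --porcelain gives stable, machine-readable output; empty means clean.
    out = subprocess.run(['git', 'status', '--porcelain'],
                         capture_output=True, text=True, check=True)
    return out.stdout.strip() == ''
```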
.github/workflows/ts-lint.yml (vendored, deleted, 12 lines)

```diff
@@ -1,12 +0,0 @@
-name: "TSLint"
-
-on: [push]
-
-jobs:
-  tslint:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v1
-    - name: tslint
-      run: npm run-script lint
```
.github/workflows/update-release-branch.yml (vendored, new file, 31 lines)

```diff
@@ -0,0 +1,31 @@
+name: Update release branch
+on:
+  schedule:
+    - cron: 0 9 * * 1
+  repository_dispatch:
+    # Example of how to trigger this:
+    # curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
+    # Replace <token> with a personal access token from this page: https://github.com/settings/tokens
+    types: [update-release-branch]
+
+jobs:
+  update:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2
+      with:
+        # Need full history so we calculate diffs
+        fetch-depth: 0
+
+    - name: Set up Python
+      uses: actions/setup-python@v2
+      with:
+        python-version: 3.5
+
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install PyGithub==1.51 requests
+
+    - name: Update release branch
+      run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
```
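For readers not fluent in cron: the schedule `0 9 * * 1` fires at 09:00 UTC every Monday, while the `repository_dispatch` route exists for manual triggering via the curl command in the comments. Decoding the five fields (a throwaway sketch):

```python
# cron fields, left to right: minute, hour, day-of-month, month, day-of-week
minute, hour, dom, month, dow = '0 9 * * 1'.split()
assert (minute, hour, dow) == ('0', '9', '1')  # 09:00, day-of-week 1 = Monday
```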
.vscode/settings.json (vendored, new file, 10 lines)

```diff
@@ -0,0 +1,10 @@
+{
+    "files.exclude": {
+        // include the defaults from VS Code
+        "**/.git": true,
+        "**/.DS_Store": true,
+
+        // transpiled JavaScript
+        "lib": true,
+    }
+}
```
README.md (113 changes)

````diff
@@ -1,9 +1,17 @@
 # CodeQL Action
 
-This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/semmle/ql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
+This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
 
+## License
+
+This project is released under the [MIT License](LICENSE).
+
+The underlying CodeQL CLI, used in this action, is licensed under the [GitHub CodeQL Terms and Conditions](https://securitylab.github.com/tools/codeql/license). As such, this action may be used on open source projects hosted on GitHub, and on private repositories that are owned by an organisation with GitHub Advanced Security enabled.
+
 ## Usage
 
+This is a short walkthrough, but for more information read [configuring code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning).
+
 To get code scanning results from CodeQL analysis on your repo you can use the following workflow as a template:
 
 ```yaml
@@ -12,21 +20,29 @@ name: "Code Scanning - Action"
 
 on:
   push:
+  pull_request:
   schedule:
     - cron: '0 0 * * 0'
 
 jobs:
   CodeQL-Build:
 
+    strategy:
+      fail-fast: false
+
     # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
     runs-on: ubuntu-latest
 
     steps:
     - name: Checkout repository
       uses: actions/checkout@v2
       with:
         # Must fetch at least the immediate parents so that if this is
         # a pull request then we can checkout the head of the pull request.
+        # Only include this option if you are running this workflow on pull requests.
         fetch-depth: 2
 
     # If this run was triggered by a pull request event then checkout
     # the head of the pull request instead of the merge commit.
+    # Only include this step if you are running this workflow on pull requests.
     - run: git checkout HEAD^2
       if: ${{ github.event_name == 'pull_request' }}
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
@@ -72,24 +88,9 @@ If you prefer to integrate this within an existing CI workflow, it should end up
     uses: github/codeql-action/analyze@v1
 ```
 
-### Actions triggers
+### Configuration file
 
-The CodeQL action should be run on `push` events, and on a `schedule`. `Push` events allow us to do a detailed analysis of the delta in a pull request, while the `schedule` event ensures that GitHub regularly scans the repository for the latest vulnerabilities, even if the repository becomes inactive. This action does not support the `pull_request` event.
-
-### Configuration
-
-You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses).
-
-You can disable the default queries using `disable-default-queries: true`.
-
-You can choose to ignore some files or folders from the analysis, or include additional files/folders for analysis. This *only* works for Javascript and Python analysis.
-Identifying potential files for extraction:
-
-- Scans each folder that's defined as `paths` in turn, traversing subfolders, and looking for relevant files.
-- If it finds a subfolder that's defined as `paths-ignore`, stop traversing.
-- If a file or folder is both in `paths` and `paths-ignore`, the `paths-ignore` is ignored.
-
-Use the `config-file` parameter of the init action to enable the configuration file. For example:
+Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
 
 ```yaml
 - uses: github/codeql-action/init@v1
@@ -97,72 +98,8 @@ Use the `config-file` parameter of the init action to enable the configuration f
     config-file: ./.github/codeql/codeql-config.yml
 ```
 
-A config file looks like this:
-
-```yaml
-name: "My CodeQL config"
-
-disable-default-queries: true
-
-queries:
-  - name: In-repo queries (Runs the queries located in the my-queries folder of the repo)
-    uses: ./my-queries
-  - name: External Javascript QL pack (Runs a QL pack located in an external repo)
-    uses: /Semmle/ql/javascript/ql/src/Electron@master
-  - name: External query (Runs a single query located in an external QL pack)
-    uses: Semmle/ql/javascript/ql/src/AngularJS/DeadAngularJSEventListener.ql@master
-  - name: Select query suite (Runs a query suites)
-    uses: ./codeql-querypacks/complex-python-querypack/rootAndBar.qls
-
-paths:
-  - src/util.ts
-
-paths-ignore:
-  - src
-  - lib
-```
+The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
 
-## Troubleshooting
-
-### Trouble with Go dependencies
-
-#### If you use a vendor directory
-
-Try passing
-
-```yaml
-env:
-  GOFLAGS: "-mod=vendor"
-```
-
-to `github/codeql-action/analyze`.
-
-### If you do not use a vendor directory
-
-Dependencies on public repositories should just work. If you have dependencies on private repositories, one option is to use `git config` and a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) to authenticate when downloading dependencies. Add a section like
-
-```yaml
-steps:
-- name: Configure git private repo access
-  env:
-    TOKEN: ${{ secrets.GITHUB_PAT }}
-  run: |
-    git config --global url."https://${TOKEN}@github.com/foo/bar".insteadOf "https://github.com/foo/bar"
-    git config --global url."https://${TOKEN}@github.com/foo/baz".insteadOf "https://github.com/foo/baz"
-```
-
-before any codeql actions. A similar thing can also be done with an SSH key or deploy key.
-
-### C# using dotnet version 2 on linux
-
-This currently requires invoking `dotnet` with the `/p:UseSharedCompilation=false` flag. For example:
-
-```shell
-dotnet build /p:UseSharedCompilation=false
-```
-
-Version 3 does not require the additional flag.
-
-## License
-
-This project is released under the [MIT License](LICENSE).
+Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
````
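The removed bullets above are still the clearest statement of the include/exclude semantics at the time, including the edge case that an entry listed in both `paths` and `paths-ignore` stays included. That conflict rule alone, restated (a toy sketch, not the action's implementation):

```python
def effectively_ignored(entry: str, paths: list, paths_ignore: list) -> bool:
    # "If a file or folder is both in `paths` and `paths-ignore`,
    #  the `paths-ignore` is ignored."
    return entry in paths_ignore and entry not in paths

assert effectively_ignored('lib', paths=[], paths_ignore=['lib'])
assert not effectively_ignored('src', paths=['src'], paths_ignore=['src'])
```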
Action definition (the file header for this diff was not captured; the inputs match the analyze action's action.yml)

```diff
@@ -4,6 +4,7 @@ author: 'GitHub'
 inputs:
   check_name:
+    description: The name of the check run to add text to.
     required: false
   output:
     description: The path of the directory in which to save the SARIF results
     required: false
@@ -11,7 +12,14 @@ inputs:
   upload:
     description: Upload the SARIF file
     required: false
-    default: true
+    default: "true"
+  ram:
+    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
+    required: false
+  threads:
+    description: The number of threads to be used by CodeQL.
+    required: false
+    default: "1"
   token:
     default: ${{ github.token }}
   matrix:
```
Action definition (file header not captured; the inputs match the init action's action.yml)

```diff
@@ -5,12 +5,14 @@ inputs:
   tools:
     description: URL of CodeQL tools
     required: false
-    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
+    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz
   languages:
     description: The languages to be analysed
     required: false
   token:
     default: ${{ github.token }}
+  matrix:
+    default: ${{ toJson(matrix) }}
   config-file:
     description: Path of the config file to use
     required: false
```
Deleted file (file header not captured; the content is a Jest configuration)

```diff
@@ -1,11 +0,0 @@
-module.exports = {
-  clearMocks: true,
-  moduleFileExtensions: ['js', 'ts'],
-  testEnvironment: 'node',
-  testMatch: ['**/*.test.ts'],
-  testRunner: 'jest-circus/runner',
-  transform: {
-    '^.+\\.ts$': 'ts-jest'
-  },
-  verbose: true
-}
```
lib/analysis-paths.js (generated, 3 changes)

```diff
@@ -16,7 +16,7 @@ function includeAndExcludeAnalysisPaths(config, languages) {
         core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
     }
     function isInterpretedLanguage(language) {
-        return language === 'javascript' && language === 'python';
+        return language === 'javascript' || language === 'python';
     }
     // Index include/exclude only work in javascript and python
     // If some other language is detected/configured show a warning
@@ -25,3 +25,4 @@ function includeAndExcludeAnalysisPaths(config, languages) {
+    }
 }
 exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
 //# sourceMappingURL=analysis-paths.js.map
```
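The one-character fix above is worth dwelling on: a single string can never equal both `'javascript'` and `'python'`, so the `&&` version of the predicate was always false and the paths warning logic could never recognise an interpreted language. A truth-table check of both versions (standalone sketch):

```python
def is_interpreted_broken(language: str) -> bool:
    return language == 'javascript' and language == 'python'  # always False

def is_interpreted_fixed(language: str) -> bool:
    return language == 'javascript' or language == 'python'

langs = ['javascript', 'python', 'java']
assert [is_interpreted_broken(l) for l in langs] == [False, False, False]
assert [is_interpreted_fixed(l) for l in langs] == [True, True, False]
```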
lib/analysis-paths.js.map (new file, 1 line)

```diff
@@ -0,0 +1 @@
+{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAgB,8BAA8B,CAAC,MAA0B,EAAE,SAAmB;IAC5F,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KACpE;IAED,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC1E;IAED,SAAS,qBAAqB,CAAC,QAAQ;QACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;IAC5D,CAAC;IAED,2DAA2D;IAC3D,+DAA+D;IAC/D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAC7G,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAlBD,wEAkBC"}
```
lib/analysis-paths.test.js (generated, new file, 32 lines)

```diff
@@ -0,0 +1,32 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ava_1 = __importDefault(require("ava"));
+const analysisPaths = __importStar(require("./analysis-paths"));
+const configUtils = __importStar(require("./config-utils"));
+const testing_utils_1 = require("./testing-utils");
+testing_utils_1.silenceDebugOutput(ava_1.default);
+ava_1.default("emptyPaths", async (t) => {
+    let config = new configUtils.Config();
+    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
+    t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
+    t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
+});
+ava_1.default("nonEmptyPaths", async (t) => {
+    let config = new configUtils.Config();
+    config.paths.push('path1', 'path2');
+    config.pathsIgnore.push('path3', 'path4');
+    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
+    t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
+    t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path3\npath4');
+});
+//# sourceMappingURL=analysis-paths.test.js.map
```
lib/analysis-paths.test.js.map (new file, 1 line)

```diff
@@ -0,0 +1 @@
+{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,4DAA8C;AAC9C,mDAAmD;AAEnD,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IACpC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC1C,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;AAC1D,CAAC,CAAC,CAAC"}
```
lib/api-client.js (generated, new file, 23 lines)

```diff
@@ -0,0 +1,23 @@
+"use strict";
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(require("@actions/core"));
+const octokit = __importStar(require("@octokit/rest"));
+const console_log_level_1 = __importDefault(require("console-log-level"));
+const githubAPIURL = process.env["GITHUB_API_URL"] || "https://api.github.com";
+exports.client = new octokit.Octokit({
+    auth: core.getInput("token"),
+    baseUrl: githubAPIURL,
+    userAgent: "CodeQL Action",
+    log: console_log_level_1.default({ level: "debug" })
+});
+//# sourceMappingURL=api-client.js.map
```
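Note how the client resolves its base URL: `GITHUB_API_URL` is honoured first, so the same code can talk to a GitHub Enterprise Server instance, with the public API as the fallback. The same resolution rule restated (a sketch):

```python
import os

def github_api_url() -> str:
    # Environment override first (set on GitHub Enterprise Server runners),
    # public API second, mirroring lib/api-client.js.
    return os.environ.get('GITHUB_API_URL') or 'https://api.github.com'
```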
lib/api-client.js.map (new file, 1 line)

```diff
@@ -0,0 +1 @@
+{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,uDAAyC;AACzC,0EAAgD;AAEhD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,IAAI,wBAAwB,CAAC;AAClE,QAAA,MAAM,GAAG,IAAI,OAAO,CAAC,OAAO,CAAC;IACxC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC;IAC5B,OAAO,EAAE,YAAY;IACrB,SAAS,EAAE,eAAe;IAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;CACzC,CAAC,CAAC"}
```
lib/autobuild.js (generated, 11 changes)

```diff
@@ -22,12 +22,16 @@ async function run() {
         // We want pick the dominant language in the repo from the ones we're able to build
         // The languages are sorted in order specified by user or by lines of code if we got
         // them from the GitHub API, so try to build the first language on the list.
-        const language = (_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')[0];
+        const autobuildLanguages = ((_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];
+        const language = autobuildLanguages[0];
         if (!language) {
             core.info("None of the languages in this project require extra build steps");
             return;
         }
         core.debug(`Detected dominant traced language: ${language}`);
+        if (autobuildLanguages.length > 1) {
+            core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
+        }
         core.startGroup(`Attempting to automatically build ${language} code`);
         // TODO: share config accross actions better via env variables
         const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
@@ -44,13 +48,14 @@ async function run() {
         core.endGroup();
     }
     catch (error) {
-        core.setFailed(error.message);
+        core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
         await util.reportActionFailed('autobuild', error.message, error.stack);
+        return;
     }
     await util.reportActionSucceeded('autobuild');
 }
 run().catch(e => {
-    core.setFailed("autobuild action failed: " + e);
+    core.setFailed("autobuild action failed. " + e);
     console.log(e);
 });
 //# sourceMappingURL=autobuild.js.map
```
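The behavioural change above: instead of silently building only the first traced language, the action now keeps the whole list so it can warn about the languages it will skip. The selection rule restated standalone (a sketch of the logic, with hypothetical names):

```python
def pick_autobuild_language(traced_languages: str):
    # The env var is a comma-separated list ordered by user preference or
    # lines of code; only the first entry is built automatically.
    languages = traced_languages.split(',') if traced_languages else []
    if not languages:
        return None, []  # no language needs extra build steps
    return languages[0], languages[1:]  # build the first, warn about the rest

assert pick_autobuild_language('') == (None, [])
assert pick_autobuild_language('java,go') == ('java', ['go'])
```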
lib/autobuild.js.map (new file, 1 line)

```diff
@@ -0,0 +1 @@
+{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,2CAA6B;AAE7B,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,8DAA8D;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC;QAExE,MAAM,OAAO,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,cAAc,CAAC;QAChF,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;QAGpF,+DAA+D;QAC/D,0FAA0F;QAC1F,qDAAqD;QACrD,8EAA8E;QAC9E,gHAAgH;QAChH,IAAI,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,wBAAwB,EAAE,+BAA+B,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAE1I,MAAM,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC9B,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
```
208
lib/config-utils.js
generated
208
lib/config-utils.js
generated
@@ -12,6 +12,13 @@ const io = __importStar(require("@actions/io"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const path = __importStar(require("path"));
|
||||
const util = __importStar(require("./util"));
|
||||
const NAME_PROPERTY = 'name';
|
||||
const DISPLAY_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
|
||||
const QUERIES_PROPERTY = 'queries';
|
||||
const QUERIES_USES_PROPERTY = 'uses';
|
||||
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
|
||||
const PATHS_PROPERTY = 'paths';
|
||||
class ExternalQuery {
|
||||
constructor(repository, ref) {
|
||||
this.path = '';
|
||||
@@ -20,39 +27,74 @@ class ExternalQuery {
|
||||
}
|
||||
}
|
||||
exports.ExternalQuery = ExternalQuery;
|
||||
// The set of acceptable values for built-in suites from the codeql bundle
|
||||
const builtinSuites = ['security-extended', 'security-and-quality'];
|
||||
class Config {
|
||||
constructor() {
|
||||
this.name = "";
|
||||
this.disableDefaultQueries = false;
|
||||
this.additionalQueries = [];
|
||||
this.externalQueries = [];
|
||||
this.additionalSuites = [];
|
||||
this.pathsIgnore = [];
|
||||
this.paths = [];
|
||||
}
|
||||
addQuery(queryUses) {
|
||||
addQuery(configFile, queryUses) {
|
||||
// The logic for parsing the string is based on what actions does for
|
||||
// parsing the 'uses' actions in the workflow file
|
||||
queryUses = queryUses.trim();
|
||||
if (queryUses === "") {
|
||||
throw '"uses" value for queries cannot be blank';
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
}
|
||||
// Check for the local path case before we start trying to parse the repository name
|
||||
if (queryUses.startsWith("./")) {
|
||||
this.additionalQueries.push(queryUses.slice(2));
|
||||
const localQueryPath = queryUses.slice(2);
|
||||
// Resolve the local path against the workspace so that when this is
|
||||
// passed to codeql it resolves to exactly the path we expect it to resolve to.
|
||||
const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
|
||||
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
|
||||
// Check the file exists
|
||||
if (!fs.existsSync(absoluteQueryPath)) {
|
||||
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
|
||||
}
|
||||
// Call this after checking file exists, because it'll fail if file doesn't exist
|
||||
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
|
||||
// Check the local path doesn't jump outside the repo using '..' or symlinks
|
||||
if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
|
||||
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
|
||||
}
|
||||
this.additionalQueries.push(absoluteQueryPath);
|
||||
return;
|
||||
}
|
||||
// Check for one of the builtin suites
|
||||
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
|
||||
const suite = builtinSuites.find((suite) => suite === queryUses);
|
||||
if (suite) {
|
||||
this.additionalSuites.push(suite);
|
||||
return;
|
||||
}
|
||||
else {
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
}
|
||||
let tok = queryUses.split('@');
|
||||
if (tok.length !== 2) {
|
||||
throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
const ref = tok[1];
|
||||
tok = tok[0].split('/');
|
||||
// The first token is the owner
|
||||
// The second token is the repo
|
||||
// The rest is a path, if there is more than one token combine them to form the full path
|
||||
if (tok.length < 2) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
if (tok.length > 3) {
|
||||
tok = [tok[0], tok[1], tok.slice(2).join('/')];
|
||||
}
|
||||
if (tok.length < 2) {
|
||||
throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
|
||||
// Check none of the parts of the repository name are empty
|
||||
if (tok[0].trim() === '' || tok[1].trim() === '') {
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
|
||||
if (tok.length === 3) {
|
||||
@@ -62,62 +104,137 @@ class Config {
|
||||
}
|
||||
}
|
||||
exports.Config = Config;
|
||||
const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
|
||||
function getNameInvalid(configFile) {
|
||||
return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
|
||||
}
|
||||
exports.getNameInvalid = getNameInvalid;
|
||||
function getDisableDefaultQueriesInvalid(configFile) {
|
||||
return getConfigFilePropertyError(configFile, DISPLAY_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
|
||||
}
|
||||
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
|
||||
function getQueriesInvalid(configFile) {
|
||||
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
|
||||
}
|
||||
exports.getQueriesInvalid = getQueriesInvalid;
|
||||
function getQueryUsesInvalid(configFile, queryUses) {
|
||||
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
|
||||
'), a relative path, or be of the form "owner/repo[/path]@ref"' +
|
||||
(queryUses !== undefined ? '\n Found: ' + queryUses : ''));
|
||||
}
|
||||
exports.getQueryUsesInvalid = getQueryUsesInvalid;
|
||||
function getPathsIgnoreInvalid(configFile) {
|
||||
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
|
||||
}
|
||||
exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
|
||||
function getPathsInvalid(configFile) {
|
||||
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
|
||||
}
|
||||
exports.getPathsInvalid = getPathsInvalid;
|
||||
function getLocalPathOutsideOfRepository(configFile, localPath) {
|
||||
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
|
||||
}
|
||||
exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
|
||||
function getLocalPathDoesNotExist(configFile, localPath) {
|
||||
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
|
||||
}
|
||||
exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
|
||||
function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
|
||||
return 'The configuration file "' + configFile + '" is outside of the workspace';
|
||||
}
|
||||
exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
|
||||
function getConfigFileDoesNotExistErrorMessage(configFile) {
|
||||
    return 'The configuration file "' + configFile + '" does not exist';
}
exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
function getConfigFilePropertyError(configFile, property, error) {
    return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
}
function initConfig() {
-    const configFile = core.getInput('config-file');
+    let configFile = core.getInput('config-file');
    const config = new Config();
    // If no config file was provided create an empty one
    if (configFile === '') {
        core.debug('No configuration file was provided');
        return config;
    }
-    try {
-        const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
-        if (parsedYAML.name && typeof parsedYAML.name === "string") {
-            config.name = parsedYAML.name;
-        }
-        if (parsedYAML['disable-default-queries'] && typeof parsedYAML['disable-default-queries'] === "boolean") {
-            config.disableDefaultQueries = parsedYAML['disable-default-queries'];
-        }
-        const queries = parsedYAML.queries;
-        if (queries && queries instanceof Array) {
-            queries.forEach(query => {
-                if (query.uses && typeof query.uses === "string") {
-                    config.addQuery(query.uses);
-                }
-            });
-        }
-        const pathsIgnore = parsedYAML['paths-ignore'];
-        if (pathsIgnore && pathsIgnore instanceof Array) {
-            pathsIgnore.forEach(path => {
-                if (typeof path === "string") {
-                    config.pathsIgnore.push(path);
-                }
-            });
-        }
-        const paths = parsedYAML.paths;
-        if (paths && paths instanceof Array) {
-            paths.forEach(path => {
-                if (typeof path === "string") {
-                    config.paths.push(path);
-                }
-            });
-        }
+    // Treat the config file as relative to the workspace
+    const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
+    configFile = path.resolve(workspacePath, configFile);
+    // Error if the config file is now outside of the workspace
+    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
+        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
    }
-    catch (err) {
-        core.setFailed(err);
+    // Error if the file does not exist
+    if (!fs.existsSync(configFile)) {
+        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
    }
+    const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
+    if (NAME_PROPERTY in parsedYAML) {
+        if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
+            throw new Error(getNameInvalid(configFile));
+        }
+        if (parsedYAML[NAME_PROPERTY].length === 0) {
+            throw new Error(getNameInvalid(configFile));
+        }
+        config.name = parsedYAML[NAME_PROPERTY];
+    }
+    if (DISPLAY_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
+        if (typeof parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
+            throw new Error(getDisableDefaultQueriesInvalid(configFile));
+        }
+        config.disableDefaultQueries = parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY];
+    }
+    if (QUERIES_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
+            throw new Error(getQueriesInvalid(configFile));
+        }
+        parsedYAML[QUERIES_PROPERTY].forEach(query => {
+            if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
+                throw new Error(getQueryUsesInvalid(configFile));
+            }
+            config.addQuery(configFile, query[QUERIES_USES_PROPERTY]);
+        });
+    }
+    if (PATHS_IGNORE_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
+            throw new Error(getPathsIgnoreInvalid(configFile));
+        }
+        parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
+            if (typeof path !== "string" || path === '') {
+                throw new Error(getPathsIgnoreInvalid(configFile));
+            }
+            config.pathsIgnore.push(path);
+        });
+    }
+    if (PATHS_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
+            throw new Error(getPathsInvalid(configFile));
+        }
+        parsedYAML[PATHS_PROPERTY].forEach(path => {
+            if (typeof path !== "string" || path === '') {
+                throw new Error(getPathsInvalid(configFile));
+            }
+            config.paths.push(path);
+        });
+    }
    return config;
}
+function getConfigFolder() {
+    return util.getRequiredEnvParam('RUNNER_TEMP');
+}
+function getConfigFile() {
+    return path.join(getConfigFolder(), 'config');
+}
+exports.getConfigFile = getConfigFile;
async function saveConfig(config) {
    const configString = JSON.stringify(config);
-    await io.mkdirP(configFolder);
-    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
+    await io.mkdirP(getConfigFolder());
+    fs.writeFileSync(getConfigFile(), configString, 'utf8');
    core.debug('Saved config:');
    core.debug(configString);
}
async function loadConfig() {
-    const configFile = path.join(configFolder, 'config');
+    const configFile = getConfigFile();
    if (fs.existsSync(configFile)) {
        const configString = fs.readFileSync(configFile, 'utf8');
        core.debug('Loaded config:');
@@ -133,3 +250,4 @@ async function loadConfig() {
    }
}
exports.loadConfig = loadConfig;
//# sourceMappingURL=config-utils.js.map
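The rewritten initConfig above fails fast on malformed configuration instead of silently skipping bad values. A minimal standalone sketch of the same validate-then-assign pattern, assuming js-yaml 3.x (the property names and error text here are illustrative, not the action's own API):

```js
// Sketch: strict validate-then-assign config parsing, assuming js-yaml 3.x.
const yaml = require('js-yaml');

function parseStrict(text) {
    const doc = yaml.safeLoad(text) || {};
    const config = { name: undefined, paths: [] };
    if ('name' in doc) {
        if (typeof doc.name !== 'string' || doc.name.length === 0) {
            throw new Error('property "name" must be a non-empty string');
        }
        config.name = doc.name;
    }
    if ('paths' in doc) {
        if (!Array.isArray(doc.paths)) {
            throw new Error('property "paths" must be an array');
        }
        for (const p of doc.paths) {
            if (typeof p !== 'string' || p === '') {
                throw new Error('property "paths" must contain non-empty strings');
            }
            config.paths.push(p);
        }
    }
    return config;
}

console.log(parseStrict('name: my config\npaths:\n  - src'));
```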
1  lib/config-utils.js.map  Normal file
File diff suppressed because one or more lines are too long
164  lib/config-utils.test.js  generated  Normal file
@@ -0,0 +1,164 @@
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
testing_utils_1.silenceDebugOutput(ava_1.default);
|
||||
function setInput(name, value) {
|
||||
// Transformation copied from
|
||||
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
|
||||
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
|
||||
if (value !== undefined) {
|
||||
process.env[envVar] = value;
|
||||
}
|
||||
else {
|
||||
delete process.env[envVar];
|
||||
}
|
||||
}
|
||||
ava_1.default("load empty config", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
setInput('config-file', undefined);
|
||||
const config = await configUtils.loadConfig();
|
||||
t.deepEqual(config, new configUtils.Config());
|
||||
});
|
||||
});
|
||||
ava_1.default("loading config saves config", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
const configFile = configUtils.getConfigFile();
|
||||
// Sanity check the saved config file does not already exist
|
||||
t.false(fs.existsSync(configFile));
|
||||
const config = await configUtils.loadConfig();
|
||||
// The saved config file should now exist
|
||||
t.true(fs.existsSync(configFile));
|
||||
// And the contents should parse correctly to the config that was returned
|
||||
t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
|
||||
});
|
||||
});
|
||||
ava_1.default("load input outside of workspace", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
setInput('config-file', '../input');
|
||||
try {
|
||||
await configUtils.loadConfig();
|
||||
throw new Error('loadConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
|
||||
}
|
||||
});
|
||||
});
|
||||
ava_1.default("load non-existent input", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
t.false(fs.existsSync(path.join(tmpDir, 'input')));
|
||||
setInput('config-file', 'input');
|
||||
try {
|
||||
await configUtils.loadConfig();
|
||||
throw new Error('loadConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
|
||||
}
|
||||
});
|
||||
});
|
||||
ava_1.default("load non-empty input", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
// Just create a generic config object with non-default values for all fields
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
disable-default-queries: true
|
||||
queries:
|
||||
- uses: ./
|
||||
- uses: ./foo
|
||||
- uses: foo/bar@dev
|
||||
paths-ignore:
|
||||
- a
|
||||
- b
|
||||
paths:
|
||||
- c/d`;
|
||||
fs.mkdirSync(path.join(tmpDir, 'foo'));
|
||||
// And the config we expect it to parse to
|
||||
const expectedConfig = new configUtils.Config();
|
||||
expectedConfig.name = 'my config';
|
||||
expectedConfig.disableDefaultQueries = true;
|
||||
expectedConfig.additionalQueries.push(fs.realpathSync(tmpDir));
|
||||
expectedConfig.additionalQueries.push(fs.realpathSync(path.join(tmpDir, 'foo')));
|
||||
expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
|
||||
expectedConfig.pathsIgnore = ['a', 'b'];
|
||||
expectedConfig.paths = ['c/d'];
|
||||
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
const actualConfig = await configUtils.loadConfig();
|
||||
// Should exactly equal the object we constructed earlier
|
||||
t.deepEqual(actualConfig, expectedConfig);
|
||||
});
|
||||
});
|
||||
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
|
||||
ava_1.default("load invalid input - " + testName, async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
const inputFile = path.join(tmpDir, 'input');
|
||||
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
try {
|
||||
await configUtils.loadConfig();
|
||||
throw new Error('loadConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
doInvalidInputTest('name invalid type', `
|
||||
name:
|
||||
- foo: bar`, configUtils.getNameInvalid);
|
||||
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
|
||||
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
|
||||
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
|
||||
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
|
||||
doInvalidInputTest('queries uses invalid type', `
|
||||
queries:
|
||||
- uses:
|
||||
- hello: world`, configUtils.getQueryUsesInvalid);
|
||||
function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
|
||||
// Invalid contents of a "queries.uses" field.
|
||||
// Should fail with the expected error message
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
queries:
|
||||
- name: foo
|
||||
uses: ` + input;
|
||||
doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
|
||||
}
|
||||
// Various "uses" fields, and the errors they should produce
|
||||
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
|
||||
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
|
||||
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
|
||||
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
|
||||
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
|
||||
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
|
||||
doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
|
||||
//# sourceMappingURL=config-utils.test.js.map
|
||||
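The setInput helper in these tests mirrors the actions runner's convention of passing each input as an INPUT_* environment variable, which is what @actions/core reads back. A small round trip under that assumption:

```js
// Sketch: the INPUT_* round trip that setInput relies on, assuming
// @actions/core is installed. The runner applies the same transformation.
const core = require('@actions/core');

function setInput(name, value) {
    process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] = value;
}

setInput('config-file', '.github/codeql/config.yml');
console.log(core.getInput('config-file')); // => .github/codeql/config.yml
```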
1  lib/config-utils.test.js.map  Normal file
File diff suppressed because one or more lines are too long
4  lib/external-queries.js  generated
@@ -11,8 +11,9 @@ const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
+const util = __importStar(require("./util"));
async function checkoutExternalQueries(config) {
-    const folder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
+    const folder = util.getRequiredEnvParam('RUNNER_TEMP');
    for (const externalQuery of config.externalQueries) {
        core.info('Checking out ' + externalQuery.repository);
        const checkoutLocation = path.join(folder, externalQuery.repository);
@@ -29,3 +30,4 @@ async function checkoutExternalQueries(config) {
    }
}
exports.checkoutExternalQueries = checkoutExternalQueries;
//# sourceMappingURL=external-queries.js.map
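checkoutExternalQueries pins each external query repository to an exact commit inside RUNNER_TEMP. A rough sketch of an equivalent clone-then-checkout flow (the git invocations are an assumption inferred from the source map below, not a verbatim excerpt):

```js
// Sketch: pin an external query repo to an exact commit, assuming git is
// on PATH and RUNNER_TEMP is set (as on a hosted runner).
const exec = require('@actions/exec');
const path = require('path');

async function checkoutAtRef(repository, ref) {
    const folder = process.env['RUNNER_TEMP'] || '/tmp';
    const checkoutLocation = path.join(folder, repository);
    await exec.exec('git', ['clone', 'https://github.com/' + repository, checkoutLocation]);
    await exec.exec('git', [
        '--work-tree=' + checkoutLocation,
        '--git-dir=' + checkoutLocation + '/.git',
        'checkout', ref,
    ]);
    return checkoutLocation;
}

// Usage mirroring the test later in this diff:
// await checkoutAtRef('github/codeql-go', 'df4c6869212341b601005567381944ed90906b6b');
```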
1  lib/external-queries.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}
33  lib/external-queries.test.js  generated  Normal file
@@ -0,0 +1,33 @@
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const externalQueries = __importStar(require("./external-queries"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
testing_utils_1.silenceDebugOutput(ava_1.default);
|
||||
ava_1.default("checkoutExternalQueries", async (t) => {
|
||||
let config = new configUtils.Config();
|
||||
config.externalQueries = [
|
||||
new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
|
||||
];
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["RUNNER_TEMP"] = tmpDir;
|
||||
await externalQueries.checkoutExternalQueries(config);
|
||||
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
|
||||
t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=external-queries.test.js.map
|
||||
1  lib/external-queries.test.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACvB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAC9F,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
98  lib/finalize-db.js  generated
@@ -17,6 +17,25 @@ const externalQueries = __importStar(require("./external-queries"));
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
+/**
+ * A list of queries from https://github.com/github/codeql that
+ * we don't want to run. Disabling them here is a quicker alternative to
+ * disabling them in the code scanning query suites. Queries should also
+ * be disabled in the suites, and removed from this list here once the
+ * bundle is updated to make those suite changes live.
+ *
+ * Format is a map from language to an array of path suffixes of .ql files.
+ */
+const DISABLED_BUILTIN_QUERIES = {
+    'csharp': [
+        'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
+        'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
+    ]
+};
+function queryIsDisabled(language, query) {
+    return (DISABLED_BUILTIN_QUERIES[language] || [])
+        .some(disabledQuery => query.endsWith(disabledQuery));
+}
async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
    const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
    if (scannedLanguages) {
@@ -49,26 +68,50 @@ async function finalizeDatabaseCreation(codeqlCmd, databaseFolder) {
        core.endGroup();
    }
}
+async function runResolveQueries(codeqlCmd, queries) {
+    let output = '';
+    const options = {
+        listeners: {
+            stdout: (data) => {
+                output += data.toString();
+            }
+        }
+    };
+    await exec.exec(codeqlCmd, [
+        'resolve',
+        'queries',
+        ...queries,
+        '--format=bylanguage'
+    ], options);
+    return JSON.parse(output);
+}
async function resolveQueryLanguages(codeqlCmd, config) {
    let res = new Map();
-    if (config.additionalQueries.length !== 0) {
-        let resolveQueriesOutput = '';
-        const options = {
-            listeners: {
-                stdout: (data) => {
-                    resolveQueriesOutput += data.toString();
-                }
+    if (!config.disableDefaultQueries || config.additionalSuites.length !== 0) {
+        const suites = [];
+        for (const language of await util.getLanguages()) {
+            if (!config.disableDefaultQueries) {
+                suites.push(language + '-code-scanning.qls');
            }
-        };
-        await exec.exec(codeqlCmd, [
-            'resolve',
-            'queries',
-            ...config.additionalQueries,
-            '--format=bylanguage'
-        ], options);
-        const resolveQueriesOutputObject = JSON.parse(resolveQueriesOutput);
+            for (const additionalSuite of config.additionalSuites) {
+                suites.push(language + '-' + additionalSuite + '.qls');
+            }
+        }
+        const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, suites);
        for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
-            res[language] = Object.keys(queries);
+            if (res[language] === undefined) {
+                res[language] = [];
+            }
+            res[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
        }
    }
+    if (config.additionalQueries.length !== 0) {
+        const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, config.additionalQueries);
+        for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
+            if (res[language] === undefined) {
+                res[language] = [];
+            }
+            res[language].push(...Object.keys(queries));
+        }
        const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
        const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
@@ -88,20 +131,27 @@ async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
    const queriesPerLanguage = await resolveQueryLanguages(codeqlCmd, config);
    for (let database of fs.readdirSync(databaseFolder)) {
        core.startGroup('Analyzing ' + database);
-        const queries = [];
-        if (!config.disableDefaultQueries) {
-            queries.push(database + '-code-scanning.qls');
+        const queries = queriesPerLanguage[database] || [];
+        if (queries.length === 0) {
+            throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
        }
-        queries.push(...(queriesPerLanguage[database] || []));
+        // Pass the queries to codeql using a file instead of using the command
+        // line to avoid command line length restrictions, particularly on windows.
+        const querySuite = path.join(databaseFolder, database + '-queries.qls');
+        const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
+        fs.writeFileSync(querySuite, querySuiteContents);
+        core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
        const sarifFile = path.join(sarifFolder, database + '.sarif');
        await exec.exec(codeqlCmd, [
            'database',
            'analyze',
            util.getMemoryFlag(),
            util.getThreadsFlag(),
            path.join(databaseFolder, database),
            '--format=sarif-latest',
            '--output=' + sarifFile,
            '--no-sarif-add-snippets',
-            ...queries
+            querySuite
        ]);
        core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
        core.endGroup();
@@ -125,7 +175,10 @@ async function run() {
        core.info('Analyzing database');
        await runQueries(codeqlCmd, databaseFolder, sarifFolder, config);
        if ('true' === core.getInput('upload')) {
-            await upload_lib.upload(sarifFolder);
+            if (!await upload_lib.upload(sarifFolder)) {
+                await util.reportActionFailed('finish', 'upload');
+                return;
+            }
        }
    }
    catch (error) {
@@ -139,3 +192,4 @@ run().catch(e => {
    core.setFailed("analyze action failed: " + e);
    console.log(e);
});
//# sourceMappingURL=finalize-db.js.map
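Writing the selected queries into a .qls suite file, as runQueries now does, sidesteps per-OS command-line length limits. A small illustration of the generated file format (the query paths are made up):

```js
// Sketch: emit a query-suite file like runQueries does, so the codeql CLI
// reads a potentially huge query list from disk instead of from argv.
const fs = require('fs');
const path = require('path');

const queries = [
    '/opt/queries/Security/CWE-079/Xss.ql',
    '/opt/queries/Security/CWE-089/SqlInjection.ql',
];
const querySuite = path.join(process.env['RUNNER_TEMP'] || '/tmp', 'javascript-queries.qls');
fs.writeFileSync(querySuite, queries.map(q => '- query: ' + q).join('\n'));
// Then: codeql database analyze <db> ... <querySuite>
```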
1  lib/finalize-db.js.map  Normal file
File diff suppressed because one or more lines are too long
2  lib/finalize-db.test.js  generated  Normal file
@@ -0,0 +1,2 @@
"use strict";
|
||||
//# sourceMappingURL=finalize-db.test.js.map
|
||||
1  lib/finalize-db.test.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"finalize-db.test.js","sourceRoot":"","sources":["../src/finalize-db.test.ts"],"names":[],"mappings":""}
1  lib/fingerprints.js  generated
@@ -245,3 +245,4 @@ function addFingerprints(sarifContents) {
    return JSON.stringify(sarif);
}
exports.addFingerprints = addFingerprints;
//# sourceMappingURL=fingerprints.js.map
1  lib/fingerprints.js.map  Normal file
File diff suppressed because one or more lines are too long
159  lib/fingerprints.test.js  generated  Normal file
@@ -0,0 +1,159 @@
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const fingerprints = __importStar(require("./fingerprints"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
testing_utils_1.silenceDebugOutput(ava_1.default);
|
||||
function testHash(t, input, expectedHashes) {
|
||||
let index = 0;
|
||||
let callback = function (lineNumber, hash) {
|
||||
t.is(lineNumber, index + 1);
|
||||
t.is(hash, expectedHashes[index]);
|
||||
index++;
|
||||
};
|
||||
fingerprints.hash(callback, input);
|
||||
t.is(index, input.split(/\r\n|\r|\n/).length);
|
||||
}
|
||||
ava_1.default('hash', (t) => {
|
||||
// Try empty file
|
||||
testHash(t, "", ["c129715d7a2bc9a3:1"]);
|
||||
// Try various combinations of newline characters
|
||||
testHash(t, " a\nb\n \t\tc\n d", [
|
||||
"271789c17abda88f:1",
|
||||
"54703d4cd895b18:1",
|
||||
"180aee12dab6264:1",
|
||||
"a23a3dc5e078b07b:1"
|
||||
]);
|
||||
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
|
||||
"8b7cf3e952e7aeb2:1",
|
||||
"b1ae1287ec4718d9:1",
|
||||
"bff680108adb0fcc:1",
|
||||
"c6805c5e1288b612:1",
|
||||
"b86d3392aea1be30:1",
|
||||
"e6ceba753e1a442:1",
|
||||
]);
|
||||
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n", [
|
||||
"e9496ae3ebfced30:1",
|
||||
"fb7c023a8b9ccb3f:1",
|
||||
"ce8ba1a563dcdaca:1",
|
||||
"e20e36e16fcb0cc8:1",
|
||||
"b3edc88f2938467e:1",
|
||||
"c8e28b0b4002a3a0:1",
|
||||
"c129715d7a2bc9a3:1",
|
||||
]);
|
||||
testHash(t, " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r", [
|
||||
"e9496ae3ebfced30:1",
|
||||
"fb7c023a8b9ccb3f:1",
|
||||
"ce8ba1a563dcdaca:1",
|
||||
"e20e36e16fcb0cc8:1",
|
||||
"b3edc88f2938467e:1",
|
||||
"c8e28b0b4002a3a0:1",
|
||||
"c129715d7a2bc9a3:1",
|
||||
]);
|
||||
testHash(t, " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n", [
|
||||
"e9496ae3ebfced30:1",
|
||||
"fb7c023a8b9ccb3f:1",
|
||||
"ce8ba1a563dcdaca:1",
|
||||
"e20e36e16fcb0cc8:1",
|
||||
"b3edc88f2938467e:1",
|
||||
"c8e28b0b4002a3a0:1",
|
||||
"c129715d7a2bc9a3:1",
|
||||
]);
|
||||
testHash(t, " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n", [
|
||||
"e9496ae3ebfced30:1",
|
||||
"fb7c023a8b9ccb3f:1",
|
||||
"ce8ba1a563dcdaca:1",
|
||||
"e20e36e16fcb0cc8:1",
|
||||
"b3edc88f2938467e:1",
|
||||
"c8e28b0b4002a3a0:1",
|
||||
"c129715d7a2bc9a3:1",
|
||||
]);
|
||||
// Try repeating line that will generate identical hashes
|
||||
testHash(t, "Lorem ipsum dolor sit amet.\n".repeat(10), [
|
||||
"a7f2ff13bc495cf2:1",
|
||||
"a7f2ff13bc495cf2:2",
|
||||
"a7f2ff13bc495cf2:3",
|
||||
"a7f2ff13bc495cf2:4",
|
||||
"a7f2ff13bc495cf2:5",
|
||||
"a7f2ff13bc495cf2:6",
|
||||
"a7f2ff1481e87703:1",
|
||||
"a9cf91f7bbf1862b:1",
|
||||
"55ec222b86bcae53:1",
|
||||
"cc97dc7b1d7d8f7b:1",
|
||||
"c129715d7a2bc9a3:1"
|
||||
]);
|
||||
});
|
||||
function testResolveUriToFile(uri, index, artifactsURIs) {
|
||||
const location = { "uri": uri, "index": index };
|
||||
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
|
||||
return fingerprints.resolveUriToFile(location, artifacts);
|
||||
}
|
||||
ava_1.default('resolveUriToFile', t => {
|
||||
// The resolveUriToFile method checks that the file exists and is in the right directory
|
||||
// so we need to give it real files to look at. We will use this file as an example.
|
||||
// For this to work we require the current working directory to be a parent, but this
|
||||
// should generally always be the case so this is fine.
|
||||
const cwd = process.cwd();
|
||||
const filepath = __filename;
|
||||
t.true(filepath.startsWith(cwd + '/'));
|
||||
const relativeFilepaht = filepath.substring(cwd.length + 1);
|
||||
process.env['GITHUB_WORKSPACE'] = cwd;
|
||||
// Absolute paths are unmodified
|
||||
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
|
||||
t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
|
||||
// Relative paths are made absolute
|
||||
t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath);
|
||||
t.is(testResolveUriToFile('file://' + relativeFilepaht, undefined, []), filepath);
|
||||
// Absolute paths outside the src root are discarded
|
||||
t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
|
||||
t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
|
||||
// Other schemes are discarded
|
||||
t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
|
||||
t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
|
||||
// Invalid URIs are discarded
|
||||
t.is(testResolveUriToFile(1, undefined, []), undefined);
|
||||
t.is(testResolveUriToFile(undefined, undefined, []), undefined);
|
||||
// Non-existant files are discarded
|
||||
t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
|
||||
// Index is resolved
|
||||
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
|
||||
t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
|
||||
// Invalid indexes are discarded
|
||||
t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
|
||||
t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
|
||||
});
|
||||
ava_1.default('addFingerprints', t => {
|
||||
// Run an end-to-end test on a test file
|
||||
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
|
||||
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
|
||||
// The test files are stored prettified, but addFingerprints outputs condensed JSON
|
||||
input = JSON.stringify(JSON.parse(input));
|
||||
expected = JSON.stringify(JSON.parse(expected));
|
||||
// The URIs in the SARIF files resolve to files in the testdata directory
|
||||
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
|
||||
t.deepEqual(fingerprints.addFingerprints(input), expected);
|
||||
});
|
||||
ava_1.default('missingRegions', t => {
|
||||
// Run an end-to-end test on a test file
|
||||
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
|
||||
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
|
||||
// The test files are stored prettified, but addFingerprints outputs condensed JSON
|
||||
input = JSON.stringify(JSON.parse(input));
|
||||
expected = JSON.stringify(JSON.parse(expected));
|
||||
// The URIs in the SARIF files resolve to files in the testdata directory
|
||||
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
|
||||
t.deepEqual(fingerprints.addFingerprints(input), expected);
|
||||
});
|
||||
//# sourceMappingURL=fingerprints.test.js.map
|
||||
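The LF, CR and CRLF inputs above share expected hashes because hashing splits lines with a newline-insensitive regular expression, the same one testHash uses to count lines. A quick demonstration of that split:

```js
// Sketch: the newline-insensitive split the fingerprint tests rely on.
// All three line-ending conventions yield the same line sequence.
const lines = (s) => s.split(/\r\n|\r|\n/);

console.log(lines("a\nb\nc"));     // [ 'a', 'b', 'c' ]
console.log(lines("a\rb\rc"));     // [ 'a', 'b', 'c' ]
console.log(lines("a\r\nb\r\nc")); // [ 'a', 'b', 'c' ]
```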
1  lib/fingerprints.test.js.map  Normal file
File diff suppressed because one or more lines are too long
47  lib/setup-tools.js  generated
@@ -10,6 +10,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const toolcache = __importStar(require("@actions/tool-cache"));
const path = __importStar(require("path"));
+const semver = __importStar(require("semver"));
class CodeQLSetup {
    constructor(codeqlDist) {
        this.dist = codeqlDist;
@@ -19,7 +20,7 @@ class CodeQLSetup {
        if (process.platform === 'win32') {
            this.platform = 'win64';
            if (this.cmd.endsWith('codeql')) {
-                this.cmd += ".cmd";
+                this.cmd += ".exe";
            }
        }
        else if (process.platform === 'linux') {
@@ -35,17 +36,41 @@ class CodeQLSetup {
}
exports.CodeQLSetup = CodeQLSetup;
async function setupCodeQL() {
-    const version = '1.0.0';
-    const codeqlURL = core.getInput('tools', { required: true });
-    let codeqlFolder = toolcache.find('CodeQL', version);
-    if (codeqlFolder) {
-        core.debug(`CodeQL found in cache ${codeqlFolder}`);
+    try {
+        const codeqlURL = core.getInput('tools', { required: true });
+        const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
+        let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
+        if (codeqlFolder) {
+            core.debug(`CodeQL found in cache ${codeqlFolder}`);
+        }
+        else {
+            const codeqlPath = await toolcache.downloadTool(codeqlURL);
+            const codeqlExtracted = await toolcache.extractTar(codeqlPath);
+            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
+        }
+        return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
    }
-    else {
-        const codeqlPath = await toolcache.downloadTool(codeqlURL);
-        const codeqlExtracted = await toolcache.extractTar(codeqlPath);
-        codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
+    catch (e) {
+        core.error(e);
+        throw new Error("Unable to download and extract CodeQL CLI");
    }
-    return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
}
exports.setupCodeQL = setupCodeQL;
+function getCodeQLURLVersion(url) {
+    const match = url.match(/\/codeql-bundle-(.*)\//);
+    if (match === null || match.length < 2) {
+        throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
+    }
+    let version = match[1];
+    if (!semver.valid(version)) {
+        core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
+        version = '0.0.0-' + version;
+    }
+    const s = semver.clean(version);
+    if (!s) {
+        throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
+    }
+    return s;
+}
+exports.getCodeQLURLVersion = getCodeQLURLVersion;
//# sourceMappingURL=setup-tools.js.map
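getCodeQLURLVersion normalizes date-style bundle tags into valid SemVer so the toolcache can index them; the test file later in this diff exercises exactly this mapping. A condensed sketch using the same semver package:

```js
// Sketch: how a bundle URL's tag becomes a toolcache version key.
const semver = require('semver');

function versionFromUrl(url) {
    const match = url.match(/\/codeql-bundle-(.*)\//);
    if (!match) throw new Error('no codeql-bundle tag in URL');
    let version = match[1];
    // Date-style tags like 20200601 are not SemVer; demote them to a
    // pre-release of 0.0.0 so semver.clean accepts them.
    if (!semver.valid(version)) version = '0.0.0-' + version;
    return semver.clean(version);
}

console.log(versionFromUrl('https://example.com/codeql-bundle-20200601/x')); // 0.0.0-20200601
console.log(versionFromUrl('https://example.com/codeql-bundle-1.2.3/x'));    // 1.2.3
```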
1  lib/setup-tools.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAC7B,+CAAiC;AAEjC,MAAa,WAAW;IAMtB,YAAY,UAAkB;QAC5B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC/B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACpB;SACF;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACvC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC3B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACxC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SACzB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC7D;IACH,CAAC;CACF;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC/B,IAAI;QACF,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAExD,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAC9D,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACrD;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,gBAAgB,CAAC,CAAC;SACtF;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE3D;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAC9D;AACH,CAAC;AAnBD,kCAmBC;AAED,SAAgB,mBAAmB,CAAC,GAAW;IAE7C,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;IAClD,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,wBAAwB,GAAG,iCAAiC,CAAC,CAAC;KAC/E;IAED,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAEvB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;QAC1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,OAAO,gEAAgE,OAAO,GAAG,CAAC,CAAC;QAChH,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC;KAC9B;IAED,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,iDAAiD,OAAO,UAAU,CAAC,CAAC;KAC/G;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AApBD,kDAoBC"}
60  lib/setup-tools.test.js  generated  Normal file
@@ -0,0 +1,60 @@
"use strict";
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const nock_1 = __importDefault(require("nock"));
|
||||
const path = __importStar(require("path"));
|
||||
const setupTools = __importStar(require("./setup-tools"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
testing_utils_1.silenceDebugOutput(ava_1.default);
|
||||
ava_1.default('download codeql bundle cache', async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
|
||||
process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
|
||||
const versions = ['20200601', '20200610'];
|
||||
for (let i = 0; i < versions.length; i++) {
|
||||
const version = versions[i];
|
||||
nock_1.default('https://example.com')
|
||||
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
|
||||
await setupTools.setupCodeQL();
|
||||
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
|
||||
}
|
||||
const cachedVersions = toolcache.findAllVersions('CodeQL');
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
});
|
||||
ava_1.default('parse codeql bundle url version', t => {
|
||||
const tests = {
|
||||
'20200601': '0.0.0-20200601',
|
||||
'20200601.0': '0.0.0-20200601.0',
|
||||
'20200601.0.0': '20200601.0.0',
|
||||
'1.2.3': '1.2.3',
|
||||
'1.2.3-alpha': '1.2.3-alpha',
|
||||
'1.2.3-beta.1': '1.2.3-beta.1',
|
||||
};
|
||||
for (const [version, expectedVersion] of Object.entries(tests)) {
|
||||
const url = `https://github.com/.../codeql-bundle-${version}/...`;
|
||||
try {
|
||||
const parsedVersion = setupTools.getCodeQLURLVersion(url);
|
||||
t.deepEqual(parsedVersion, expectedVersion);
|
||||
}
|
||||
catch (e) {
|
||||
t.fail(e.message);
|
||||
}
|
||||
}
|
||||
});
|
||||
//# sourceMappingURL=setup-tools.test.js.map
|
||||
1  lib/setup-tools.test.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.test.js","sourceRoot":"","sources":["../src/setup-tools.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,0DAA4C;AAC5C,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,UAAU,CAAC,WAAW,EAAE,CAAC;YAE/B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YAC1D,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}
46  lib/setup-tracer.js  generated
@@ -100,12 +100,13 @@ function concatTracerConfigs(configs) {
        totalCount += count;
        totalLines.push(...lines.slice(2));
    }
-    const newLogFilePath = path.resolve(util.workspaceFolder(), 'compound-build-tracer.log');
-    const spec = path.resolve(util.workspaceFolder(), 'compound-spec');
-    const tempFolder = path.resolve(util.workspaceFolder(), 'compound-temp');
+    const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
+    const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
+    const spec = path.resolve(tempFolder, 'compound-spec');
+    const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
    const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
    if (copyExecutables) {
-        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = tempFolder;
+        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
        envSize += 1;
    }
    fs.writeFileSync(spec, newSpecContent.join('\n'));
@@ -126,22 +127,28 @@ function concatTracerConfigs(configs) {
    return { env, spec };
}
async function run() {
+    let languages;
    try {
        if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
            return;
        }
+        // The config file MUST be parsed in the init action
+        const config = await configUtils.loadConfig();
        core.startGroup('Load language configuration');
-        const languages = await util.getLanguages();
-        const config = await configUtils.loadConfig();
+        languages = await util.getLanguages();
        // If the languages parameter was not given and no languages were
        // detected then fail here as this is a workflow configuration error.
        if (languages.length === 0) {
-            core.setFailed("Did not detect any languages to analyze. Please update input in workflow.");
-            return;
+            throw new Error("Did not detect any languages to analyze. Please update input in workflow.");
        }
-        core.endGroup();
+        analysisPaths.includeAndExcludeAnalysisPaths(config, languages);
+        core.endGroup();
    }
    catch (e) {
        core.setFailed(e.message);
        await util.reportActionAborted('init', e.message);
        return;
    }
    try {
        const sourceRoot = path.resolve();
        core.startGroup('Setup CodeQL tools');
        const codeqlSetup = await setuptools.setupCodeQL();
@@ -156,7 +163,7 @@ async function run() {
        // Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
        const codeqlRam = process.env['CODEQL_RAM'] || '6500';
        core.exportVariable('CODEQL_RAM', codeqlRam);
-        const databaseFolder = path.resolve(util.workspaceFolder(), 'codeql_databases');
+        const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
        await io.mkdirP(databaseFolder);
        let tracedLanguages = {};
        let scannedLanguages = [];
@@ -164,7 +171,13 @@ async function run() {
        for (let language of languages) {
            const languageDatabase = path.join(databaseFolder, language);
            // Init language database
-            await exec.exec(codeqlSetup.cmd, ['database', 'init', languageDatabase, '--language=' + language, '--source-root=' + sourceRoot]);
+            await exec.exec(codeqlSetup.cmd, [
+                'database',
+                'init',
+                languageDatabase,
+                '--language=' + language,
+                '--source-root=' + sourceRoot,
+            ]);
            // TODO: add better detection of 'traced languages' instead of using a hard coded list
            if (['cpp', 'java', 'csharp'].includes(language)) {
                const config = await tracerConfig(codeqlSetup, languageDatabase);
@@ -186,8 +199,10 @@ async function run() {
            core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeqlSetup.tools, 'osx64', 'libtrace.dylib'));
        }
        else if (process.platform === 'win32') {
-            await exec.exec('powershell', [path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
-                path.resolve(codeqlSetup.tools, 'win64', 'tracer.exe')], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
+            await exec.exec('powershell', [
+                path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
+                path.resolve(codeqlSetup.tools, 'win64', 'tracer.exe'),
+            ], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
        }
        else {
            core.exportVariable('LD_PRELOAD', path.join(codeqlSetup.tools, 'linux64', '${LIB}trace.so'));
@@ -205,10 +220,11 @@ async function run() {
        await util.reportActionFailed('init', error.message, error.stack);
        return;
    }
-    core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
    await util.reportActionSucceeded('init');
+    core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
}
run().catch(e => {
    core.setFailed("init action failed: " + e);
    console.log(e);
});
//# sourceMappingURL=setup-tracer.js.map
1  lib/setup-tracer.js.map  Normal file
File diff suppressed because one or more lines are too long
2  lib/shared-environment.js  generated
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_ACTION_CMD = 'CODEQL_ACTION_CMD';
exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
+exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
@@ -14,3 +15,4 @@ exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
// Populated when the init action completes successfully
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
//# sourceMappingURL=shared-environment.js.map
1  lib/shared-environment.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,iBAAiB,GAAG,mBAAmB,CAAC;AACxC,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AACpD,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}
22  lib/test-utils.js  generated  Normal file
@@ -0,0 +1,22 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
function silenceDebugOutput(test) {
|
||||
const typedTest = test;
|
||||
typedTest.beforeEach(t => {
|
||||
const processStdoutWrite = process.stdout.write.bind(process.stdout);
|
||||
t.context.write = processStdoutWrite;
|
||||
process.stdout.write = (str, encoding, cb) => {
|
||||
// Core library will directly call process.stdout.write for commands
|
||||
// We don't want :: commands to be executed by the runner during tests
|
||||
if (!str.match(/^::/)) {
|
||||
processStdoutWrite(str, encoding, cb);
|
||||
}
|
||||
return true;
|
||||
};
|
||||
});
|
||||
typedTest.afterEach(t => {
|
||||
process.stdout.write = t.context.write;
|
||||
});
|
||||
}
|
||||
exports.silenceDebugOutput = silenceDebugOutput;
|
||||
//# sourceMappingURL=test-utils.js.map
|
||||
1  lib/test-utils.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"test-utils.js","sourceRoot":"","sources":["../src/test-utils.ts"],"names":[],"mappings":";;AAEA,SAAgB,kBAAkB,CAAC,IAAwB;IACzD,MAAM,SAAS,GAAG,IAAmC,CAAC;IAEtD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACrB,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG,kBAAkB,CAAC;QACrC,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,GAAQ,EAAE,QAAc,EAAE,EAA0B,EAAE,EAAE;YAC5E,oEAAoE;YACpE,sEAAsE;YACtE,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACnB,kBAAkB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC;aACzC;YACD,OAAO,IAAI,CAAC;QAChB,CAAC,CAAC;IACN,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;QACpB,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3C,CAAC,CAAC,CAAC;AACL,CAAC;AAnBD,gDAmBC"}
48  lib/testing-utils.js  generated  Normal file
@@ -0,0 +1,48 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
function wrapOutput(context) {
|
||||
// Function signature taken from Socket.write.
|
||||
// Note there are two overloads:
|
||||
// write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean;
|
||||
// write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean;
|
||||
return (chunk, encoding, cb) => {
|
||||
// Work out which method overload we are in
|
||||
if (cb === undefined && typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = undefined;
|
||||
}
|
||||
// Record the output
|
||||
if (typeof chunk === 'string') {
|
||||
context.testOutput += chunk;
|
||||
}
|
||||
else {
|
||||
context.testOutput += new TextDecoder(encoding || 'utf-8').decode(chunk);
|
||||
}
|
||||
// Satisfy contract by calling callback when done
|
||||
if (cb !== undefined && typeof cb === 'function') {
|
||||
cb();
|
||||
}
|
||||
return true;
|
||||
};
|
||||
}
|
||||
function silenceDebugOutput(test) {
|
||||
const typedTest = test;
|
||||
typedTest.beforeEach(t => {
|
||||
t.context.testOutput = "";
|
||||
const processStdoutWrite = process.stdout.write.bind(process.stdout);
|
||||
t.context.stdoutWrite = processStdoutWrite;
|
||||
process.stdout.write = wrapOutput(t.context);
|
||||
const processStderrWrite = process.stderr.write.bind(process.stderr);
|
||||
t.context.stderrWrite = processStderrWrite;
|
||||
process.stderr.write = wrapOutput(t.context);
|
||||
});
|
||||
typedTest.afterEach.always(t => {
|
||||
process.stdout.write = t.context.stdoutWrite;
|
||||
process.stderr.write = t.context.stderrWrite;
|
||||
if (!t.passed) {
|
||||
process.stdout.write(t.context.testOutput);
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.silenceDebugOutput = silenceDebugOutput;
|
||||
//# sourceMappingURL=testing-utils.js.map
|
||||
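silenceDebugOutput swaps out process.stdout.write for a recorder during each test and replays the captured text only when a test fails. A standalone sketch of the same capture-and-replay idea (the function name is illustrative):

```js
// Sketch: capture stdout during a block and replay it only on error,
// mirroring the pattern testing-utils.js applies around every ava test.
function captureStdout(fn) {
    let captured = '';
    const realWrite = process.stdout.write.bind(process.stdout);
    process.stdout.write = (chunk, encoding, cb) => {
        // Handle both write(chunk, cb) and write(chunk, encoding, cb).
        if (typeof encoding === 'function') {
            cb = encoding;
            encoding = undefined;
        }
        captured += typeof chunk === 'string'
            ? chunk
            : Buffer.from(chunk).toString(encoding || 'utf-8');
        if (typeof cb === 'function') cb();
        return true;
    };
    try {
        fn();
    } catch (e) {
        realWrite(captured); // replay suppressed output for debugging
        throw e;
    } finally {
        process.stdout.write = realWrite;
    }
}

captureStdout(() => console.log('hidden unless the block throws'));
```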
1  lib/testing-utils.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;AAIA,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,kBAAkB,CAAC,IAAwB;IACzD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAE1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;IACtD,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAE7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAvBD,gDAuBC"}
1  lib/tracer-env.js  generated
@@ -18,3 +18,4 @@ for (let entry of Object.entries(process.env)) {
}
+process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
//# sourceMappingURL=tracer-env.js.map
1  lib/tracer-env.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC7C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACtF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KAClB;CACF;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}
231  lib/upload-lib.js  generated
@@ -11,28 +11,15 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const http = __importStar(require("@actions/http-client"));
|
||||
const auth = __importStar(require("@actions/http-client/auth"));
|
||||
const io = __importStar(require("@actions/io"));
|
||||
const file_url_1 = __importDefault(require("file-url"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const jsonschema = __importStar(require("jsonschema"));
|
||||
const path = __importStar(require("path"));
|
||||
const zlib_1 = __importDefault(require("zlib"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const fingerprints = __importStar(require("./fingerprints"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util = __importStar(require("./util"));
|
||||
// Construct the location of the sentinel file for detecting multiple uploads.
|
||||
// The returned location should be writable.
|
||||
async function getSentinelFilePath() {
|
||||
// Use the temp dir instead of placing next to the sarif file because of
|
||||
// issues with docker actions. The directory containing the sarif file
|
||||
// may not be writable by us.
|
||||
const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
|
||||
await io.mkdirP(uploadsTmpDir);
|
||||
// Hash the absolute path so we'll behave correctly in the unlikely
|
||||
// scenario a file is referenced twice with different paths.
|
||||
return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
|
||||
}
|
||||
// Takes a list of paths to sarif files and combines them together,
|
||||
// returning the contents of the combined sarif file.
|
||||
function combineSarifFiles(sarifFiles) {
|
||||
@@ -54,89 +41,169 @@ function combineSarifFiles(sarifFiles) {
|
||||
return JSON.stringify(combinedSarif);
|
||||
}
|
||||
exports.combineSarifFiles = combineSarifFiles;
|
||||
// Upload the given payload.
|
||||
// If the request fails then this will retry a small number of times.
|
||||
async function uploadPayload(payload) {
|
||||
core.info('Uploading results');
|
||||
// If in test mode we don't want to upload the results
|
||||
const testMode = process.env['TEST_MODE'] === 'true' || false;
|
||||
if (testMode) {
|
||||
return true;
|
||||
}
|
||||
const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
||||
// Make up to 4 attempts to upload, and sleep for these
|
||||
// number of seconds between each attempt.
|
||||
// We don't want to backoff too much to avoid wasting action
|
||||
// minutes, but just waiting a little bit could maybe help.
|
||||
const backoffPeriods = [1, 5, 15];
|
||||
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
|
||||
const response = await api.client.request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
data: payload,
|
||||
}));
|
||||
core.debug('response status: ' + response.status);
|
||||
const statusCode = response.status;
|
||||
if (statusCode === 202) {
|
||||
core.info("Successfully uploaded results");
|
||||
return true;
|
||||
}
|
||||
const requestID = response.headers["x-github-request-id"];
|
||||
// On any other status code that's not 5xx mark the upload as failed
|
||||
if (!statusCode || statusCode < 500 || statusCode >= 600) {
|
||||
core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
return false;
|
||||
}
|
||||
// On a 5xx status code we may retry the request
|
||||
if (attempt < backoffPeriods.length) {
|
||||
// Log the failure as a warning but don't mark the action as failed yet
|
||||
core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
|
||||
') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
|
||||
' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
// Sleep for the backoff period
|
||||
await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
// If the upload fails with 5xx then we assume it is a temporary problem
|
||||
// and not an error that the user has caused or can fix.
|
||||
// We avoid marking the job as failed to avoid breaking CI workflows.
|
||||
core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
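
The uploadPayload loop above retries only on 5xx responses, sleeping 1, 5 and 15 seconds between its four attempts. A minimal TypeScript sketch of the same fixed-backoff pattern, with a hypothetical `send` callback standing in for the api.client.request call:

// Sketch only: `send` is a hypothetical stand-in that performs the PUT
// and resolves to the HTTP status code.
async function retryWithBackoff(send: () => Promise<number>): Promise<boolean> {
  const backoffPeriods = [1, 5, 15]; // seconds to sleep between attempts
  for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
    const status = await send();
    if (status === 202) {
      return true; // accepted by the endpoint
    }
    if (status < 500 || status >= 600) {
      return false; // non-5xx failures are permanent, don't retry
    }
    if (attempt < backoffPeriods.length) {
      await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
    }
  }
  return false; // all attempts exhausted
}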
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function upload(input) {
if (fs.lstatSync(input).isDirectory()) {
const sarifFiles = fs.readdirSync(input)
.filter(f => f.endsWith(".sarif"))
.map(f => path.resolve(input, f));
await uploadFiles(sarifFiles);
if (sarifFiles.length === 0) {
core.setFailed("No SARIF files found to upload in \"" + input + "\".");
return false;
}
return await uploadFiles(sarifFiles);
}
else {
await uploadFiles([input]);
return await uploadFiles([input]);
}
}
exports.upload = upload;
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) {
let numResults = 0;
for (const run of JSON.parse(sarif).runs) {
numResults += run.results.length;
}
return numResults;
}
exports.countResultsInSarif = countResultsInSarif;
// Validates that the given file path refers to a valid SARIF file.
// Returns true if the file is valid, otherwise reports the schema
// errors via core.setFailed and returns false.
function validateSarifFileSchema(sarifFilePath) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
const result = new jsonschema.Validator().validate(sarif, schema);
if (result.valid) {
return true;
}
else {
// Set the failure message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
const errorMessages = result.errors.map(e => "- " + e.stack);
core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));
// Also output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
core.startGroup("Error details: " + error.stack);
core.info(JSON.stringify(error, null, 2));
core.endGroup();
}
return false;
}
}
exports.validateSarifFileSchema = validateSarifFileSchema;
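
validateSarifFileSchema leans on the jsonschema package, whose Validator().validate(instance, schema) call returns a result with .valid and .errors, exactly as used above. A self-contained sketch of the same check (the two file paths are hypothetical placeholders):

import * as fs from 'fs';
import * as jsonschema from 'jsonschema';

// Validate a SARIF document against the 2.1.0 schema shipped with the action.
const sarif = JSON.parse(fs.readFileSync('results.sarif', 'utf8'));
const schema = JSON.parse(fs.readFileSync('sarif_v2.1.0_schema.json', 'utf8'));
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
  // Each error's .stack is a short human-readable description of the failure.
  console.log(result.errors.map((e: jsonschema.ValidationError) => '- ' + e.stack).join('\n'));
}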
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles) {
core.startGroup("Uploading results");
try {
// Check if an upload has happened before. If so then abort.
// This is intended to catch when the finish and upload-sarif actions
// are used together, and then the upload-sarif action is invoked twice.
const sentinelFile = await getSentinelFilePath();
if (fs.existsSync(sentinelFile)) {
core.info("Aborting as an upload has already happened from this job");
return;
}
const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRequiredEnvParam('GITHUB_REF'); // it's in the form "refs/heads/master"
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
core.debug("Uploading sarif files: " + JSON.stringify(sarifFiles));
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = file_url_1.default(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
return;
}
let matrix = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const toolNames = util.getToolNames(sarifPayload);
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt,
"tool_names": toolNames,
});
core.info('Uploading results');
const githubToken = core.getInput('token');
const ph = new auth.BearerCredentialHandler(githubToken);
const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';
const res = await client.put(url, payload);
const requestID = res.message.headers["x-github-request-id"];
core.debug('response status: ' + res.message.statusCode);
if (res.message.statusCode === 500) {
// If the upload fails with 500 then we assume it is a temporary problem
// with turbo-scan and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
core.error('Upload failed (' + requestID + '): ' + await res.readBody());
}
else if (res.message.statusCode !== 202) {
core.setFailed('Upload failed (' + requestID + '): ' + await res.readBody());
}
else {
core.info("Successfully uploaded results");
}
// Mark that we have made an upload
fs.writeFileSync(sentinelFile, '');
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
return false;
}
catch (error) {
core.setFailed(error.message);
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
if (!validateSarifFileSchema(file)) {
return false;
}
}
const commitOid = await util.getCommitOid();
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRef();
const analysisKey = await util.getAnalysisKey();
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = file_url_1.default(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
return false;
}
let matrix = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const toolNames = util.getToolNames(sarifPayload);
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt,
"tool_names": toolNames,
});
// Log some useful debug info about the upload
core.debug("Raw upload size: " + sarifPayload.length + " bytes");
|
||||
core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
|
||||
core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
|
||||
// Make the upload
|
||||
const succeeded = await uploadPayload(payload);
|
||||
core.endGroup();
|
||||
return succeeded;
|
||||
}
|
||||
//# sourceMappingURL=upload-lib.js.map
|
||||
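
Two duplicate-upload guards appear in this file's diff: the old sentinel file on disk and the new CODEQL_UPLOAD_SARIF environment variable exported for later steps. A sketch of the environment-variable version in isolation, using @actions/core as the code above does:

import * as core from '@actions/core';

const sentinelEnvVar = 'CODEQL_UPLOAD_SARIF';

// Returns false if an upload already happened in this job; otherwise marks
// the job (exportVariable makes the sentinel visible to later steps too).
function markUploadOncePerJob(): boolean {
  if (process.env[sentinelEnvVar]) {
    core.error('Aborting upload: only one upload is allowed per job');
    return false;
  }
  core.exportVariable(sentinelEnvVar, sentinelEnvVar);
  return true;
}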
1  lib/upload-lib.js.map  Normal file
File diff suppressed because one or more lines are too long
27  lib/upload-lib.test.js  generated  Normal file
@@ -0,0 +1,27 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
t.true(uploadLib.validateSarifFileSchema(inputFile));
});
ava_1.default('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
t.false(uploadLib.validateSarifFileSchema(inputFile));
// validateSarifFileSchema calls core.setFailed which sets the exit code on error
process.exitCode = 0;
});
//# sourceMappingURL=upload-lib.test.js.map
1  lib/upload-lib.test.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAAmD;AACnD,wDAA0C;AAE1C,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;IACtD,iFAAiF;IACjF,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}
11  lib/upload-sarif.js  generated
@@ -15,16 +15,21 @@ async function run() {
return;
}
try {
await upload_lib.upload(core.getInput('sarif_file'));
if (await upload_lib.upload(core.getInput('sarif_file'))) {
await util.reportActionSucceeded('upload-sarif');
}
else {
await util.reportActionFailed('upload-sarif', 'upload');
}
}
catch (error) {
core.setFailed(error.message);
await util.reportActionFailed('upload-sarif', error.message, error.stack);
return;
}
await util.reportActionSucceeded('upload-sarif');
}
run().catch(e => {
core.setFailed("upload-sarif action failed: " + e);
core.setFailed("codeql/upload-sarif action failed: " + e);
console.log(e);
});
//# sourceMappingURL=upload-sarif.js.map
1  lib/upload-sarif.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,GAAG;IAChB,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,EAAE;QAChG,OAAO;KACR;IAED,IAAI;QACF,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE;YACxD,MAAM,IAAI,CAAC,qBAAqB,CAAC,cAAc,CAAC,CAAC;SAClD;aAAM;YACL,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;SACzD;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1E,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
218  lib/util.js  generated
@@ -6,16 +6,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const http = __importStar(require("@actions/http-client"));
const auth = __importStar(require("@actions/http-client/auth"));
const octokit = __importStar(require("@octokit/rest"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
/**
 * Should the current action be aborted?
@@ -31,12 +28,6 @@ function should_abort(actionName, requireInitActionHasRun) {
core.setFailed('GITHUB_REF must be set.');
return true;
}
// Should abort if called on a merge commit for a pull request.
if (ref.startsWith('refs/pull/')) {
core.warning('The CodeQL ' + actionName + ' action is intended for workflows triggered on `push` events, '
+ 'but the current workflow is running on a pull request. Aborting.');
return true;
}
// If the init action is required, then check that it completed successfully.
if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
@@ -45,22 +36,12 @@ function should_abort(actionName, requireInitActionHasRun) {
return false;
}
exports.should_abort = should_abort;
/**
 * Resolve the path to the workspace folder.
 */
function workspaceFolder() {
let workspaceFolder = process.env['RUNNER_WORKSPACE'];
if (!workspaceFolder)
workspaceFolder = path.resolve('..');
return workspaceFolder;
}
exports.workspaceFolder = workspaceFolder;
/**
 * Get an environment parameter, but throw an error if it is not set.
 */
function getRequiredEnvParam(paramName) {
const value = process.env[paramName];
if (value === undefined) {
if (value === undefined || value.length === 0) {
throw new Error(paramName + ' environment variable must be set');
}
core.debug(paramName + '=' + value);
@@ -88,12 +69,7 @@ async function getLanguagesInRepo() {
let owner = repo_nwo[0];
let repo = repo_nwo[1];
core.debug(`GitHub repo ${owner} ${repo}`);
let ok = new octokit.Octokit({
auth: core.getInput('token'),
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" })
});
const response = await ok.request("GET /repos/:owner/:repo/languages", ({
const response = await api.client.request("GET /repos/:owner/:repo/languages", ({
owner,
repo
}));
@@ -149,6 +125,90 @@ async function getLanguages() {
return languages;
}
exports.getLanguages = getLanguages;
/**
 * Gets the SHA of the commit that is currently checked out.
 */
async function getCommitOid() {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
// the workflow has changed the current commit to the head commit instead of
// the merge commit, which must mean that git is available.
// Even if this does go wrong, it's not a huge problem for the alerts to
// be reported on the merge commit.
try {
let commitOid = '';
await exec.exec('git', ['rev-parse', 'HEAD'], {
silent: true,
listeners: {
stdout: (data) => { commitOid += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
});
return commitOid.trim();
}
catch (e) {
core.info("Failed to call git to get current commit. Continuing with data from environment: " + e);
return getRequiredEnvParam('GITHUB_SHA');
}
}
exports.getCommitOid = getCommitOid;
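
The listener pattern in getCommitOid is the usual way to capture stdout with @actions/exec; a self-contained sketch, with the GITHUB_SHA fallback left to the caller:

import * as exec from '@actions/exec';

// Run `git rev-parse HEAD` and return the SHA it prints.
async function currentCommit(): Promise<string> {
  let out = '';
  await exec.exec('git', ['rev-parse', 'HEAD'], {
    silent: true,
    listeners: { stdout: (data: Buffer) => { out += data.toString(); } }
  });
  return out.trim();
}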
/**
 * Get the path of the currently executing workflow.
 */
async function getWorkflowPath() {
const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam('GITHUB_RUN_ID'));
const runsResponse = await api.client.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
owner,
repo,
run_id
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await api.client.request('GET ' + workflowUrl);
return workflowResponse.data.path;
}
/**
 * Get the analysis key parameter for the current job.
 *
 * This will combine the workflow path and current job name.
 * Computing this the first time requires making requests to
 * the github API, but after that the result will be cached.
 */
async function getAnalysisKey() {
let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam('GITHUB_JOB');
analysisKey = workflowPath + ':' + jobName;
core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
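
getAnalysisKey memoizes through an exported environment variable so the two API requests in getWorkflowPath happen at most once per job; the key itself is just the workflow path joined to the job name with a colon. A generic sketch of that memoization, where `compute` is a hypothetical expensive lookup:

// Sketch only, assuming `compute` is worth calling at most once per job.
async function cachedInEnv(envVar: string, compute: () => Promise<string>): Promise<string> {
  const cached = process.env[envVar];
  if (cached !== undefined) {
    return cached;
  }
  const value = await compute();
  process.env[envVar] = value; // the real code uses core.exportVariable here
  return value;
}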
/**
 * Get the ref currently being analyzed.
 */
function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam('GITHUB_REF');
// For pull request refs we want to convert from the 'merge' ref
// to the 'head' ref, as that is what we want to analyse.
// There should have been some code earlier in the workflow to do
// the checkout, but we have no way of verifying that here.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
if (pull_ref_regex.test(ref)) {
return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
}
else {
return ref;
}
}
exports.getRef = getRef;
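
The merge-to-head conversion in getRef is a single regex replace; for example:

const pullRefRegex = /refs\/pull\/(\d+)\/merge/;
// 'refs/pull/123/merge' -> 'refs/pull/123/head';
// push refs such as 'refs/heads/master' pass through unchanged.
console.log('refs/pull/123/merge'.replace(pullRefRegex, 'refs/pull/$1/head'));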
/**
 * Compose a StatusReport.
 *
@@ -159,6 +219,7 @@ exports.getLanguages = getLanguages;
 */
async function createStatusReport(actionName, status, cause, exception) {
const commitOid = process.env['GITHUB_SHA'] || '';
const ref = getRef();
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
let workflowRunID = -1;
if (workflowRunIDStr) {
@@ -175,6 +236,7 @@ async function createStatusReport(actionName, status, cause, exception) {
job_name: jobName,
languages: languages,
commit_oid: commitOid,
ref: ref,
action_name: actionName,
action_oid: "unknown",
started_at: startedAt,
@@ -187,7 +249,7 @@ async function createStatusReport(actionName, status, cause, exception) {
if (exception) {
statusReport.exception = exception;
}
if (status === 'success' || status === 'failure') {
if (status === 'success' || status === 'failure' || status === 'aborted') {
statusReport.completed_at = new Date().toISOString();
}
let matrix = core.getInput('matrix');
@@ -199,21 +261,19 @@ async function createStatusReport(actionName, status, cause, exception) {
/**
 * Send a status report to the code_scanning/analysis/status endpoint.
 *
 * Returns the status code of the response to the status request, or
 * undefined if the given statusReport is undefined or no response was
 * received.
 * Returns the status code of the response to the status request.
 */
async function sendStatusReport(statusReport) {
var _a;
const statusReportJSON = JSON.stringify(statusReport);
core.debug('Sending status report: ' + statusReportJSON);
const githubToken = core.getInput('token');
const ph = new auth.BearerCredentialHandler(githubToken);
const client = new http.HttpClient('Code Scanning : Status Report', [ph]);
const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY']
+ '/code-scanning/analysis/status';
const res = await client.put(url, statusReportJSON);
return (_a = res.message) === null || _a === void 0 ? void 0 : _a.statusCode;
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const statusResponse = await api.client.request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
owner: owner,
repo: repo,
data: statusReportJSON,
});
return statusResponse.status;
}
/**
 * Send a status report that an action is starting.
@@ -262,6 +322,16 @@ async function reportActionSucceeded(action) {
await sendStatusReport(await createStatusReport(action, 'success'));
}
exports.reportActionSucceeded = reportActionSucceeded;
/**
 * Report that an action has been aborted.
 *
 * Note that the started_at date is always that of the `init` action, since
 * this is likely to give a more useful duration when inspecting events.
 */
async function reportActionAborted(action, cause) {
await sendStatusReport(await createStatusReport(action, 'aborted', cause));
}
exports.reportActionAborted = reportActionAborted;
/**
 * Get the array of all the tool names contained in the given sarif contents.
 *
@@ -280,3 +350,67 @@ function getToolNames(sarifContents) {
return Object.keys(toolNames);
}
exports.getToolNames = getToolNames;
// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
async function withTmpDir(body) {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
const realSubdir = path.join(tmpDir, 'real');
fs.mkdirSync(realSubdir);
const symlinkSubdir = path.join(tmpDir, 'symlink');
fs.symlinkSync(realSubdir, symlinkSubdir, 'dir');
const result = await body(symlinkSubdir);
fs.rmdirSync(tmpDir, { recursive: true });
return result;
}
exports.withTmpDir = withTmpDir;
/**
 * Get the codeql `--ram` flag as configured by the `ram` input. If no value was
 * specified, the total available memory will be used minus 256 MB.
 *
 * @returns string
 */
function getMemoryFlag() {
let memoryToUseMegaBytes;
const memoryToUseString = core.getInput("ram");
if (memoryToUseString) {
memoryToUseMegaBytes = Number(memoryToUseString);
if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
|
||||
}
}
else {
const totalMemoryBytes = os.totalmem();
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const systemReservedMemoryMegaBytes = 256;
memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
}
return "--ram=" + Math.floor(memoryToUseMegaBytes);
}
exports.getMemoryFlag = getMemoryFlag;
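
As a worked example of getMemoryFlag's default branch: on a runner with 7168 MB of total memory and no `ram` input, the result is `--ram=6912` (total minus the 256 MB reserve). The arithmetic in isolation:

import * as os from 'os';

// Default branch only: total memory minus a 256 MB system reserve.
const totalMemoryMegaBytes = os.totalmem() / (1024 * 1024);
const flag = '--ram=' + Math.floor(totalMemoryMegaBytes - 256);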
/**
 * Get the codeql `--threads` value specified for the `threads` input. The value
 * defaults to 1. The value will be capped to the number of available CPUs.
 *
 * @returns string
 */
function getThreadsFlag() {
let numThreads = 1;
const numThreadsString = core.getInput("threads");
if (numThreadsString) {
numThreads = Number(numThreadsString);
if (Number.isNaN(numThreads)) {
throw new Error(`Invalid threads setting "${numThreadsString}" specified.`);
}
const maxThreads = os.cpus().length;
if (numThreads > maxThreads) {
numThreads = maxThreads;
}
const minThreads = -maxThreads;
if (numThreads < minThreads) {
numThreads = minThreads;
}
}
return `--threads=${numThreads}`;
}
exports.getThreadsFlag = getThreadsFlag;
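
getThreadsFlag clamps the requested value to the CPU count in both directions, as the tests later in this diff confirm. The clamp in isolation:

import * as os from 'os';

// Clamp a requested thread count to [-numCpus, numCpus].
function clampThreads(requested: number): number {
  const maxThreads = os.cpus().length;
  return Math.max(-maxThreads, Math.min(maxThreads, requested));
}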
//# sourceMappingURL=util.js.map
1  lib/util.js.map  Normal file
File diff suppressed because one or more lines are too long
64  lib/util.test.js  generated  Normal file
@@ -0,0 +1,64 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('getToolNames', t => {
const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
const toolNames = util.getToolNames(input);
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
ava_1.default('getMemoryFlag() should return the correct --ram flag', t => {
const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
const tests = {
"": `--ram=${totalMem - 256}`,
"512": "--ram=512",
};
for (const [input, expectedFlag] of Object.entries(tests)) {
process.env['INPUT_RAM'] = input;
const flag = util.getMemoryFlag();
t.deepEqual(flag, expectedFlag);
}
});
ava_1.default('getMemoryFlag() throws if the ram input is < 0 or NaN', t => {
for (const input of ["-1", "hello!"]) {
process.env['INPUT_RAM'] = input;
t.throws(util.getMemoryFlag);
}
});
ava_1.default('getThreadsFlag() should return the correct --threads flag', t => {
const numCpus = os.cpus().length;
const tests = {
"0": "--threads=0",
"1": "--threads=1",
[`${numCpus + 1}`]: `--threads=${numCpus}`,
[`${-numCpus - 1}`]: `--threads=${-numCpus}`
};
for (const [input, expectedFlag] of Object.entries(tests)) {
process.env['INPUT_THREADS'] = input;
const flag = util.getThreadsFlag();
t.deepEqual(flag, expectedFlag);
}
});
ava_1.default('getThreadsFlag() throws if the threads input is not an integer', t => {
process.env['INPUT_THREADS'] = "hello!";
t.throws(util.getThreadsFlag);
});
ava_1.default('getRef() throws on the empty string', t => {
process.env["GITHUB_REF"] = "";
t.throws(util.getRef);
});
//# sourceMappingURL=util.test.js.map
1  lib/util.test.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"util.test.js","sourceRoot":"","sources":["../src/util.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,uCAAyB;AAEzB,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACvB,MAAM,KAAK,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,GAAG,mCAAmC,EAAE,MAAM,CAAC,CAAC;IACvF,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,sDAAsD,EAAE,CAAC,CAAC,EAAE;IAE/D,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC;IAE3D,MAAM,KAAK,GAAG;QACZ,EAAE,EAAE,SAAS,QAAQ,GAAG,GAAG,EAAE;QAC7B,KAAK,EAAE,WAAW;KACnB,CAAC;IAEF,KAAK,MAAM,CAAC,KAAK,EAAE,YAAY,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAEzD,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC;QAEjC,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;QAClC,CAAC,CAAC,SAAS,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;KACjC;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,uDAAuD,EAAE,CAAC,CAAC,EAAE;IAChE,KAAK,MAAM,KAAK,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,EAAE;QACpC,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC;QACjC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;KAC9B;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,2DAA2D,EAAE,CAAC,CAAC,EAAE;IAEpE,MAAM,OAAO,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC;IAEjC,MAAM,KAAK,GAAG;QACZ,GAAG,EAAE,aAAa;QAClB,GAAG,EAAE,aAAa;QAClB,CAAC,GAAG,OAAO,GAAG,CAAC,EAAE,CAAC,EAAE,aAAa,OAAO,EAAE;QAC1C,CAAC,GAAG,CAAC,OAAO,GAAG,CAAC,EAAE,CAAC,EAAE,aAAa,CAAC,OAAO,EAAE;KAC7C,CAAC;IAEF,KAAK,MAAM,CAAC,KAAK,EAAE,YAAY,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAEzD,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,GAAG,KAAK,CAAC;QAErC,MAAM,IAAI,GAAG,IAAI,CAAC,cAAc,EAAE,CAAC;QACnC,CAAC,CAAC,SAAS,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;KACjC;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gEAAgE,EAAE,CAAC,CAAC,EAAE;IACzE,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,GAAG,QAAQ,CAAC;IACxC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;AAChC,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE;IAC9C,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;IAC/B,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACxB,CAAC,CAAC,CAAC"}
1  node_modules/.bin/atob  generated  vendored
@@ -1 +0,0 @@
../atob/bin/atob.js

1  node_modules/.bin/ava  generated  vendored  Symbolic link
@@ -0,0 +1 @@
../ava/cli.js

1  node_modules/.bin/escodegen  generated  vendored
@@ -1 +0,0 @@
../escodegen/bin/escodegen.js

1  node_modules/.bin/esgenerate  generated  vendored
@@ -1 +0,0 @@
../escodegen/bin/esgenerate.js

1  node_modules/.bin/esparse  generated  vendored
@@ -1 +0,0 @@
../esprima/bin/esparse.js

1  node_modules/.bin/esvalidate  generated  vendored
@@ -1 +0,0 @@
../esprima/bin/esvalidate.js

1  node_modules/.bin/jest  generated  vendored
@@ -1 +0,0 @@
../jest/bin/jest.js

1  node_modules/.bin/jest-runtime  generated  vendored
@@ -1 +0,0 @@
../jest-runtime/bin/jest-runtime.js

1  node_modules/.bin/jsesc  generated  vendored
@@ -1 +0,0 @@
../jsesc/bin/jsesc

1  node_modules/.bin/json5  generated  vendored
@@ -1 +0,0 @@
../json5/lib/cli.js

1  node_modules/.bin/parser  generated  vendored
@@ -1 +0,0 @@
../@babel/parser/bin/babel-parser.js

1  node_modules/.bin/rc  generated  vendored  Symbolic link
@@ -0,0 +1 @@
../rc/cli.js

1  node_modules/.bin/removeNPMAbsolutePaths  generated  vendored  Symbolic link
@@ -0,0 +1 @@
../removeNPMAbsolutePaths/bin/removeNPMAbsolutePaths

1  node_modules/.bin/sane  generated  vendored
@@ -1 +0,0 @@
../sane/src/cli.js

2  node_modules/.bin/semver  generated  vendored
@@ -1 +1 @@
../semver/bin/semver
../semver/bin/semver.js

1  node_modules/.bin/sshpk-conv  generated  vendored
@@ -1 +0,0 @@
../sshpk/bin/sshpk-conv

1  node_modules/.bin/sshpk-sign  generated  vendored
@@ -1 +0,0 @@
../sshpk/bin/sshpk-sign

1  node_modules/.bin/sshpk-verify  generated  vendored
@@ -1 +0,0 @@
../sshpk/bin/sshpk-verify

1  node_modules/.bin/ts-jest  generated  vendored
@@ -1 +0,0 @@
../ts-jest/cli.js

1  node_modules/.bin/watch  generated  vendored
@@ -1 +0,0 @@
../@cnakazawa/watch/cli.js
8  node_modules/@actions/http-client/README.md  generated  vendored
@@ -18,6 +18,8 @@ A lightweight HTTP client optimized for use with actions, TypeScript with generi
- Basic, Bearer and PAT Support out of the box. Extensible handlers for others.
- Redirects supported

Features and releases [here](./RELEASES.md)

## Install

```
@@ -49,7 +51,11 @@ export NODE_DEBUG=http

## Node support

The http-client is built using the latest LTS version of Node 12. We also support the latest LTS for Node 6, 8 and Node 10.
The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node12+.

## Support and Versioning

We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat).

## Contributing
16  node_modules/@actions/http-client/RELEASES.md  generated  vendored  Normal file
@@ -0,0 +1,16 @@
## Releases

## 1.0.7
Update NPM dependencies and add 429 to the list of HttpCodes

## 1.0.6
Automatically sends Content-Type and Accept application/json headers for \<verb>Json() helper methods if not set in the client or parameters.

## 1.0.5
Adds \<verb>Json() helper methods for json over http scenarios.

## 1.0.4
Started to add \<verb>Json() helper methods. Do not use this release for that. Use >= 1.0.5 since there was an issue with types.

## 1.0.1 to 1.0.3
Adds proxy support.
7  node_modules/@actions/http-client/auth.js  generated  vendored
@@ -6,7 +6,9 @@ class BasicCredentialHandler {
this.password = password;
}
prepareRequest(options) {
options.headers['Authorization'] = 'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64');
options.headers['Authorization'] =
'Basic ' +
Buffer.from(this.username + ':' + this.password).toString('base64');
}
// This handler cannot handle 401
canHandleAuthentication(response) {
@@ -42,7 +44,8 @@ class PersonalAccessTokenCredentialHandler {
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
options.headers['Authorization'] =
'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
}
// This handler cannot handle 401
canHandleAuthentication(response) {
24  node_modules/@actions/http-client/index.d.ts  generated  vendored
@@ -1,5 +1,5 @@
/// <reference types="node" />
import http = require("http");
import http = require('http');
import ifm = require('./interfaces');
export declare enum HttpCodes {
OK = 200,
@@ -23,12 +23,20 @@ export declare enum HttpCodes {
RequestTimeout = 408,
Conflict = 409,
Gone = 410,
TooManyRequests = 429,
InternalServerError = 500,
NotImplemented = 501,
BadGateway = 502,
ServiceUnavailable = 503,
GatewayTimeout = 504
}
export declare enum Headers {
Accept = "accept",
ContentType = "content-type"
}
export declare enum MediaTypes {
ApplicationJson = "application/json"
}
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -39,11 +47,6 @@ export declare class HttpClientResponse implements ifm.IHttpClientResponse {
message: http.IncomingMessage;
readBody(): Promise<string>;
}
export interface ITypedResponse<T> {
statusCode: number;
result: T | null;
headers: Object;
}
export declare function isHttps(requestUrl: string): boolean;
export declare class HttpClient {
userAgent: string | undefined;
@@ -73,10 +76,10 @@ export declare class HttpClient {
 * Gets a typed object from an endpoint
 * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
 */
getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
postJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
putJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
patchJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
postJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
putJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
/**
 * Makes a raw http request.
 * All other methods such as get, post, patch, and request ultimately call this.
@@ -108,6 +111,7 @@ export declare class HttpClient {
getAgent(serverUrl: string): http.Agent;
private _prepareRequest;
private _mergeHeaders;
private _getExistingOrDefaultHeader;
private _getAgent;
private _performExponentialBackoff;
private static dateTimeDeserializer;
111  node_modules/@actions/http-client/index.js  generated  vendored
@@ -28,12 +28,22 @@ var HttpCodes;
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
Headers["Accept"] = "accept";
Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -43,8 +53,18 @@ function getProxyUrl(serverUrl) {
return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
const HttpRedirectCodes = [
HttpCodes.MovedPermanently,
HttpCodes.ResourceMoved,
HttpCodes.SeeOther,
HttpCodes.TemporaryRedirect,
HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
@@ -136,22 +156,29 @@ class HttpClient {
 * Gets a typed object from an endpoint
 * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
 */
async getJson(requestUrl, additionalHeaders) {
async getJson(requestUrl, additionalHeaders = {}) {
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
let res = await this.get(requestUrl, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async postJson(requestUrl, obj, additionalHeaders) {
async postJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.post(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async putJson(requestUrl, obj, additionalHeaders) {
async putJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.put(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async patchJson(requestUrl, obj, additionalHeaders) {
async patchJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.patch(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
@@ -162,18 +189,22 @@ class HttpClient {
 */
async request(verb, requestUrl, data, headers) {
if (this._disposed) {
throw new Error("Client has already been disposed.");
throw new Error('Client has already been disposed.');
}
let parsedUrl = url.parse(requestUrl);
let info = this._prepareRequest(verb, parsedUrl, headers);
// Only perform retries on reads since writes may not be idempotent.
let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
? this._maxRetries + 1
: 1;
let numTries = 0;
let response;
while (numTries < maxTries) {
response = await this.requestRaw(info, data);
// Check if it's an authentication challenge
if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
if (response &&
response.message &&
response.message.statusCode === HttpCodes.Unauthorized) {
let authenticationHandler;
for (let i = 0; i < this.handlers.length; i++) {
if (this.handlers[i].canHandleAuthentication(response)) {
@@ -191,21 +222,32 @@ class HttpClient {
}
}
let redirectsRemaining = this._maxRedirects;
while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
&& this._allowRedirects
&& redirectsRemaining > 0) {
const redirectUrl = response.message.headers["location"];
while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
this._allowRedirects &&
redirectsRemaining > 0) {
const redirectUrl = response.message.headers['location'];
if (!redirectUrl) {
// if there's no location to redirect to, we won't
break;
}
let parsedRedirectUrl = url.parse(redirectUrl);
if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) {
throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.");
if (parsedUrl.protocol == 'https:' &&
parsedUrl.protocol != parsedRedirectUrl.protocol &&
!this._allowRedirectDowngrade) {
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
}
// we need to finish reading the response before reassigning response
// which will leak the open socket.
await response.readBody();
// strip authorization header if redirected to a different hostname
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
for (let header in headers) {
// header names are case insensitive
if (header.toLowerCase() === 'authorization') {
delete headers[header];
}
}
}
// let's make the request with the new redirectUrl
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
response = await this.requestRaw(info, data);
@@ -256,8 +298,8 @@ class HttpClient {
 */
requestRawWithCallback(info, data, onResult) {
let socket;
if (typeof (data) === 'string') {
info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
if (typeof data === 'string') {
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
}
let callbackCalled = false;
let handleResult = (err, res) => {
@@ -270,7 +312,7 @@ class HttpClient {
let res = new HttpClientResponse(msg);
handleResult(null, res);
});
req.on('socket', (sock) => {
req.on('socket', sock => {
socket = sock;
});
// If we ever get disconnected, we want the socket to timeout eventually
@@ -285,10 +327,10 @@ class HttpClient {
// res should have headers
handleResult(err, null);
});
if (data && typeof (data) === 'string') {
if (data && typeof data === 'string') {
req.write(data, 'utf8');
}
if (data && typeof (data) !== 'string') {
if (data && typeof data !== 'string') {
data.on('close', function () {
req.end();
});
@@ -315,29 +357,40 @@ class HttpClient {
const defaultPort = usingSsl ? 443 : 80;
info.options = {};
info.options.host = info.parsedUrl.hostname;
info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
info.options.port = info.parsedUrl.port
? parseInt(info.parsedUrl.port)
: defaultPort;
info.options.path =
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
info.options.method = method;
info.options.headers = this._mergeHeaders(headers);
if (this.userAgent != null) {
info.options.headers["user-agent"] = this.userAgent;
info.options.headers['user-agent'] = this.userAgent;
}
info.options.agent = this._getAgent(info.parsedUrl);
// gives handlers an opportunity to participate
if (this.handlers) {
this.handlers.forEach((handler) => {
this.handlers.forEach(handler => {
handler.prepareRequest(info.options);
});
}
return info;
}
_mergeHeaders(headers) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
if (this.requestOptions && this.requestOptions.headers) {
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
}
return lowercaseKeys(headers || {});
}
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
let clientHeader;
if (this.requestOptions && this.requestOptions.headers) {
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
}
return additionalHeaders[header] || clientHeader || _default;
}
_getAgent(parsedUrl) {
let agent;
let proxyUrl = pm.getProxyUrl(parsedUrl);
@@ -369,7 +422,7 @@ class HttpClient {
proxyAuth: proxyUrl.auth,
host: proxyUrl.hostname,
port: proxyUrl.port
},
}
};
let tunnelAgent;
const overHttps = proxyUrl.protocol === 'https:';
@@ -396,7 +449,9 @@ class HttpClient {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
agent.options = Object.assign(agent.options || {}, {
rejectUnauthorized: false
});
}
return agent;
}
@@ -457,7 +512,7 @@ class HttpClient {
msg = contents;
}
else {
msg = "Failed request: (" + statusCode + ")";
msg = 'Failed request: (' + statusCode + ')';
}
let err = new Error(msg);
// attach statusCode and body obj (if available) to the error object
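
The net effect of the index.js changes is cosmetic reformatting plus the <verb>Json helpers defaulting their headers and returning the ITypedResponse shape now declared in ./interfaces. A hypothetical call site for the updated getJson:

import { HttpClient } from '@actions/http-client';

interface Release { tag_name: string; } // hypothetical response shape

async function latestTag(): Promise<string | null> {
  const client = new HttpClient('my-user-agent');
  // The Accept: application/json header is now filled in automatically.
  const res = await client.getJson<Release>(
    'https://api.github.com/repos/github/codeql-action/releases/latest');
  return res.result ? res.result.tag_name : null; // result is null on 404
}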
9  node_modules/@actions/http-client/interfaces.d.ts  generated  vendored
@@ -1,6 +1,6 @@
/// <reference types="node" />
import http = require("http");
import url = require("url");
import http = require('http');
import url = require('url');
export interface IHeaders {
[key: string]: any;
}
@@ -43,3 +43,8 @@ export interface IRequestOptions {
allowRetries?: boolean;
maxRetries?: number;
}
export interface ITypedResponse<T> {
statusCode: number;
result: T | null;
headers: Object;
}
1  node_modules/@actions/http-client/interfaces.js  generated  vendored
@@ -1,3 +1,2 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
;
39
node_modules/@actions/http-client/node_modules/tunnel/package.json
generated
vendored
39
node_modules/@actions/http-client/node_modules/tunnel/package.json
generated
vendored
@@ -1,25 +1,7 @@
 {
-  "author": {
-    "name": "Koichi Kobayashi",
-    "email": "koichik@improvement.jp"
-  },
-  "bugs": {
-    "url": "https://github.com/koichik/node-tunnel/issues"
-  },
-  "bundleDependencies": false,
-  "deprecated": false,
+  "name": "tunnel",
+  "version": "0.0.6",
   "description": "Node HTTP/HTTPS Agents for tunneling proxies",
-  "devDependencies": {
-    "mocha": "^5.2.0",
-    "should": "^13.2.3"
-  },
-  "directories": {
-    "lib": "./lib"
-  },
-  "engines": {
-    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
-  },
-  "homepage": "https://github.com/koichik/node-tunnel/",
   "keywords": [
     "http",
     "https",
@@ -27,15 +9,26 @@
     "proxy",
     "tunnel"
   ],
+  "homepage": "https://github.com/koichik/node-tunnel/",
+  "bugs": "https://github.com/koichik/node-tunnel/issues",
   "license": "MIT",
+  "author": "Koichi Kobayashi <koichik@improvement.jp>",
   "main": "./index.js",
-  "name": "tunnel",
+  "directories": {
+    "lib": "./lib"
+  },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/koichik/node-tunnel.git"
+    "url": "https://github.com/koichik/node-tunnel.git"
   },
   "scripts": {
     "test": "mocha"
   },
-  "version": "0.0.6"
+  "devDependencies": {
+    "mocha": "^5.2.0",
+    "should": "^13.2.3"
+  },
+  "engines": {
+    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
+  }
 }
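tunnel 0.0.6 is the dependency behind the `tunnelAgent` logic in the index.js hunks above; its agent constructors are named `<target>Over<proxy>`. A sketch under that assumption (proxy host, port, and credentials are hypothetical):

```js
const tunnel = require('tunnel');
const https = require('https');

// HTTPS request tunneled through an HTTP proxy via CONNECT -- the
// combination HttpClient picks when the request is SSL but the proxy
// URL's protocol is not 'https:'.
const agent = tunnel.httpsOverHttp({
  proxy: {
    host: 'proxy.example.com', // hypothetical proxy
    port: 8080,
    proxyAuth: 'user:password' // optional basic auth, mirrors proxyUrl.auth above
  }
});

https.get({ host: 'example.com', path: '/', agent }, res => {
  console.log(res.statusCode);
});
```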
62 node_modules/@actions/http-client/package.json generated vendored
@@ -1,39 +1,39 @@
 {
-  "author": {
-    "name": "GitHub, Inc."
-  },
-  "bugs": {
-    "url": "https://github.com/actions/http-client/issues"
-  },
-  "bundleDependencies": false,
-  "dependencies": {
-    "tunnel": "0.0.6"
-  },
-  "deprecated": false,
-  "description": "Actions Http Client",
-  "devDependencies": {
-    "@types/jest": "^24.0.25",
-    "@types/node": "^12.12.24",
-    "jest": "^24.9.0",
-    "proxy": "^1.0.1",
-    "ts-jest": "^24.3.0",
-    "typescript": "^3.7.4"
-  },
-  "homepage": "https://github.com/actions/http-client#readme",
-  "keywords": [
-    "Actions",
-    "Http"
-  ],
-  "license": "MIT",
-  "main": "index.js",
   "name": "@actions/http-client",
+  "version": "1.0.8",
+  "description": "Actions Http Client",
+  "main": "index.js",
+  "scripts": {
+    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
+    "test": "jest",
+    "format": "prettier --write *.ts && prettier --write **/*.ts",
+    "format-check": "prettier --check *.ts && prettier --check **/*.ts",
+    "audit-check": "npm audit --audit-level=moderate"
+  },
   "repository": {
     "type": "git",
     "url": "git+https://github.com/actions/http-client.git"
   },
-  "scripts": {
-    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
-    "test": "jest"
+  "keywords": [
+    "Actions",
+    "Http"
+  ],
+  "author": "GitHub, Inc.",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/actions/http-client/issues"
   },
-  "version": "1.0.4"
+  "homepage": "https://github.com/actions/http-client#readme",
+  "devDependencies": {
+    "@types/jest": "^25.1.4",
+    "@types/node": "^12.12.31",
+    "jest": "^25.1.0",
+    "prettier": "^2.0.4",
+    "proxy": "^1.0.1",
+    "ts-jest": "^25.2.1",
+    "typescript": "^3.8.3"
+  },
+  "dependencies": {
+    "tunnel": "0.0.6"
+  }
 }
13 node_modules/@actions/http-client/proxy.js generated vendored
@@ -9,12 +9,10 @@ function getProxyUrl(reqUrl) {
     }
     let proxyVar;
     if (usingSsl) {
-        proxyVar = process.env["https_proxy"] ||
-            process.env["HTTPS_PROXY"];
+        proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
     }
     else {
-        proxyVar = process.env["http_proxy"] ||
-            process.env["HTTP_PROXY"];
+        proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
     }
     if (proxyVar) {
         proxyUrl = url.parse(proxyVar);
@@ -26,7 +24,7 @@ function checkBypass(reqUrl) {
     if (!reqUrl.hostname) {
         return false;
     }
-    let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || '';
+    let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
     if (!noProxy) {
         return false;
     }
@@ -47,7 +45,10 @@ function checkBypass(reqUrl) {
         upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
     }
     // Compare request host against noproxy
-    for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) {
+    for (let upperNoProxyItem of noProxy
+        .split(',')
+        .map(x => x.trim().toUpperCase())
+        .filter(x => x)) {
         if (upperReqHosts.some(x => x === upperNoProxyItem)) {
             return true;
         }
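The last hunk only re-wraps the `no_proxy` loop: entries are split on commas, trimmed, upper-cased, and compared against the request host (and host:port). A sketch of that behaviour, requiring the internal proxy module directly purely for illustration:

```js
const url = require('url');
const pm = require('@actions/http-client/proxy');

process.env['no_proxy'] = 'internal.example.com, localhost';

// Matching is case-insensitive and whitespace around commas is ignored.
console.log(pm.checkBypass(url.parse('https://INTERNAL.example.com/x'))); // true
console.log(pm.checkBypass(url.parse('https://example.org/')));           // false
```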
8 node_modules/@actions/tool-cache/README.md generated vendored
@@ -22,11 +22,11 @@ These can then be extracted in platform specific ways:
 const tc = require('@actions/tool-cache');
 
 if (process.platform === 'win32') {
-    const node12Path = tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.zip');
+    const node12Path = await tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.zip');
     const node12ExtractedFolder = await tc.extractZip(node12Path, 'path/to/extract/to');
 
     // Or alternately
-    const node12Path = tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.7z');
+    const node12Path = await tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.7z');
     const node12ExtractedFolder = await tc.extract7z(node12Path, 'path/to/extract/to');
 }
 else {
@@ -37,7 +37,7 @@ else {
 
 #### Cache
 
-Finally, you can cache these directories in our tool-cache. This is useful if you want to switch back and forth between versions of a tool, or save a tool between runs for private runners (private runners are still in development but are on the roadmap).
+Finally, you can cache these directories in our tool-cache. This is useful if you want to switch back and forth between versions of a tool, or save a tool between runs for self-hosted runners.
 
 You'll often want to add it to the path as part of this step:
 
@@ -57,7 +57,7 @@ You can also cache files for reuse.
 ```js
 const tc = require('@actions/tool-cache');
 
-tc.cacheFile('path/to/exe', 'destFileName.exe', 'myExeName', '1.1.0');
+const cachedPath = await tc.cacheFile('path/to/exe', 'destFileName.exe', 'myExeName', '1.1.0');
 ```
 
 #### Find
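The README fixes above all add missing `await`s: `downloadTool`, the extractors, and the caching helpers return promises. A combined sketch of the corrected flow (URL, tool name, and version are illustrative):

```js
const tc = require('@actions/tool-cache');
const core = require('@actions/core');

async function setupNode() {
  // every tool-cache step is async, hence the awaits the hunks add
  const tarPath = await tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-linux-x64.tar.gz');
  const extracted = await tc.extractTar(tarPath, 'path/to/extract/to');
  const cachedPath = await tc.cacheDir(extracted, 'node', '12.7.0');
  core.addPath(cachedPath); // expose the cached tool on PATH
}

setupNode().catch(err => core.setFailed(err.message));
```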
16 node_modules/@actions/tool-cache/lib/manifest.d.ts generated vendored Normal file
@@ -0,0 +1,16 @@
+export interface IToolReleaseFile {
+    filename: string;
+    platform: string;
+    platform_version?: string;
+    arch: string;
+    download_url: string;
+}
+export interface IToolRelease {
+    version: string;
+    stable: boolean;
+    release_url: string;
+    files: IToolReleaseFile[];
+}
+export declare function _findMatch(versionSpec: string, stable: boolean, candidates: IToolRelease[], archFilter: string): Promise<IToolRelease | undefined>;
+export declare function _getOsVersion(): string;
+export declare function _readLinuxVersionFile(): string;
106 node_modules/@actions/tool-cache/lib/manifest.js generated vendored Normal file
@@ -0,0 +1,106 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const semver = __importStar(require("semver"));
+const core_1 = require("@actions/core");
+// needs to be require for core node modules to be mocked
+/* eslint @typescript-eslint/no-require-imports: 0 */
+const os = require("os");
+const cp = require("child_process");
+const fs = require("fs");
+function _findMatch(versionSpec, stable, candidates, archFilter) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const platFilter = os.platform();
+        let result;
+        let match;
+        let file;
+        for (const candidate of candidates) {
+            const version = candidate.version;
+            core_1.debug(`check ${version} satisfies ${versionSpec}`);
+            if (semver.satisfies(version, versionSpec) &&
+                (!stable || candidate.stable === stable)) {
+                file = candidate.files.find(item => {
+                    core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
+                    let chk = item.arch === archFilter && item.platform === platFilter;
+                    if (chk && item.platform_version) {
+                        const osVersion = module.exports._getOsVersion();
+                        if (osVersion === item.platform_version) {
+                            chk = true;
+                        }
+                        else {
+                            chk = semver.satisfies(osVersion, item.platform_version);
+                        }
+                    }
+                    return chk;
+                });
+                if (file) {
+                    core_1.debug(`matched ${candidate.version}`);
+                    match = candidate;
+                    break;
+                }
+            }
+        }
+        if (match && file) {
+            // clone since we're mutating the file list to be only the file that matches
+            result = Object.assign({}, match);
+            result.files = [file];
+        }
+        return result;
+    });
+}
+exports._findMatch = _findMatch;
+function _getOsVersion() {
+    // TODO: add windows and other linux, arm variants
+    // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)
+    const plat = os.platform();
+    let version = '';
+    if (plat === 'darwin') {
+        version = cp.execSync('sw_vers -productVersion').toString();
+    }
+    else if (plat === 'linux') {
+        // lsb_release process not in some containers, readfile
+        // Run cat /etc/lsb-release
+        // DISTRIB_ID=Ubuntu
+        // DISTRIB_RELEASE=18.04
+        // DISTRIB_CODENAME=bionic
+        // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS"
+        const lsbContents = module.exports._readLinuxVersionFile();
+        if (lsbContents) {
+            const lines = lsbContents.split('\n');
+            for (const line of lines) {
+                const parts = line.split('=');
+                if (parts.length === 2 && parts[0].trim() === 'DISTRIB_RELEASE') {
+                    version = parts[1].trim();
+                    break;
+                }
+            }
+        }
+    }
+    return version;
+}
+exports._getOsVersion = _getOsVersion;
+function _readLinuxVersionFile() {
+    const lsbFile = '/etc/lsb-release';
+    let contents = '';
+    if (fs.existsSync(lsbFile)) {
+        contents = fs.readFileSync(lsbFile).toString();
+    }
+    return contents;
+}
+exports._readLinuxVersionFile = _readLinuxVersionFile;
+//# sourceMappingURL=manifest.js.map
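manifest.js is new in this update: `_findMatch` filters a release manifest by semver range, stability, `os.platform()`, the caller's arch, and (via `_getOsVersion`) the OS version for entries that set `platform_version`. A sketch driving it with invented candidate data shaped like `IToolRelease`/`IToolReleaseFile` from manifest.d.ts above:

```js
const manifest = require('@actions/tool-cache/lib/manifest');

const candidates = [ // hypothetical manifest entries
  {
    version: '12.7.0',
    stable: true,
    release_url: 'https://example.com/releases/12.7.0',
    files: [
      {
        filename: 'tool-12.7.0-linux-x64.tar.gz',
        platform: 'linux', // compared against os.platform()
        arch: 'x64',       // compared against the archFilter argument
        download_url: 'https://example.com/tool-12.7.0-linux-x64.tar.gz'
      }
    ]
  }
];

manifest._findMatch('12.x', true, candidates, 'x64').then(match => {
  // on a linux/x64 host: the candidate, with files narrowed to the one
  // matching file; on any other host: undefined
  console.log(match && match.files[0].download_url);
});
```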
1 node_modules/@actions/tool-cache/lib/manifest.js.map generated vendored Normal file
@@ -0,0 +1 @@
+{"version":3,"file":"manifest.js","sourceRoot":"","sources":["../src/manifest.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,+CAAgC;AAChC,wCAAmC;AAEnC,yDAAyD;AACzD,qDAAqD;AAErD,yBAAyB;AACzB,oCAAoC;AACpC,yBAAyB;AAqDzB,SAAsB,UAAU,CAC9B,WAAmB,EACnB,MAAe,EACf,UAA0B,EAC1B,UAAkB;;QAElB,MAAM,UAAU,GAAG,EAAE,CAAC,QAAQ,EAAE,CAAA;QAEhC,IAAI,MAAgC,CAAA;QACpC,IAAI,KAA+B,CAAA;QAEnC,IAAI,IAAkC,CAAA;QACtC,KAAK,MAAM,SAAS,IAAI,UAAU,EAAE;YAClC,MAAM,OAAO,GAAG,SAAS,CAAC,OAAO,CAAA;YAEjC,YAAK,CAAC,SAAS,OAAO,cAAc,WAAW,EAAE,CAAC,CAAA;YAClD,IACE,MAAM,CAAC,SAAS,CAAC,OAAO,EAAE,WAAW,CAAC;gBACtC,CAAC,CAAC,MAAM,IAAI,SAAS,CAAC,MAAM,KAAK,MAAM,CAAC,EACxC;gBACA,IAAI,GAAG,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;oBACjC,YAAK,CACH,GAAG,IAAI,CAAC,IAAI,MAAM,UAAU,OAAO,IAAI,CAAC,QAAQ,MAAM,UAAU,EAAE,CACnE,CAAA;oBAED,IAAI,GAAG,GAAG,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,IAAI,CAAC,QAAQ,KAAK,UAAU,CAAA;oBAClE,IAAI,GAAG,IAAI,IAAI,CAAC,gBAAgB,EAAE;wBAChC,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,aAAa,EAAE,CAAA;wBAEhD,IAAI,SAAS,KAAK,IAAI,CAAC,gBAAgB,EAAE;4BACvC,GAAG,GAAG,IAAI,CAAA;yBACX;6BAAM;4BACL,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAA;yBACzD;qBACF;oBAED,OAAO,GAAG,CAAA;gBACZ,CAAC,CAAC,CAAA;gBAEF,IAAI,IAAI,EAAE;oBACR,YAAK,CAAC,WAAW,SAAS,CAAC,OAAO,EAAE,CAAC,CAAA;oBACrC,KAAK,GAAG,SAAS,CAAA;oBACjB,MAAK;iBACN;aACF;SACF;QAED,IAAI,KAAK,IAAI,IAAI,EAAE;YACjB,4EAA4E;YAC5E,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAA;YACjC,MAAM,CAAC,KAAK,GAAG,CAAC,IAAI,CAAC,CAAA;SACtB;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAtDD,gCAsDC;AAED,SAAgB,aAAa;IAC3B,kDAAkD;IAClD,6GAA6G;IAC7G,MAAM,IAAI,GAAG,EAAE,CAAC,QAAQ,EAAE,CAAA;IAC1B,IAAI,OAAO,GAAG,EAAE,CAAA;IAEhB,IAAI,IAAI,KAAK,QAAQ,EAAE;QACrB,OAAO,GAAG,EAAE,CAAC,QAAQ,CAAC,yBAAyB,CAAC,CAAC,QAAQ,EAAE,CAAA;KAC5D;SAAM,IAAI,IAAI,KAAK,OAAO,EAAE;QAC3B,uDAAuD;QACvD,2BAA2B;QAC3B,oBAAoB;QACpB,wBAAwB;QACxB,0BAA0B;QAC1B,2CAA2C;QAC3C,MAAM,WAAW,GAAG,MAAM,CAAC,OAAO,CAAC,qBAAqB,EAAE,CAAA;QAC1D,IAAI,WAAW,EAAE;YACf,MAAM,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;YACrC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;gBACxB,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;gBAC7B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,KAAK,iBAAiB,EAAE;oBAC/D,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAA;oBACzB,MAAK;iBACN;aACF;SACF;KACF;IAED,OAAO,OAAO,CAAA;AAChB,CAAC;AA7BD,sCA6BC;AAED,SAAgB,qBAAqB;IACnC,MAAM,OAAO,GAAG,kBAAkB,CAAA;IAClC,IAAI,QAAQ,GAAG,EAAE,CAAA;IAEjB,IAAI,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QAC1B,QAAQ,GAAG,EAAE,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC,QAAQ,EAAE,CAAA;KAC/C;IAED,OAAO,QAAQ,CAAA;AACjB,CAAC;AATD,sDASC"}
12 node_modules/@actions/tool-cache/lib/retry-helper.d.ts generated vendored Normal file
@@ -0,0 +1,12 @@
+/**
+ * Internal class for retries
+ */
+export declare class RetryHelper {
+    private maxAttempts;
+    private minSeconds;
+    private maxSeconds;
+    constructor(maxAttempts: number, minSeconds: number, maxSeconds: number);
+    execute<T>(action: () => Promise<T>, isRetryable?: (e: Error) => boolean): Promise<T>;
+    private getSleepAmount;
+    private sleep;
+}
70 node_modules/@actions/tool-cache/lib/retry-helper.js generated vendored Normal file
@@ -0,0 +1,70 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(require("@actions/core"));
+/**
+ * Internal class for retries
+ */
+class RetryHelper {
+    constructor(maxAttempts, minSeconds, maxSeconds) {
+        if (maxAttempts < 1) {
+            throw new Error('max attempts should be greater than or equal to 1');
+        }
+        this.maxAttempts = maxAttempts;
+        this.minSeconds = Math.floor(minSeconds);
+        this.maxSeconds = Math.floor(maxSeconds);
+        if (this.minSeconds > this.maxSeconds) {
+            throw new Error('min seconds should be less than or equal to max seconds');
+        }
+    }
+    execute(action, isRetryable) {
+        return __awaiter(this, void 0, void 0, function* () {
+            let attempt = 1;
+            while (attempt < this.maxAttempts) {
+                // Try
+                try {
+                    return yield action();
+                }
+                catch (err) {
+                    if (isRetryable && !isRetryable(err)) {
+                        throw err;
+                    }
+                    core.info(err.message);
+                }
+                // Sleep
+                const seconds = this.getSleepAmount();
+                core.info(`Waiting ${seconds} seconds before trying again`);
+                yield this.sleep(seconds);
+                attempt++;
+            }
+            // Last attempt
+            return yield action();
+        });
+    }
+    getSleepAmount() {
+        return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
+            this.minSeconds);
+    }
+    sleep(seconds) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise(resolve => setTimeout(resolve, seconds * 1000));
+        });
+    }
+}
+exports.RetryHelper = RetryHelper;
+//# sourceMappingURL=retry-helper.js.map
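retry-helper.js is also new: `execute` runs the action up to `maxAttempts` times, sleeping a random `minSeconds`-`maxSeconds` interval between failures, and the optional `isRetryable` callback can veto a retry. A usage sketch (the flaky operation and the 404 check are invented):

```js
const { RetryHelper } = require('@actions/tool-cache/lib/retry-helper');

const retryHelper = new RetryHelper(3, 10, 20); // 3 attempts, 10-20s sleeps

async function downloadWithRetries() {
  return retryHelper.execute(
    async () => {
      // the flaky operation goes here, e.g. an HTTP download
      return 'ok';
    },
    err => !err.message.includes('404') // returning false rethrows immediately
  );
}

downloadWithRetries().then(console.log);
```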
Some files were not shown because too many files have changed in this diff