Mirror of https://github.com/github/codeql-action.git (synced 2025-12-06 15:58:06 +08:00)

Compare commits: examples-t… ... bundle-tes…

312 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | bcf676e52d |  |
|  | 7c2a7b236c |  |
|  | 0fdcc52338 |  |
|  | d5693a7fd2 |  |
|  | eb4eda5cbe |  |
|  | 6e18b27d4d |  |
|  | de0b59097a |  |
|  | d5c453c995 |  |
|  | 657540584e |  |
|  | a0660c80bd |  |
|  | 396f7167d8 |  |
|  | e5ad069f2c |  |
|  | 5b35de62bd |  |
|  | d5853409b4 |  |
|  | 8608105240 |  |
|  | ac66bbe1fe |  |
|  | 3a28cb4ca8 |  |
|  | 8127c47bbd |  |
|  | 44c88fdd05 |  |
|  | 6230b36dc2 |  |
|  | 3d552ba624 |  |
|  | 42235cc048 |  |
|  | 631929a68f |  |
|  | 128c2cf718 |  |
|  | 69bf3f24d4 |  |
|  | 02d3d62def |  |
|  | f0894d52f3 |  |
|  | 1fb3aaff6e |  |
|  | dc366899d2 |  |
|  | 4896ba51da |  |
|  | 30d2cce9f8 |  |
|  | 1ef33b0330 |  |
|  | 368c14c502 |  |
|  | e8896a906a |  |
|  | f5ccce0c86 |  |
|  | 74268130c6 |  |
|  | ae2d7afe3b |  |
|  | 12a37237d2 |  |
|  | b38a014f94 |  |
|  | 5587e128ff |  |
|  | 74b4d8a6db |  |
|  | 175d681835 |  |
|  | f4e72f4a09 |  |
|  | 5f057318b6 |  |
|  | 0f88c0111f |  |
|  | bfaa0cf943 |  |
|  | 337dbe5618 |  |
|  | 6f42543a85 |  |
|  | 93dd64d351 |  |
|  | 87758a1402 |  |
|  | 813cb0479f |  |
|  | 9769e4a6df |  |
|  | 315a9f4b3c |  |
|  | 0446cb0aff |  |
|  | 29cf06569d |  |
|  | ee63f4ee4b |  |
|  | 5b4f4e40af |  |
|  | 58a0034549 |  |
|  | c7c1aa8045 |  |
|  | b673c57b89 |  |
|  | d138b00811 |  |
|  | b86c3701ed |  |
|  | 7bb6ac6c60 |  |
|  | e775d4e893 |  |
|  | d1d80761ef |  |
|  | 7a78ec0a54 |  |
|  | da3d6d25eb |  |
|  | c3dcf26eaf |  |
|  | 189b6ef4bf |  |
|  | 1a4c658bbf |  |
|  | ec154779ac |  |
|  | ca775cfb2e |  |
|  | fb9f2af49f |  |
|  | 60126bfb39 |  |
|  | 24367a89b5 |  |
|  | 70980b9f32 |  |
|  | bf5b437adb |  |
|  | b6efd2e6de |  |
|  | 8a6b404471 |  |
|  | d781c667b1 |  |
|  | 56417be251 |  |
|  | abf6f239fa |  |
|  | 9fb69dda17 |  |
|  | dcebdd6441 |  |
|  | 56e74b9096 |  |
|  | 13ee335beb |  |
|  | 07caa0f5cf |  |
|  | f77ab09bf4 |  |
|  | 8d908eeab3 |  |
|  | cfcff89771 |  |
|  | fe3dbb7e64 |  |
|  | 1aeb7665e7 |  |
|  | 0086c2ecdb |  |
|  | 60fdcc2376 |  |
|  | 9da537eb33 |  |
|  | 5ab09ae291 |  |
|  | c41d287cae |  |
|  | 8947510a57 |  |
|  | 5d84e87b3d |  |
|  | 9bc459c5f1 |  |
|  | 77e9a735f6 |  |
|  | 57a57713c3 |  |
|  | a0bf50cb7b |  |
|  | 72803c4251 |  |
|  | eaf6649611 |  |
|  | 55a6f9e0a8 |  |
|  | dfed1f7eea |  |
|  | 580e603e94 |  |
|  | de7ff148e5 |  |
|  | 480467971e |  |
|  | e2a8f32427 |  |
|  | 260a93fe06 |  |
|  | dc2678801a |  |
|  | c953f77bb6 |  |
|  | aa6c2c5bda |  |
|  | a52f1a55ed |  |
|  | 1bb294af6b |  |
|  | 25a0a6baed |  |
|  | 4b37db72e4 |  |
|  | 04b2540e30 |  |
|  | 010117c1b7 |  |
|  | e0299c3c04 |  |
|  | 0e3f8311ed |  |
|  | ca76a2ca94 |  |
|  | aad14bf2cb |  |
|  | a08742f199 |  |
|  | 6afe41036b |  |
|  | ee4cc86b19 |  |
|  | 0607771cc2 |  |
|  | 151d531bd0 |  |
|  | 51becd2cf8 |  |
|  | a66f2b0b11 |  |
|  | 504c8cfc6f |  |
|  | a0d4330434 |  |
|  | 7c00663f08 |  |
|  | f8c87948ab |  |
|  | 9566d8c220 |  |
|  | 366d8a32d1 |  |
|  | bb9ed79f3d |  |
|  | 17548064f9 |  |
|  | ef507971e7 |  |
|  | 96d02d50f7 |  |
|  | 0fdc2c71e4 |  |
|  | 28944b580b |  |
|  | 388403b46e |  |
|  | 32c9898fa4 |  |
|  | 56292b1fa3 |  |
|  | 50a2815790 |  |
|  | a19d19e0a3 |  |
|  | 153a598a97 |  |
|  | f4cf65ca2d |  |
|  | b0af5695e6 |  |
|  | 43c1bea680 |  |
|  | 6846c702da |  |
|  | 559e2600c1 |  |
|  | 5bb9e6e131 |  |
|  | 464ce1b43a |  |
|  | 8530f5b76a |  |
|  | a67896b792 |  |
|  | b3ffa760ab |  |
|  | 3871ca717b |  |
|  | a091618158 |  |
|  | 04adf2bf60 |  |
|  | 74c48f71fa |  |
|  | b8ac06a9c8 |  |
|  | 7581ac8b17 |  |
|  | af252d2f0d |  |
|  | cb384e776b |  |
|  | 403832b950 |  |
|  | 52e52435f7 |  |
|  | bc21c8f6f3 |  |
|  | 9a784b1f57 |  |
|  | b3c9d6f3a9 |  |
|  | f009c4c924 |  |
|  | 852d99d8e2 |  |
|  | 052d39e909 |  |
|  | 107d8ffc4c |  |
|  | 88231094bf |  |
|  | 211ad30f72 |  |
|  | 350bf488da |  |
|  | a1d945f14f |  |
|  | bd4042802d |  |
|  | 02776246bf |  |
|  | 66be268a09 |  |
|  | 56f06c77fd |  |
|  | 98f8945cfb |  |
|  | a30a5ba788 |  |
|  | 9133b2b54d |  |
|  | 8e098cbb87 |  |
|  | 7ae8c32cbe |  |
|  | beedd317d2 |  |
|  | 8a67191278 |  |
|  | 1ce7f98898 |  |
|  | 6d413dd723 |  |
|  | 31996935e6 |  |
|  | bcb5b28954 |  |
|  | 8622312249 |  |
|  | c0c67ce80f |  |
|  | bc9591a12b |  |
|  | dcba70915d |  |
|  | 2758bd30c8 |  |
|  | f4001a0790 |  |
|  | d55f711b71 |  |
|  | 2845a93f4c |  |
|  | 7970d85db4 |  |
|  | 74f864bee1 |  |
|  | 38c231113e |  |
|  | 34c941dc31 |  |
|  | 5eccb79587 |  |
|  | 11a9af0387 |  |
|  | 6d036cef6f |  |
|  | f9768ac4ba |  |
|  | 3ff198f23b |  |
|  | ff8fe44e0c |  |
|  | 3f2a60be8a |  |
|  | 4c6749115a |  |
|  | 608ed15968 |  |
|  | 14f179f70b |  |
|  | dc4009c7ed |  |
|  | 14d602cced |  |
|  | 24096a1cb3 |  |
|  | 74d434c5ca |  |
|  | fff3de9938 |  |
|  | 1aae76b906 |  |
|  | 013c02758e |  |
|  | 0b53ebbc36 |  |
|  | 6de3e1cde4 |  |
|  | c9d0312cb7 |  |
|  | 0cdf645694 |  |
|  | d00417a341 |  |
|  | 7928587bdf |  |
|  | 87ecd0d0cc |  |
|  | 7496ac4fb3 |  |
|  | 7e2e297e07 |  |
|  | b97097aaed |  |
|  | 8a8a49d3c5 |  |
|  | fcb696ec59 |  |
|  | c2d2dfdcdd |  |
|  | 042ab541fd |  |
|  | 19faafba94 |  |
|  | c13712badb |  |
|  | 476c8a44ba |  |
|  | f9ef310b75 |  |
|  | 6bd7f17e0e |  |
|  | 582fd14a81 |  |
|  | 8425341ae0 |  |
|  | 1f2cca021a |  |
|  | fa9e0ac2a6 |  |
|  | a9de5b50d7 |  |
|  | af4edf6546 |  |
|  | 5a97f7e980 |  |
|  | d4fb7fc762 |  |
|  | 00ebedc522 |  |
|  | 840dc5ee9a |  |
|  | c1add46efa |  |
|  | e35c90f53d |  |
|  | 6db8182349 |  |
|  | 202704856d |  |
|  | 5ea736059a |  |
|  | a30f8542ee |  |
|  | b4610ac367 |  |
|  | a0d60d5d9e |  |
|  | f18fffbea8 |  |
|  | 655c4497ce |  |
|  | d7a2025f2d |  |
|  | aca790b504 |  |
|  | 22501fd7c8 |  |
|  | 07e22b1f4a |  |
|  | 3c2191ffdd |  |
|  | 28abced8ca |  |
|  | 50dcaaf00d |  |
|  | 30f7117e6a |  |
|  | 28a878efc3 |  |
|  | d518039a6b |  |
|  | 855f965205 |  |
|  | 2909e97a32 |  |
|  | 4997c3ff4d |  |
|  | 0bd4da3a6c |  |
|  | 98ad2fc49d |  |
|  | 3ca3147cd4 |  |
|  | 96da037d49 |  |
|  | da1e237d1e |  |
|  | 054f867322 |  |
|  | 1e600686e7 |  |
|  | cd1625a162 |  |
|  | 8788e5aa59 |  |
|  | 8fb9090674 |  |
|  | 10a2fd615f |  |
|  | 8b71cf3e5f |  |
|  | ae301902e1 |  |
|  | ddee374101 |  |
|  | 80a5f3c700 |  |
|  | 080dc8c3f0 |  |
|  | 6d1f969b1c |  |
|  | ff40939f66 |  |
|  | 7b32c3c950 |  |
|  | 90c07ef21d |  |
|  | 852b9186d6 |  |
|  | 63f52e71c0 |  |
|  | 3a883af8a6 |  |
|  | 886b7d3e6e |  |
|  | 4e12efc7c3 |  |
|  | 5c5f422edb |  |
|  | 97ef91227e |  |
|  | 25e5256866 |  |
|  | 5ec6b7524f |  |
|  | b366432cb3 |  |
|  | fa0a733046 |  |
|  | 0e6df42024 |  |
|  | 58c1abf92e |  |
|  | 999c772fa3 |  |
|  | 74eb3b3541 |  |
10 .editorconfig Normal file

@@ -0,0 +1,10 @@
+root = true
+
+[*]
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.ts]
+indent_style = space
+indent_size = 2
5 .github/ISSUE_TEMPLATE/config.yml vendored Normal file

@@ -0,0 +1,5 @@
+blank_issues_enabled: true
+contact_links:
+  - name: Contact GitHub Support
+    url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
+    about: Contact Support about code scanning
10 .github/codeql/codeql-config.yml vendored

@@ -2,5 +2,13 @@ name: "CodeQL config"
 queries:
   - name: Run custom queries
     uses: ./queries
+  # Run all extra query suites, both because we want to
+  # and because it'll act as extra testing. This is why
+  # we include both even though one is a superset of the
+  # other, because we're testing the parsing logic and
+  # that the suites exist in the codeql bundle.
+  - uses: security-extended
+  - uses: security-and-quality
 paths-ignore:
   - tests
+  - lib
3 .github/pull_request_template.md vendored

@@ -1,7 +1,4 @@
 ### Merge / deployment checklist

-- Run test builds as necessary. Can be on this repository or elsewhere as needed in order to test the change - please include links to tests in other repos!
-  - [ ] CodeQL using init/analyze actions
-  - [ ] 3rd party tool using upload action
 - [ ] Confirm this change is backwards compatible with existing workflows.
 - [ ] Confirm the [readme](https://github.com/github/codeql-action/blob/master/README.md) has been updated if necessary.
178 .github/update-release-branch.py vendored Normal file

@@ -0,0 +1,178 @@
+import datetime
+from github import Github
+import random
+import requests
+import subprocess
+import sys
+
+# The branch being merged from.
+# This is the one that contains day-to-day development work.
+MAIN_BRANCH = 'main'
+# The branch being merged into.
+# This is the release branch that users reference.
+LATEST_RELEASE_BRANCH = 'v1'
+# Name of the remote
+ORIGIN = 'origin'
+
+# Runs git with the given args and returns the stdout.
+# Raises an error if git does not exit successfully.
+def run_git(*args):
+  cmd = ['git', *args]
+  p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  if (p.returncode != 0):
+    raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
+  return p.stdout.decode('ascii')
+
+# Returns true if the given branch exists on the origin remote
+def branch_exists_on_remote(branch_name):
+  return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
+
+# Opens a PR from the given branch to the release branch
+def open_pr(repo, all_commits, short_main_sha, branch_name):
+  # Sort the commits into the pull requests that introduced them,
+  # and any commits that don't have a pull request
+  pull_requests = []
+  commits_without_pull_requests = []
+  for commit in all_commits:
+    pr = get_pr_for_commit(repo, commit)
+
+    if pr is None:
+      commits_without_pull_requests.append(commit)
+    elif not any(p for p in pull_requests if p.number == pr.number):
+      pull_requests.append(pr)
+
+  print('Found ' + str(len(pull_requests)) + ' pull requests')
+  print('Found ' + str(len(commits_without_pull_requests)) + ' commits not in a pull request')
+
+  # Sort PRs and commits by age
+  pull_requests = sorted(pull_requests, key=lambda pr: pr.number)
+  commits_without_pull_requests = sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)
+
+  # Start constructing the body text
+  body = 'Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH
+
+  conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
+  body += '\n\nConductor for this PR is @' + conductor
+
+  # List all PRs merged
+  if len(pull_requests) > 0:
+    body += '\n\nContains the following pull requests:'
+    for pr in pull_requests:
+      merger = get_merger_of_pr(repo, pr)
+      body += '\n- #' + str(pr.number)
+      body += ' - ' + pr.title
+      body += ' (@' + merger + ')'
+
+  # List all commits not part of a PR
+  if len(commits_without_pull_requests) > 0:
+    body += '\n\nContains the following commits not from a pull request:'
+    for commit in commits_without_pull_requests:
+      body += '\n- ' + commit.sha
+      body += ' - ' + get_truncated_commit_message(commit)
+      body += ' (@' + commit.author.login + ')'
+
+  title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
+
+  # Create the pull request
+  pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
+  print('Created PR #' + str(pr.number))
+
+  # Assign the conductor
+  pr.add_to_assignees(conductor)
+  print('Assigned PR to ' + conductor)
+
+# Gets the person who should be in charge of the mergeback PR
+def get_conductor(repo, pull_requests, other_commits):
+  # If there are any PRs then use whoever merged the last one
+  if len(pull_requests) > 0:
+    return get_merger_of_pr(repo, pull_requests[-1])
+
+  # Otherwise take the author of the latest commit
+  return other_commits[-1].author.login
+
+# Gets a list of the SHAs of all commits that have happened on main
+# since the release branched off.
+# This will not include any commits that exist on the release branch
+# that aren't on main.
+def get_commit_difference(repo):
+  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MAIN_BRANCH).strip().split('\n')
+
+  # Convert to full-fledged commit objects
+  commits = [repo.get_commit(c) for c in commits]
+
+  # Filter out merge commits for PRs
+  return list(filter(lambda c: not is_pr_merge_commit(c), commits))
+
+# Is the given commit the automatic merge commit from when merging a PR
+def is_pr_merge_commit(commit):
+  return commit.committer.login == 'web-flow' and len(commit.parents) > 1
+
+# Gets a copy of the commit message that should display nicely
+def get_truncated_commit_message(commit):
+  message = commit.commit.message.split('\n')[0]
+  if len(message) > 60:
+    return message[:57] + '...'
+  else:
+    return message
+
+# Converts a commit into the PR that introduced it to the main branch.
+# Returns the PR object, or None if no PR could be found.
+def get_pr_for_commit(repo, commit):
+  prs = commit.get_pulls()
+
+  if prs.totalCount > 0:
+    # In the case that there are multiple PRs, return the earliest one.
+    # Note: sorted() returns a new list, so the result must be reassigned.
+    prs = list(prs)
+    prs = sorted(prs, key=lambda pr: int(pr.number))
+    return prs[0]
+  else:
+    return None
+
+# Get the person who merged the pull request.
+# For most cases this will be the same as the author, but for PRs opened
+# by external contributors getting the merger will get us the GitHub
+# employee who reviewed and merged the PR.
+def get_merger_of_pr(repo, pr):
+  return repo.get_commit(pr.merge_commit_sha).author.login
+
+def main():
+  if len(sys.argv) != 3:
+    raise Exception('Usage: update-release-branch.py <github token> <repository nwo>')
+  github_token = sys.argv[1]
+  repository_nwo = sys.argv[2]
+
+  repo = Github(github_token).get_repo(repository_nwo)
+
+  # Print what we intend to do
+  print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
+  short_main_sha = run_git('rev-parse', '--short', MAIN_BRANCH).strip()
+  print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)
+
+  # See if there are any commits to merge in
+  commits = get_commit_difference(repo)
+  if len(commits) == 0:
+    print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
+    return
+
+  # The branch name is based off of the name of branch being merged into
+  # and the SHA of the branch being merged from. Thus if the branch already
+  # exists we can assume we don't need to recreate it.
+  new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_main_sha
+  print('Branch name is ' + new_branch_name)
+
+  # Check if the branch already exists. If so we can abort as this script
+  # has already run on this combination of branches.
+  if branch_exists_on_remote(new_branch_name):
+    print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
+    return
+
+  # Create the new branch and push it to the remote
+  print('Creating branch ' + new_branch_name)
+  run_git('checkout', '-b', new_branch_name, MAIN_BRANCH)
+  run_git('push', ORIGIN, new_branch_name)
+
+  # Open a PR to update the branch
+  open_pr(repo, commits, short_main_sha, new_branch_name)
+
+if __name__ == '__main__':
+  main()
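For reference, the release workflow later in this diff installs `PyGithub==1.51` and `requests` and then invokes this script as `python .github/update-release-branch.py <github token> <owner/repo>`; running it by hand would need the same two arguments plus a local checkout whose `origin` remote points at the repository.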
14 .github/workflows/codeql.yml vendored

@@ -1,6 +1,6 @@
 name: "CodeQL action"

-on: [push]
+on: [push, pull_request]

 jobs:
   build:
@@ -10,7 +10,17 @@ jobs:
     runs-on: ${{ matrix.os }}

     steps:
-    - uses: actions/checkout@v1
+    - uses: actions/checkout@v2
+      with:
+        # Must fetch at least the immediate parents so that if this is
+        # a pull request then we can checkout the head of the pull request.
+        fetch-depth: 2
+
+    # If this run was triggered by a pull request event then checkout
+    # the head of the pull request instead of the merge commit.
+    - run: git checkout HEAD^2
+      if: ${{ github.event_name == 'pull_request' }}
+
     - uses: ./init
       with:
         languages: javascript
78 .github/workflows/integration-testing.yml vendored

@@ -1,14 +1,10 @@
 name: "Integration Testing"

-on: [push]
+on: [push, pull_request]

 jobs:
   multi-language-repo_test-autodetect-languages:
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu-latest, windows-latest]
-    runs-on: ${{ matrix.os }}
+    runs-on: ubuntu-latest

     steps:
     - uses: actions/checkout@v2
@@ -16,9 +12,8 @@ jobs:
       shell: bash
       run: |
         mkdir ../action
-        shopt -s dotglob
-        mv * ../action/
-        mv ../action/tests/multi-language-repo/* .
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
     - uses: ./../action/init
     - name: Build code
       shell: bash
@@ -26,8 +21,22 @@ jobs:
     - uses: ./../action/analyze
       env:
         TEST_MODE: true
+    - run: |
+        cd "$RUNNER_TEMP/codeql_databases"
+        # List all directories as there will be precisely one directory per database
+        # but there may be other files in this directory such as query suites.
+        if [ "$(ls -d */ | wc -l)" != 6 ] || \
+           [[ ! -d cpp ]] || \
+           [[ ! -d csharp ]] || \
+           [[ ! -d go ]] || \
+           [[ ! -d java ]] || \
+           [[ ! -d javascript ]] || \
+           [[ ! -d python ]]; then
+          echo "Did not find expected number of databases. Database dir contains: $(ls)"
+          exit 1
+        fi

-  multi-language-repo_test-custom-queries:
+  multi-language-repo_test-custom-queries-and-remote-config:
     strategy:
       fail-fast: false
       matrix:
@@ -40,13 +49,12 @@ jobs:
       shell: bash
       run: |
         mkdir ../action
-        shopt -s dotglob
-        mv * ../action/
-        mv ../action/tests/multi-language-repo/* .
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
     - uses: ./../action/init
       with:
         languages: cpp,csharp,java,javascript,python
-        config-file: ./.github/codeql/custom-queries.yml
+        config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
     - name: Build code
       shell: bash
       run: ./build.sh
@@ -72,9 +80,8 @@ jobs:
       shell: bash
       run: |
         mkdir ../action
-        shopt -s dotglob
-        mv * ../action/
-        mv ../action/tests/multi-language-repo/* .
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
     - uses: ./../action/init
       with:
         languages: go
@@ -86,7 +93,6 @@ jobs:
       env:
         TEST_MODE: true

-
   multi-language-repo_rubocop:
     runs-on: ubuntu-latest

@@ -96,15 +102,14 @@ jobs:
       shell: bash
       run: |
         mkdir ../action
-        shopt -s dotglob
-        mv * ../action/
-        mv ../action/tests/multi-language-repo/* .
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
     - name: Set up Ruby
       uses: ruby/setup-ruby@v1
       with:
         ruby-version: 2.6
     - name: Install Code Scanning integration
-      run: bundle add code-scanning-rubocop --version 0.2.0 --skip-install
+      run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
     - name: Install dependencies
       run: bundle install
     - name: Rubocop run
@@ -117,4 +122,31 @@ jobs:
       with:
         sarif_file: rubocop.sarif
       env:
-        TEST_MODE: true
+        TEST_MODE: true
+
+  test-proxy:
+    runs-on: ubuntu-latest
+    container:
+      image: ubuntu:18.04
+      options: --dns 127.0.0.1
+    services:
+      squid-proxy:
+        image: datadog/squid:latest
+        ports:
+        - 3128:3128
+    env:
+      https_proxy: http://squid-proxy:3128
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+      with:
+        languages: javascript
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
32 .github/workflows/update-release-branch.yml vendored Normal file

@@ -0,0 +1,32 @@
+name: Update release branch
+on:
+  schedule:
+    - cron: 0 9 * * 1
+  repository_dispatch:
+    # Example of how to trigger this:
+    # curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
+    # Replace <token> with a personal access token from this page: https://github.com/settings/tokens
+    types: [update-release-branch]
+  workflow_dispatch:
+
+jobs:
+  update:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2
+      with:
+        # Need full history so we calculate diffs
+        fetch-depth: 0
+
+    - name: Set up Python
+      uses: actions/setup-python@v2
+      with:
+        python-version: 3.5
+
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install PyGithub==1.51 requests
+
+    - name: Update release branch
+      run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
25 .vscode/launch.json vendored Normal file

@@ -0,0 +1,25 @@
+{
+    // Use IntelliSense to learn about possible attributes.
+    // Hover to view descriptions of existing attributes.
+    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "type": "node",
+            "request": "launch",
+            "name": "Debug AVA test file",
+            "runtimeExecutable": "${workspaceFolder}/node_modules/.bin/ava",
+            "runtimeArgs": [
+                "${file}",
+                "--break",
+                "--serial",
+                "--timeout=20m"
+            ],
+            "port": 9229,
+            "outputCapture": "std",
+            "skipFiles": [
+                "<node_internals>/**/*.js"
+            ]
+        }
+    ]
+}
10 .vscode/settings.json vendored Normal file

@@ -0,0 +1,10 @@
+{
+    "files.exclude": {
+        // include the defaults from VS Code
+        "**/.git": true,
+        "**/.DS_Store": true,
+
+        // transpiled JavaScript
+        "lib": true,
+    }
+}
@@ -1,4 +1,4 @@
-## Contributing
+# Contributing

 [fork]: https://github.com/github/codeql-action/fork
 [pr]: https://github.com/github/codeql-action/compare
@@ -10,13 +10,54 @@ Contributions to this project are [released](https://help.github.com/articles/gi

 Please note that this project is released with a [Contributor Code of Conduct][code-of-conduct]. By participating in this project you agree to abide by its terms.

+## Development and Testing
+
+Before you start, ensure that you have a recent version of node installed. You can see which version of node is used by the action in `init/action.yml`.
+
+### Common tasks
+
+* Transpile the TypeScript to JavaScript: `npm run build`. Note that the JavaScript files are committed to git.
+* Run tests: `npm run test`. You’ll need to ensure that the JavaScript files are up-to-date first by running the command above.
+* Run the linter: `npm run lint`.
+
+This project also includes configuration to run tests from VSCode (with support for breakpoints) - open the test file you wish to run and choose "Debug AVA test file" from the Run menu in the Run panel.
+
+### Running the action
+
+To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests.
+
+### Running the action locally
+
+It is possible to run this action locally via [act](https://github.com/nektos/act) using the following steps:
+
+1. Create a GitHub [Personal Access Token](https://github.com/settings/tokens) (PAT).
+1. Install [act](https://github.com/nektos/act) v0.2.10 or greater.
+1. Add a `.env` file in the root of the project you are running:
+
+    ```bash
+    CODEQL_LOCAL_RUN=true
+
+    # Optional, for better logging
+    GITHUB_JOB=<ANY_JOB_NAME>
+    ```
+
+1. Run `act -j codeql -s GITHUB_TOKEN=<PAT>`
+
+Running locally will generate the CodeQL database and run all the queries, but it will avoid uploading and reporting results to GitHub. Note that this must be done on a repository that _consumes_ this action, not this repository. The use case is to debug failures of this action on specific repositories.
+
+### Integration tests
+
+As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you’re making, you may want to add a test to this file or extend an existing one.
+
 ## Submitting a pull request

 1. [Fork][fork] and clone the repository
 2. Create a new branch: `git checkout -b my-branch-name`
 3. Make your change, add tests, and make sure the tests still pass
 4. Push to your fork and [submit a pull request][pr]
-5. Pat your self on the back and wait for your pull request to be reviewed and merged.
+5. Pat yourself on the back and wait for your pull request to be reviewed and merged.
+
+If you're a GitHub staff member, you can merge your own PR once it's approved; for external contributors, GitHub staff will merge your PR once it's approved.

 Here are a few things you can do that will increase the likelihood of your pull request being accepted:
107 README.md

@@ -1,6 +1,6 @@
 # CodeQL Action

-This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/semmle/ql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
+This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.

 ## License
@@ -10,6 +10,8 @@ The underlying CodeQL CLI, used in this action, is licensed under the [GitHub Co

 ## Usage

+This is a short walkthrough, but for more information read [configuring code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning).
+
 To get code scanning results from CodeQL analysis on your repo you can use the following workflow as a template:

 ```yaml
@@ -18,21 +20,29 @@ name: "Code Scanning - Action"
 on:
   push:
+  pull_request:
   schedule:
     - cron: '0 0 * * 0'

 jobs:
   CodeQL-Build:

+    strategy:
+      fail-fast: false
+
+    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
     runs-on: ubuntu-latest

     steps:
     - name: Checkout repository
       uses: actions/checkout@v2
       with:
         # Must fetch at least the immediate parents so that if this is
         # a pull request then we can checkout the head of the pull request.
+        # Only include this option if you are running this workflow on pull requests.
         fetch-depth: 2

     # If this run was triggered by a pull request event then checkout
     # the head of the pull request instead of the merge commit.
+    # Only include this step if you are running this workflow on pull requests.
     - run: git checkout HEAD^2
       if: ${{ github.event_name == 'pull_request' }}

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
@@ -78,24 +88,9 @@ If you prefer to integrate this within an existing CI workflow, it should end up
   uses: github/codeql-action/analyze@v1
 ```

-### Actions triggers
+### Configuration file

-The CodeQL action should be run on `push` events, and on a `schedule`. `Push` events allow us to do a detailed analysis of the delta in a pull request, while the `schedule` event ensures that GitHub regularly scans the repository for the latest vulnerabilities, even if the repository becomes inactive. This action does not support the `pull_request` event.
-
-### Configuration
-
-You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses).
-
-You can disable the default queries using `disable-default-queries: true`.
-
-You can choose to ignore some files or folders from the analysis, or include additional files/folders for analysis. This *only* works for Javascript and Python analysis.
-Identifying potential files for extraction:
-
-- Scans each folder that's defined as `paths` in turn, traversing subfolders, and looking for relevant files.
-- If it finds a subfolder that's defined as `paths-ignore`, stop traversing.
-- If a file or folder is both in `paths` and `paths-ignore`, the `paths-ignore` is ignored.
-
-Use the `config-file` parameter of the init action to enable the configuration file. For example:
+Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.

 ```yaml
 - uses: github/codeql-action/init@v1
@@ -103,72 +98,8 @@ Use the `config-file` parameter of the init action to enable the configuration f
     config-file: ./.github/codeql/codeql-config.yml
 ```

-A config file looks like this:
-
-```yaml
-name: "My CodeQL config"
-
-disable-default-queries: true
-
-queries:
-  - name: In-repo queries (Runs the queries located in the my-queries folder of the repo)
-    uses: ./my-queries
-  - name: External Javascript QL pack (Runs a QL pack located in an external repo)
-    uses: /Semmle/ql/javascript/ql/src/Electron@master
-  - name: External query (Runs a single query located in an external QL pack)
-    uses: Semmle/ql/javascript/ql/src/AngularJS/DeadAngularJSEventListener.ql@master
-  - name: Select query suite (Runs a query suites)
-    uses: ./codeql-querypacks/complex-python-querypack/rootAndBar.qls
-
-paths:
-  - src/util.ts
-
-paths-ignore:
-  - src
-  - lib
-```
+The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."

 ## Troubleshooting

-### Trouble with Go dependencies
-
-#### If you use a vendor directory
-
-Try passing
-
-```yaml
-env:
-  GOFLAGS: "-mod=vendor"
-```
-
-to `github/codeql-action/analyze`.
-
-#### If you do not use a vendor directory
-
-Dependencies on public repositories should just work. If you have dependencies on private repositories, one option is to use `git config` and a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) to authenticate when downloading dependencies. Add a section like
-
-```yaml
-steps:
-- name: Configure git private repo access
-  env:
-    TOKEN: ${{ secrets.GITHUB_PAT }}
-  run: |
-    git config --global url."https://${TOKEN}@github.com/foo/bar".insteadOf "https://github.com/foo/bar"
-    git config --global url."https://${TOKEN}@github.com/foo/baz".insteadOf "https://github.com/foo/baz"
-```
-
-before any codeql actions. A similar thing can also be done with an SSH key or deploy key.
-
-### C# using dotnet version 2 on linux
-
-This currently requires invoking `dotnet` with the `/p:UseSharedCompilation=false` flag. For example:
-
-```shell
-dotnet build /p:UseSharedCompilation=false
-```
-
-Version 3 does not require the additional flag.
-
-### Analysing Go together with other languages on `macos-latest`
-
-When running on macos it is currently not possible to analyze Go in conjunction with any of Java, C/C++, or C#. Each language can still be analyzed separately.
+Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
@@ -4,6 +4,7 @@ author: 'GitHub'
 inputs:
   check_name:
     description: The name of the check run to add text to.
     required: false
   output:
     description: The path of the directory in which to save the SARIF results
     required: false
@@ -11,7 +12,18 @@ inputs:
   upload:
     description: Upload the SARIF file
     required: false
-    default: true
+    default: "true"
+  ram:
+    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
+    required: false
+  threads:
+    description: The number of threads to be used by CodeQL.
+    required: false
+    default: "1"
+  checkout_path:
+    description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
+    required: false
+    default: ${{ github.workspace }}
   token:
     default: ${{ github.token }}
   matrix:
@@ -5,12 +5,14 @@ inputs:
   tools:
     description: URL of CodeQL tools
     required: false
-    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
+    # If not specified the Action will check in several places until it finds the CodeQL tools.
   languages:
     description: The languages to be analysed
     required: false
   token:
     default: ${{ github.token }}
   matrix:
     default: ${{ toJson(matrix) }}
+  config-file:
+    description: Path of the config file to use
+    required: false
@@ -1,11 +0,0 @@
-module.exports = {
-  clearMocks: true,
-  moduleFileExtensions: ['js', 'ts'],
-  testEnvironment: 'node',
-  testMatch: ['**/*.test.ts'],
-  testRunner: 'jest-circus/runner',
-  transform: {
-    '^.+\\.ts$': 'ts-jest'
-  },
-  verbose: true
-}
47 lib/analysis-paths.js generated

@@ -8,19 +8,50 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
-function includeAndExcludeAnalysisPaths(config, languages) {
+function isInterpretedLanguage(language) {
+    return language === 'javascript' || language === 'python';
+}
+// Matches a string containing only characters that are legal to include in paths on windows.
+exports.legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
+// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
+function buildIncludeExcludeEnvVar(paths) {
+    // Ignore anything containing a *
+    paths = paths.filter(p => p.indexOf('*') === -1);
+    // Some characters are illegal in path names in windows
+    if (process.platform === 'win32') {
+        paths = paths.filter(p => p.match(exports.legalWindowsPathCharactersRegex));
+    }
+    return paths.join('\n');
+}
+function includeAndExcludeAnalysisPaths(config) {
+    // The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
+    // control which files/directories are traversed when scanning.
+    // This allows including files that otherwise would not be scanned, or
+    // excluding and not traversing entire file subtrees.
+    // It does not understand globs or double-globs because that would require it to
+    // traverse the entire file tree to determine which files are matched.
+    // Any paths containing "*" are not included in these.
     if (config.paths.length !== 0) {
-        core.exportVariable('LGTM_INDEX_INCLUDE', config.paths.join('\n'));
+        core.exportVariable('LGTM_INDEX_INCLUDE', buildIncludeExcludeEnvVar(config.paths));
     }
     if (config.pathsIgnore.length !== 0) {
-        core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
+        core.exportVariable('LGTM_INDEX_EXCLUDE', buildIncludeExcludeEnvVar(config.pathsIgnore));
     }
-    function isInterpretedLanguage(language) {
-        return language === 'javascript' && language === 'python';
-    }
+    // The 'LGTM_INDEX_FILTERS' environment variable controls which files are
+    // extracted or ignored. It does not control which directories are traversed.
+    // This does understand the glob and double-glob syntax.
+    const filters = [];
+    filters.push(...config.paths.map(p => 'include:' + p));
+    filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
+    if (filters.length !== 0) {
+        core.exportVariable('LGTM_INDEX_FILTERS', filters.join('\n'));
+    }
-    // Index include/exclude only work in javascript and python
-    // If some other language is detected/configured show a warning
-    if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) && !languages.every(isInterpretedLanguage)) {
+    // Index include/exclude/filters only work in javascript and python.
+    // If any other languages are detected/configured then show a warning.
+    if ((config.paths.length !== 0 ||
+        config.pathsIgnore.length !== 0 ||
+        filters.length !== 0) &&
+        !config.languages.every(isInterpretedLanguage)) {
         core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
     }
 }
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAgB,8BAA8B,CAAC,MAA0B,EAAE,SAAmB;IAC1F,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KACtE;IAED,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACjC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC5E;IAED,SAAS,qBAAqB,CAAC,QAAQ;QACnC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;IAC9D,CAAC;IAED,2DAA2D;IAC3D,+DAA+D;IAC/D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAC3G,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC9G;AACL,CAAC;AAlBD,wEAkBC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAClD,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}
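The filter construction in the generated JavaScript above is easier to follow in isolation. Here is a minimal standalone TypeScript sketch of the same idea; the `PathConfig` shape and `buildFilters` name are illustrative stand-ins, not the action's API:

```typescript
// Sketch of the LGTM_INDEX_FILTERS construction shown in the diff above.
// `PathConfig` is a hypothetical stand-in for the action's config object.
interface PathConfig {
  paths: string[];       // e.g. ['path1', '**/path3']
  pathsIgnore: string[]; // e.g. ['path4', 'path6/**']
}

function buildFilters(config: PathConfig): string[] {
  // Each `paths` entry becomes an include filter and each `pathsIgnore`
  // entry an exclude filter; the action joins these with '\n'.
  return [
    ...config.paths.map((p) => `include:${p}`),
    ...config.pathsIgnore.map((p) => `exclude:${p}`),
  ];
}

// Mirrors the expectation in lib/analysis-paths.test.js below:
// ['include:path1', 'include:**/path3', 'exclude:path4']
console.log(buildFilters({ paths: ['path1', '**/path3'], pathsIgnore: ['path4'] }));
```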
29 lib/analysis-paths.test.js generated

@@ -12,19 +12,32 @@ var __importStar = (this && this.__importStar) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 const ava_1 = __importDefault(require("ava"));
 const analysisPaths = __importStar(require("./analysis-paths"));
-const configUtils = __importStar(require("./config-utils"));
+const testing_utils_1 = require("./testing-utils");
+testing_utils_1.setupTests(ava_1.default);
 ava_1.default("emptyPaths", async (t) => {
-    let config = new configUtils.Config();
-    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
+    const config = {
+        languages: [],
+        queries: {},
+        pathsIgnore: [],
+        paths: [],
+        originalUserInput: {},
+    };
+    analysisPaths.includeAndExcludeAnalysisPaths(config);
     t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
     t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
+    t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
 });
 ava_1.default("nonEmptyPaths", async (t) => {
-    let config = new configUtils.Config();
-    config.paths.push('path1', 'path2');
-    config.pathsIgnore.push('path3', 'path4');
-    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
+    const config = {
+        languages: [],
+        queries: {},
+        paths: ['path1', 'path2', '**/path3'],
+        pathsIgnore: ['path4', 'path5', 'path6/**'],
+        originalUserInput: {},
+    };
+    analysisPaths.includeAndExcludeAnalysisPaths(config);
     t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
-    t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path3\npath4');
+    t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
+    t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
 });
 //# sourceMappingURL=analysis-paths.test.js.map
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,4DAA8C;AAE9C,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACzB,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC5B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IACpC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC1C,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;AAC5D,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QAC3C,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}
26 lib/api-client.js generated Normal file

@@ -0,0 +1,26 @@
+"use strict";
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(require("@actions/core"));
+const github = __importStar(require("@actions/github"));
+const console_log_level_1 = __importDefault(require("console-log-level"));
+const util_1 = require("./util");
+exports.getApiClient = function (allowLocalRun = false) {
+    if (util_1.isLocalRun() && !allowLocalRun) {
+        throw new Error('Invalid API call in local run');
+    }
+    return new github.GitHub(core.getInput('token'), {
+        userAgent: "CodeQL Action",
+        log: console_log_level_1.default({ level: "debug" })
+    });
+};
+//# sourceMappingURL=api-client.js.map
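A quick sketch of how a caller might use the factory above. This is hypothetical consumer code, not part of the diff; only `getApiClient` and its `allowLocalRun` flag come from the file itself, and the local-run check presumably keys off the `CODEQL_LOCAL_RUN` variable mentioned in CONTRIBUTING.md:

```typescript
// Hypothetical consumer of lib/api-client.js (sketch only).
import { getApiClient } from './api-client';

// Outside of local runs the default argument is fine; local tooling and
// tests must opt in explicitly, otherwise getApiClient throws
// 'Invalid API call in local run'.
const client = getApiClient(/* allowLocalRun */ true);
// `client` is an authenticated Octokit instance from @actions/github.
```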
1 lib/api-client.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEhD,iCAAoC;AAEvB,QAAA,YAAY,GAAG,UAAS,aAAa,GAAG,KAAK;IACxD,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB;QACE,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC"}
46 lib/autobuild.js generated

@@ -8,22 +8,35 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
-const exec = __importStar(require("@actions/exec"));
-const path = __importStar(require("path"));
-const sharedEnv = __importStar(require("./shared-environment"));
+const codeql_1 = require("./codeql");
+const config_utils = __importStar(require("./config-utils"));
 const util = __importStar(require("./util"));
+async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
+    var _a, _b;
+    const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
+    const statusReportBase = await util.createStatusReportBase('autobuild', status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
+    const statusReport = {
+        ...statusReportBase,
+        autobuild_languages: allLanguages.join(','),
+        autobuild_failure: failingLanguage,
+    };
+    await util.sendStatusReport(statusReport);
+}
 async function run() {
-    var _a;
+    const startedAt = new Date();
+    let language;
     try {
-        if (util.should_abort('autobuild', true) || !await util.reportActionStarting('autobuild')) {
+        util.prepareLocalRunEnvironment();
+        if (!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
             return;
         }
+        const config = await config_utils.getConfig();
         // Attempt to find a language to autobuild
         // We want to pick the dominant language in the repo from the ones we're able to build
         // The languages are sorted in order specified by user or by lines of code if we got
         // them from the GitHub API, so try to build the first language on the list.
-        const autobuildLanguages = ((_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];
-        const language = autobuildLanguages[0];
+        const autobuildLanguages = config.languages.filter(codeql_1.isTracedLanguage);
+        language = autobuildLanguages[0];
         if (!language) {
            core.info("None of the languages in this project require extra build steps");
            return;
@@ -33,26 +46,17 @@ async function run() {
            core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
         }
         core.startGroup(`Attempting to automatically build ${language} code`);
-        // TODO: share config accross actions better via env variables
-        const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
-        const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
-        const autobuildCmd = path.join(path.dirname(codeqlCmd), language, 'tools', cmdName);
-        // Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
-        // This is because of an issue with Azure pipelines timing out connections after 4 minutes
-        // and Maven not properly handling closed connections
-        // Otherwise long build processes will timeout when pulling down Java packages
-        // https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
-        let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
-        process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
-        await exec.exec(autobuildCmd);
+        const codeQL = codeql_1.getCodeQL();
+        await codeQL.runAutobuild(language);
         core.endGroup();
     }
     catch (error) {
         core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
-        await util.reportActionFailed('autobuild', error.message, error.stack);
+        console.log(error);
+        await sendCompletedStatusReport(startedAt, [language], language, error);
         return;
     }
-    await util.reportActionSucceeded('autobuild');
+    await sendCompletedStatusReport(startedAt, [language]);
 }
 run().catch(e => {
     core.setFailed("autobuild action failed. " + e);
@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,2CAA6B;AAE7B,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,8DAA8D;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC;QAExE,MAAM,OAAO,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,cAAc,CAAC;QAChF,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;QAGpF,+DAA+D;QAC/D,0FAA0F;QAC1F,qDAAqD;QACrD,8EAA8E;QAC9E,gHAAgH;QAChH,IAAI,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,wBAAwB,EAAE,+BAA+B,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAE1I,MAAM,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC9B,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAuD;AACvD,6DAA+C;AAC/C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GAAG,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CAAC,CAAC;IAChB,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,CAAC;IACb,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC7G,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,EAAE,CAAC;QAE9C,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,yBAAgB,CAAC,CAAC;QACrE,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEjC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
331 lib/codeql.js generated Normal file
@@ -0,0 +1,331 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const http = __importStar(require("@actions/http-client"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
const stream = __importStar(require("stream"));
const globalutil = __importStar(require("util"));
const v4_1 = __importDefault(require("uuid/v4"));
const api = __importStar(require("./api-client"));
const util = __importStar(require("./util"));
/**
 * Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
 * Can be overridden in tests using `setCodeQL`.
 */
let cachedCodeQL = undefined;
/**
 * Environment variable used to store the location of the CodeQL CLI executable.
 * Value is set by setupCodeQL and read by getCodeQL.
 */
const CODEQL_ACTION_CMD = "CODEQL_ACTION_CMD";
const CODEQL_BUNDLE_VERSION = "codeql-bundle-20200630";
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLActionRepository() {
    // Actions do not know their own repository name,
    // so we currently use this hack to find the name based on where our files are.
    // This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
    const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
    const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
    const relativeScriptPath = path.relative(actionsDirectory, __filename);
    // This handles the case where the Action does not come from an Action repository,
    // e.g. our integration tests which use the Action code from the current checkout.
    if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
        return CODEQL_DEFAULT_ACTION_REPOSITORY;
    }
    const relativeScriptPathParts = relativeScriptPath.split(path.sep);
    return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
}
async function getCodeQLBundleDownloadURL() {
    const codeQLActionRepository = getCodeQLActionRepository();
    const potentialDownloadSources = [
        // This GitHub instance, and this Action.
        [util.getInstanceAPIURL(), codeQLActionRepository],
        // This GitHub instance, and the canonical Action.
        [util.getInstanceAPIURL(), CODEQL_DEFAULT_ACTION_REPOSITORY],
        // GitHub.com, and the canonical Action.
        [util.GITHUB_DOTCOM_API_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
    ];
    // We now filter out any duplicates.
    // Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
    const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
    for (let downloadSource of uniqueDownloadSources) {
        let [apiURL, repository] = downloadSource;
        // If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
        if (apiURL === util.GITHUB_DOTCOM_API_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
            break;
        }
        let [repositoryOwner, repositoryName] = repository.split("/");
        try {
            const release = await api.getApiClient().repos.getReleaseByTag({
                owner: repositoryOwner,
                repo: repositoryName,
                tag: CODEQL_BUNDLE_VERSION
            });
            for (let asset of release.data.assets) {
                if (asset.name === CODEQL_BUNDLE_NAME) {
                    core.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
                    return asset.url;
                }
            }
        }
        catch (e) {
            core.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
        }
    }
    return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
}
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url, headers) {
    const client = new http.HttpClient('CodeQL Action');
    const dest = path.join(util.getRequiredEnvParam('RUNNER_TEMP'), v4_1.default());
    const response = await client.get(url, headers);
    if (response.message.statusCode !== 200) {
        const err = new toolcache.HTTPError(response.message.statusCode);
        core.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
        throw err;
    }
    const pipeline = globalutil.promisify(stream.pipeline);
    fs.mkdirSync(path.dirname(dest), { recursive: true });
    await pipeline(response.message, fs.createWriteStream(dest));
    return dest;
}
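/**
 * Download the CodeQL bundle if it is not already in the tool cache, extract
 * it, and record the path to the CLI in the CODEQL_ACTION_CMD environment
 * variable so that later steps can retrieve it via getCodeQL.
 */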
async function setupCodeQL() {
    try {
        let codeqlURL = core.getInput('tools');
        const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
        let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
        if (codeqlFolder) {
            core.debug(`CodeQL found in cache ${codeqlFolder}`);
        }
        else {
            if (!codeqlURL) {
                codeqlURL = await getCodeQLBundleDownloadURL();
            }
            const headers = { accept: 'application/octet-stream' };
            // We only want to provide an authorization header if we are downloading
            // from the same GitHub instance the Action is running on.
            // This avoids leaking Enterprise tokens to dotcom.
            if (codeqlURL.startsWith(util.getInstanceAPIURL() + "/")) {
                core.debug('Downloading CodeQL bundle with token.');
                let token = core.getInput('token', { required: true });
                headers.authorization = `token ${token}`;
            }
            else {
                core.debug('Downloading CodeQL bundle without token.');
            }
            let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers);
            core.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
            const codeqlExtracted = await toolcache.extractTar(codeqlPath);
            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
        }
        let codeqlCmd = path.join(codeqlFolder, 'codeql', 'codeql');
        if (process.platform === 'win32') {
            codeqlCmd += ".exe";
        }
        else if (process.platform !== 'linux' && process.platform !== 'darwin') {
            throw new Error("Unsupported platform: " + process.platform);
        }
        cachedCodeQL = getCodeQLForCmd(codeqlCmd);
        core.exportVariable(CODEQL_ACTION_CMD, codeqlCmd);
        return cachedCodeQL;
    }
    catch (e) {
        core.error(e);
        throw new Error("Unable to download and extract CodeQL CLI");
    }
}
exports.setupCodeQL = setupCodeQL;
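// Infer the bundle version from a '.../codeql-bundle-<version>/...' URL,
// mapping date-style versions such as 20200601 to the SemVer pre-release
// 0.0.0-20200601 so they can be stored in the tool cache.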
function getCodeQLURLVersion(url) {
    const match = url.match(/\/codeql-bundle-(.*)\//);
    if (match === null || match.length < 2) {
        throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
    }
    let version = match[1];
    if (!semver.valid(version)) {
        core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
        version = '0.0.0-' + version;
    }
    const s = semver.clean(version);
    if (!s) {
        throw new Error(`Malformed tools url: ${url}. Version should be in SemVer format but got ${version} instead`);
    }
    return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
function getCodeQL() {
    if (cachedCodeQL === undefined) {
        const codeqlCmd = util.getRequiredEnvParam(CODEQL_ACTION_CMD);
        cachedCodeQL = getCodeQLForCmd(codeqlCmd);
    }
    return cachedCodeQL;
}
exports.getCodeQL = getCodeQL;
function resolveFunction(partialCodeql, methodName) {
    if (typeof partialCodeql[methodName] !== 'function') {
        const dummyMethod = () => {
            throw new Error('CodeQL ' + methodName + ' method not correctly defined');
        };
        return dummyMethod;
    }
    return partialCodeql[methodName];
}
/**
 * Set the functionality for CodeQL methods. Only for use in tests.
 *
 * Accepts a partial object and any undefined methods will be implemented
 * to immediately throw an exception indicating which method is missing.
 */
function setCodeQL(partialCodeql) {
    cachedCodeQL = {
        getDir: resolveFunction(partialCodeql, 'getDir'),
        printVersion: resolveFunction(partialCodeql, 'printVersion'),
        getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
        databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
        runAutobuild: resolveFunction(partialCodeql, 'runAutobuild'),
        extractScannedLanguage: resolveFunction(partialCodeql, 'extractScannedLanguage'),
        finalizeDatabase: resolveFunction(partialCodeql, 'finalizeDatabase'),
        resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
        databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
    };
}
exports.setCodeQL = setCodeQL;
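// Construct the CodeQL object for a given CLI path. Each method shells out
// to the CLI via @actions/exec with the appropriate subcommand and arguments.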
function getCodeQLForCmd(cmd) {
    return {
        getDir: function () {
            return path.dirname(cmd);
        },
        printVersion: async function () {
            await exec.exec(cmd, [
                'version',
                '--format=json'
            ]);
        },
        getTracerEnv: async function (databasePath, compilerSpec) {
            let envFile = path.resolve(databasePath, 'working', 'env.tmp');
            const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
            await exec.exec(cmd, [
                'database',
                'trace-command',
                databasePath,
                ...compilerSpecArg,
                process.execPath,
                path.resolve(__dirname, 'tracer-env.js'),
                envFile
            ]);
            return JSON.parse(fs.readFileSync(envFile, 'utf-8'));
        },
        databaseInit: async function (databasePath, language, sourceRoot) {
            await exec.exec(cmd, [
                'database',
                'init',
                databasePath,
                '--language=' + language,
                '--source-root=' + sourceRoot,
            ]);
        },
        runAutobuild: async function (language) {
            const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
            const autobuildCmd = path.join(path.dirname(cmd), language, 'tools', cmdName);
            // Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
            // This is because of an issue with Azure pipelines timing out connections after 4 minutes
            // and Maven not properly handling closed connections
            // Otherwise long build processes will timeout when pulling down Java packages
            // https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
            let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
            process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
            await exec.exec(autobuildCmd);
        },
        extractScannedLanguage: async function (databasePath, language) {
            // Get extractor location
            let extractorPath = '';
            await exec.exec(cmd, [
                'resolve',
                'extractor',
                '--format=json',
                '--language=' + language
            ], {
                silent: true,
                listeners: {
                    stdout: (data) => { extractorPath += data.toString(); },
                    stderr: (data) => { process.stderr.write(data); }
                }
            });
            // Set trace command
            const ext = process.platform === 'win32' ? '.cmd' : '.sh';
            const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
            // Run trace command
            await exec.exec(cmd, [
                'database',
                'trace-command',
                databasePath,
                '--',
                traceCommand
            ]);
        },
        finalizeDatabase: async function (databasePath) {
            await exec.exec(cmd, [
                'database',
                'finalize',
                databasePath
            ]);
        },
        resolveQueries: async function (queries, extraSearchPath) {
            const codeqlArgs = [
                'resolve',
                'queries',
                ...queries,
                '--format=bylanguage'
            ];
            if (extraSearchPath !== undefined) {
                codeqlArgs.push('--search-path', extraSearchPath);
            }
            let output = '';
            await exec.exec(cmd, codeqlArgs, {
                listeners: {
                    stdout: (data) => {
                        output += data.toString();
                    }
                }
            });
            return JSON.parse(output);
        },
        databaseAnalyze: async function (databasePath, sarifFile, querySuite) {
            await exec.exec(cmd, [
                'database',
                'analyze',
                util.getMemoryFlag(),
                util.getThreadsFlag(),
                databasePath,
                '--format=sarif-latest',
                '--output=' + sarifFile,
                '--no-sarif-add-snippets',
                querySuite
            ]);
        }
    };
}
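// Traced languages (cpp, java, csharp) are extracted by tracing a build of
// the project; all other languages are scanned directly from the checkout.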
function isTracedLanguage(language) {
    return ['cpp', 'java', 'csharp'].includes(language);
}
exports.isTracedLanguage = isTracedLanguage;
function isScannedLanguage(language) {
    return !isTracedLanguage(language);
}
exports.isScannedLanguage = isScannedLanguage;
//# sourceMappingURL=codeql.js.map
1 lib/codeql.js.map Normal file
File diff suppressed because one or more lines are too long
60 lib/codeql.test.js generated Normal file
@@ -0,0 +1,60 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const codeql = __importStar(require("./codeql"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
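// Serves a checked-in test bundle from a mocked server under two different
// version tags and checks that each ends up in the tool cache under its own
// inferred version.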
ava_1.default('download codeql bundle cache', async (t) => {
    await util.withTmpDir(async (tmpDir) => {
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
        process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
        const versions = ['20200601', '20200610'];
        for (let i = 0; i < versions.length; i++) {
            const version = versions[i];
            nock_1.default('https://example.com')
                .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
                .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
            process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
            await codeql.setupCodeQL();
            t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
        }
        const cachedVersions = toolcache.findAllVersions('CodeQL');
        t.is(cachedVersions.length, 2);
    });
});
ava_1.default('parse codeql bundle url version', t => {
    const tests = {
        '20200601': '0.0.0-20200601',
        '20200601.0': '0.0.0-20200601.0',
        '20200601.0.0': '20200601.0.0',
        '1.2.3': '1.2.3',
        '1.2.3-alpha': '1.2.3-alpha',
        '1.2.3-beta.1': '1.2.3-beta.1',
    };
    for (const [version, expectedVersion] of Object.entries(tests)) {
        const url = `https://github.com/.../codeql-bundle-${version}/...`;
        try {
            const parsedVersion = codeql.getCodeQLURLVersion(url);
            t.deepEqual(parsedVersion, expectedVersion);
        }
        catch (e) {
            t.fail(e.message);
        }
    }
});
//# sourceMappingURL=codeql.test.js.map
1 lib/codeql.test.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,MAAM,CAAC,WAAW,EAAE,CAAC;YAE3B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YACtD,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}
664 lib/config-utils.js generated
@@ -8,129 +8,589 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const yaml = __importStar(require("js-yaml"));
const path = __importStar(require("path"));
class ExternalQuery {
    constructor(repository, ref) {
        this.path = '';
        this.repository = repository;
        this.ref = ref;
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
const util = __importStar(require("./util"));
// Property names from the user-supplied config file.
const NAME_PROPERTY = 'name';
const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
const QUERIES_PROPERTY = 'queries';
const QUERIES_USES_PROPERTY = 'uses';
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
const PATHS_PROPERTY = 'paths';
// All the languages supported by CodeQL
const ALL_LANGUAGES = ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'];
// Some alternate names for languages
const LANGUAGE_ALIASES = {
    'c': 'cpp',
    'typescript': 'javascript',
};
/**
 * A list of queries from https://github.com/github/codeql that
 * we don't want to run. Disabling them here is a quicker alternative to
 * disabling them in the code scanning query suites. Queries should also
 * be disabled in the suites, and removed from this list here once the
 * bundle is updated to make those suite changes live.
 *
 * Format is a map from language to an array of path suffixes of .ql files.
 */
const DISABLED_BUILTIN_QUERIES = {
    'csharp': [
        'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
        'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
    ]
};
function queryIsDisabled(language, query) {
    return (DISABLED_BUILTIN_QUERIES[language] || [])
        .some(disabledQuery => query.endsWith(disabledQuery));
}
/**
 * Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are
 * both empty and errors if they are not.
 */
function validateQueries(resolvedQueries) {
    const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
    const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
    if (noDeclaredLanguageQueries.length !== 0) {
        throw new Error('The following queries do not declare a language. ' +
            'Their qlpack.yml files are either missing or invalid.\n' +
            noDeclaredLanguageQueries.join('\n'));
    }
    const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
    const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
    if (multipleDeclaredLanguagesQueries.length !== 0) {
        throw new Error('The following queries declare multiple languages. ' +
            'Their qlpack.yml files are either missing or invalid.\n' +
            multipleDeclaredLanguagesQueries.join('\n'));
    }
}
exports.ExternalQuery = ExternalQuery;
class Config {
    constructor() {
        this.name = "";
        this.disableDefaultQueries = false;
        this.additionalQueries = [];
        this.externalQueries = [];
        this.pathsIgnore = [];
        this.paths = [];
/**
 * Run 'codeql resolve queries' and add the results to resultMap
 */
async function runResolveQueries(resultMap, toResolve, extraSearchPath, errorOnInvalidQueries) {
    const codeQl = codeql_1.getCodeQL();
    const resolvedQueries = await codeQl.resolveQueries(toResolve, extraSearchPath);
    for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
        if (resultMap[language] === undefined) {
            resultMap[language] = [];
        }
        resultMap[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
    }
    addQuery(queryUses) {
        // The logic for parsing the string is based on what actions does for
        // parsing the 'uses' actions in the workflow file
        if (queryUses === "") {
            throw '"uses" value for queries cannot be blank';
        }
        if (queryUses.startsWith("./")) {
            this.additionalQueries.push(queryUses.slice(2));
            return;
        }
        let tok = queryUses.split('@');
        if (tok.length !== 2) {
            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
        }
        const ref = tok[1];
        tok = tok[0].split('/');
        // The first token is the owner
        // The second token is the repo
        // The rest is a path, if there is more than one token combine them to form the full path
        if (tok.length > 3) {
            tok = [tok[0], tok[1], tok.slice(2).join('/')];
        }
        if (tok.length < 2) {
            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
        }
        let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
        if (tok.length === 3) {
            external.path = tok[2];
        }
        this.externalQueries.push(external);
    if (errorOnInvalidQueries) {
        validateQueries(resolvedQueries);
    }
}
exports.Config = Config;
const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
function initConfig() {
/**
 * Get the set of queries included by default.
 */
async function addDefaultQueries(languages, resultMap) {
    const suites = languages.map(l => l + '-code-scanning.qls');
    await runResolveQueries(resultMap, suites, undefined, false);
}
// The set of acceptable values for built-in suites from the codeql bundle
const builtinSuites = ['security-extended', 'security-and-quality'];
/**
 * Determine the set of queries associated with suiteName's suites and add them to resultMap.
 * Throws an error if suiteName is not a valid builtin suite.
 */
async function addBuiltinSuiteQueries(configFile, languages, resultMap, suiteName) {
    const suite = builtinSuites.find((suite) => suite === suiteName);
    if (!suite) {
        throw new Error(getQueryUsesInvalid(configFile, suiteName));
    }
    const suites = languages.map(l => l + '-' + suiteName + '.qls');
    await runResolveQueries(resultMap, suites, undefined, false);
}
/**
 * Retrieve the set of queries at localQueryPath and add them to resultMap.
 */
async function addLocalQueries(configFile, resultMap, localQueryPath) {
    // Resolve the local path against the workspace so that when this is
    // passed to codeql it resolves to exactly the path we expect it to resolve to.
    const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
    let absoluteQueryPath = path.join(workspacePath, localQueryPath);
    // Check the file exists
    if (!fs.existsSync(absoluteQueryPath)) {
        throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
    }
    // Call this after checking file exists, because it'll fail if file doesn't exist
    absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
    // Check the local path doesn't jump outside the repo using '..' or symlinks
    if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
        throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
    }
    // Get the root of the current repo to use when resolving query dependencies
    const rootOfRepo = util.getRequiredEnvParam('GITHUB_WORKSPACE');
    await runResolveQueries(resultMap, [absoluteQueryPath], rootOfRepo, true);
}
/**
 * Retrieve the set of queries at the referenced remote repo and add them to resultMap.
 */
async function addRemoteQueries(configFile, resultMap, queryUses) {
    let tok = queryUses.split('@');
    if (tok.length !== 2) {
        throw new Error(getQueryUsesInvalid(configFile, queryUses));
    }
    const ref = tok[1];
    tok = tok[0].split('/');
    // The first token is the owner
    // The second token is the repo
    // The rest is a path, if there is more than one token combine them to form the full path
    if (tok.length < 2) {
        throw new Error(getQueryUsesInvalid(configFile, queryUses));
    }
    // Check none of the parts of the repository name are empty
    if (tok[0].trim() === '' || tok[1].trim() === '') {
        throw new Error(getQueryUsesInvalid(configFile, queryUses));
    }
    const nwo = tok[0] + '/' + tok[1];
    // Checkout the external repository
    const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref);
    const queryPath = tok.length > 2
        ? path.join(rootOfRepo, tok.slice(2).join('/'))
        : rootOfRepo;
    await runResolveQueries(resultMap, [queryPath], rootOfRepo, true);
}
/**
 * Parse a query 'uses' field to a discrete set of query files and update resultMap.
 *
 * The logic for parsing the string is based on what actions does for
 * parsing the 'uses' actions in the workflow file. So it can handle
 * local paths starting with './', or references to remote repos, or
 * a finite set of hardcoded terms for builtin suites.
 */
async function parseQueryUses(configFile, languages, resultMap, queryUses) {
    queryUses = queryUses.trim();
    if (queryUses === "") {
        throw new Error(getQueryUsesInvalid(configFile));
    }
    // Check for the local path case before we start trying to parse the repository name
    if (queryUses.startsWith("./")) {
        await addLocalQueries(configFile, resultMap, queryUses.slice(2));
        return;
    }
    // Check for one of the builtin suites
    if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
        await addBuiltinSuiteQueries(configFile, languages, resultMap, queryUses);
        return;
    }
    // Otherwise, must be a reference to another repo
    await addRemoteQueries(configFile, resultMap, queryUses);
}
// Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately
// preceded and followed by a slash.
const pathStarsRegex = /.*(?:\*\*[^/].*|\*\*$|[^/]\*\*.*)/;
// Characters that are supported by filters in workflows, but not by us.
// See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/;
// Checks that a paths or paths-ignore entry is valid, possibly modifying it
// to make it valid, or if not possible then throws an error.
function validateAndSanitisePath(originalPath, propertyName, configFile) {
    // Take a copy so we don't modify the original path, so we can still construct error messages
    let path = originalPath;
    // All paths are relative to the src root, so strip off leading slashes.
    while (path.charAt(0) === '/') {
        path = path.substring(1);
    }
    // Trailing ** are redundant, so strip them off
    if (path.endsWith('/**')) {
        path = path.substring(0, path.length - 2);
    }
    // An empty path is not allowed as it's meaningless
    if (path === '') {
        throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" is not a valid path. ' +
            'It is not necessary to include it, and it is not allowed to exclude it.'));
    }
    // Check for illegal uses of **
    if (path.match(pathStarsRegex)) {
        throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an invalid "**" wildcard. ' +
            'They must be immediately preceded and followed by a slash as in "/**/", or come at the start or end.'));
    }
    // Check for other regex characters that we don't support.
    // Output a warning so the user knows, but otherwise continue normally.
    if (path.match(filterPatternCharactersRegex)) {
        core.warning(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an unsupported character. ' +
            'The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.'));
    }
    // Ban any uses of backslash for now.
    // This may not play nicely with project layouts.
    // This restriction can be lifted later if we determine they are ok.
    if (path.indexOf('\\') !== -1) {
        throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an "\\" character. These are not allowed in filters. ' +
            'If running on windows we recommend using "/" instead for path filters.'));
    }
    return path;
}
exports.validateAndSanitisePath = validateAndSanitisePath;
function getNameInvalid(configFile) {
    return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
}
exports.getNameInvalid = getNameInvalid;
function getDisableDefaultQueriesInvalid(configFile) {
    return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
}
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
function getQueriesInvalid(configFile) {
    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
}
exports.getQueriesInvalid = getQueriesInvalid;
function getQueryUsesInvalid(configFile, queryUses) {
    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
        '), a relative path, or be of the form "owner/repo[/path]@ref"' +
        (queryUses !== undefined ? '\n Found: ' + queryUses : ''));
}
exports.getQueryUsesInvalid = getQueryUsesInvalid;
function getPathsIgnoreInvalid(configFile) {
    return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
}
exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
function getPathsInvalid(configFile) {
    return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
}
exports.getPathsInvalid = getPathsInvalid;
function getLocalPathOutsideOfRepository(configFile, localPath) {
    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
}
exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
function getLocalPathDoesNotExist(configFile, localPath) {
    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
}
exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
    return 'The configuration file "' + configFile + '" is outside of the workspace';
}
exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
function getConfigFileDoesNotExistErrorMessage(configFile) {
    return 'The configuration file "' + configFile + '" does not exist';
}
exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
function getConfigFileRepoFormatInvalidMessage(configFile) {
    let error = 'The configuration file "' + configFile + '" is not a supported remote file reference.';
    error += ' Expected format <owner>/<repository>/<file-path>@<ref>';
    return error;
}
exports.getConfigFileRepoFormatInvalidMessage = getConfigFileRepoFormatInvalidMessage;
function getConfigFileFormatInvalidMessage(configFile) {
    return 'The configuration file "' + configFile + '" could not be read';
}
exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage;
function getConfigFileDirectoryGivenMessage(configFile) {
    return 'The configuration file "' + configFile + '" looks like a directory, not a file';
}
exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
function getConfigFilePropertyError(configFile, property, error) {
    return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
}
function getNoLanguagesError() {
    return "Did not detect any languages to analyze. " +
        "Please update input in workflow or check that GitHub detects the correct languages in your repository.";
}
exports.getNoLanguagesError = getNoLanguagesError;
function getUnknownLanguagesError(languages) {
    return "Did not recognise the following languages: " + languages.join(', ');
}
exports.getUnknownLanguagesError = getUnknownLanguagesError;
/**
 * Gets the set of languages in the current repository
 */
async function getLanguagesInRepo() {
    var _a;
    // Translate between GitHub's API names for languages and ours
    const codeqlLanguages = {
        'C': 'cpp',
        'C++': 'cpp',
        'C#': 'csharp',
        'Go': 'go',
        'Java': 'java',
        'JavaScript': 'javascript',
        'TypeScript': 'javascript',
        'Python': 'python',
    };
    let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
    if (repo_nwo) {
        let owner = repo_nwo[0];
        let repo = repo_nwo[1];
        core.debug(`GitHub repo ${owner} ${repo}`);
        const response = await api.getApiClient(true).repos.listLanguages({
            owner,
            repo
        });
        core.debug("Languages API response: " + JSON.stringify(response));
        // The GitHub API is going to return languages in order of popularity.
        // When we pick a language to autobuild we want to pick the most popular traced language.
        // Since sets in javascript maintain insertion order, using a set here and then splatting it
        // into an array gives us an array of languages ordered by popularity
        let languages = new Set();
        for (let lang in response.data) {
            if (lang in codeqlLanguages) {
                languages.add(codeqlLanguages[lang]);
            }
        }
        return [...languages];
    }
    else {
        return [];
    }
}
/**
 * Get the languages to analyse.
 *
 * The result is obtained from the action input parameter 'languages' if that
 * has been set, otherwise it is deduced as all languages in the repo that
 * can be analysed.
 *
 * If no languages could be detected from either the workflow or the repository
 * then throw an error.
 */
async function getLanguages() {
    // Obtain from action input 'languages' if set
    let languages = core.getInput('languages', { required: false })
        .split(',')
        .map(x => x.trim())
        .filter(x => x.length > 0);
    core.info("Languages from configuration: " + JSON.stringify(languages));
    if (languages.length === 0) {
        // Obtain languages as all languages in the repo that can be analysed
        languages = await getLanguagesInRepo();
        core.info("Automatically detected languages: " + JSON.stringify(languages));
    }
    // If the languages parameter was not given and no languages were
    // detected then fail here as this is a workflow configuration error.
    if (languages.length === 0) {
        throw new Error(getNoLanguagesError());
    }
    // Make sure they are supported
    const checkedLanguages = [];
    const unknownLanguages = [];
    for (let language of languages) {
        // Normalise to lower case
        language = language.toLowerCase();
        // Resolve any known aliases
        if (language in LANGUAGE_ALIASES) {
            language = LANGUAGE_ALIASES[language];
        }
        const checkedLanguage = ALL_LANGUAGES.find(l => l === language);
        if (checkedLanguage === undefined) {
            unknownLanguages.push(language);
        }
        else if (checkedLanguages.indexOf(checkedLanguage) === -1) {
            checkedLanguages.push(checkedLanguage);
        }
    }
    if (unknownLanguages.length > 0) {
        throw new Error(getUnknownLanguagesError(unknownLanguages));
    }
    return checkedLanguages;
}
/**
 * Get the default config for when the user has not supplied one.
 */
async function getDefaultConfig() {
    const languages = await getLanguages();
    const queries = {};
    await addDefaultQueries(languages, queries);
    return {
        languages: languages,
        queries: queries,
        pathsIgnore: [],
        paths: [],
        originalUserInput: {},
    };
}
exports.getDefaultConfig = getDefaultConfig;
/**
 * Load the config from the given file.
 */
async function loadConfig(configFile) {
    let parsedYAML;
    if (isLocal(configFile)) {
        // Treat the config file as relative to the workspace
        const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
        configFile = path.resolve(workspacePath, configFile);
        parsedYAML = getLocalConfig(configFile, workspacePath);
    }
    else {
        parsedYAML = await getRemoteConfig(configFile);
    }
    // Validate that the 'name' property is syntactically correct,
    // even though we don't use the value yet.
    if (NAME_PROPERTY in parsedYAML) {
        if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
            throw new Error(getNameInvalid(configFile));
        }
        if (parsedYAML[NAME_PROPERTY].length === 0) {
            throw new Error(getNameInvalid(configFile));
        }
    }
    const languages = await getLanguages();
    const queries = {};
    const pathsIgnore = [];
    const paths = [];
    let disableDefaultQueries = false;
    if (DISABLE_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
        if (typeof parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
            throw new Error(getDisableDefaultQueriesInvalid(configFile));
        }
        disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
    }
    if (!disableDefaultQueries) {
        await addDefaultQueries(languages, queries);
    }
    if (QUERIES_PROPERTY in parsedYAML) {
        if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
            throw new Error(getQueriesInvalid(configFile));
        }
        for (const query of parsedYAML[QUERIES_PROPERTY]) {
            if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
                throw new Error(getQueryUsesInvalid(configFile));
            }
            await parseQueryUses(configFile, languages, queries, query[QUERIES_USES_PROPERTY]);
        }
    }
    if (PATHS_IGNORE_PROPERTY in parsedYAML) {
        if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
            throw new Error(getPathsIgnoreInvalid(configFile));
        }
        parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
            if (typeof path !== "string" || path === '') {
                throw new Error(getPathsIgnoreInvalid(configFile));
            }
            pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
        });
    }
    if (PATHS_PROPERTY in parsedYAML) {
        if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
            throw new Error(getPathsInvalid(configFile));
        }
        parsedYAML[PATHS_PROPERTY].forEach(path => {
            if (typeof path !== "string" || path === '') {
                throw new Error(getPathsInvalid(configFile));
            }
            paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
        });
    }
    // The list of queries should not be empty for any language. If it is then
    // it is a user configuration error.
    for (const language of languages) {
        if (queries[language] === undefined || queries[language].length === 0) {
            throw new Error(`Did not detect any queries to run for ${language}. ` +
                "Please make sure that the default queries are enabled, or you are specifying queries to run.");
        }
    }
    return {
        languages,
        queries,
        pathsIgnore,
        paths,
        originalUserInput: parsedYAML
    };
}
/**
 * Load and return the config.
 *
 * This will parse the config from the user input if present, or generate
 * a default config. The parsed config is then stored to a known location.
 */
async function initConfig() {
    const configFile = core.getInput('config-file');
    const config = new Config();
    let config;
    // If no config file was provided create an empty one
    if (configFile === '') {
        core.debug('No configuration file was provided');
        return config;
        config = await getDefaultConfig();
    }
    try {
        const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
        if (parsedYAML.name && typeof parsedYAML.name === "string") {
            config.name = parsedYAML.name;
        }
        if (parsedYAML['disable-default-queries'] && typeof parsedYAML['disable-default-queries'] === "boolean") {
            config.disableDefaultQueries = parsedYAML['disable-default-queries'];
        }
        const queries = parsedYAML.queries;
        if (queries && queries instanceof Array) {
            queries.forEach(query => {
                if (query.uses && typeof query.uses === "string") {
                    config.addQuery(query.uses);
                }
            });
        }
        const pathsIgnore = parsedYAML['paths-ignore'];
        if (pathsIgnore && pathsIgnore instanceof Array) {
            pathsIgnore.forEach(path => {
                if (typeof path === "string") {
                    config.pathsIgnore.push(path);
                }
            });
        }
        const paths = parsedYAML.paths;
        if (paths && paths instanceof Array) {
            paths.forEach(path => {
                if (typeof path === "string") {
                    config.paths.push(path);
                }
            });
        }
    }
    catch (err) {
        core.setFailed(err);
    else {
        config = await loadConfig(configFile);
    }
    // Save the config so we can easily access it again in the future
    await saveConfig(config);
    return config;
}
exports.initConfig = initConfig;
function isLocal(configPath) {
    // If the path starts with ./, look locally
    if (configPath.indexOf("./") === 0) {
        return true;
    }
    return (configPath.indexOf("@") === -1);
}
function getLocalConfig(configFile, workspacePath) {
    // Error if the config file is now outside of the workspace
    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
    }
    // Error if the file does not exist
    if (!fs.existsSync(configFile)) {
        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
    }
    return yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
}
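// Fetch a remote config file given a reference of the form
// <owner>/<repository>/<file-path>@<ref>, using the GitHub contents API.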
async function getRemoteConfig(configFile) {
    // retrieve the various parts of the config location, and ensure they're present
    const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
    const pieces = format.exec(configFile);
    // 5 = 4 groups + the whole expression
    if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
        throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
    }
    const response = await api.getApiClient(true).repos.getContents({
        owner: pieces.groups.owner,
        repo: pieces.groups.repo,
        path: pieces.groups.path,
        ref: pieces.groups.ref,
    });
    let fileContents;
    if ("content" in response.data && response.data.content !== undefined) {
        fileContents = response.data.content;
    }
    else if (Array.isArray(response.data)) {
        throw new Error(getConfigFileDirectoryGivenMessage(configFile));
    }
    else {
        throw new Error(getConfigFileFormatInvalidMessage(configFile));
    }
    return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
}
/**
 * Get the file path where the parsed config will be stored.
 */
function getPathToParsedConfigFile() {
    return path.join(util.getRequiredEnvParam('RUNNER_TEMP'), 'config');
}
exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
/**
 * Store the given config to the path returned from getPathToParsedConfigFile.
 */
async function saveConfig(config) {
    const configString = JSON.stringify(config);
    await io.mkdirP(configFolder);
    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
    const configFile = getPathToParsedConfigFile();
    fs.mkdirSync(path.dirname(configFile), { recursive: true });
    fs.writeFileSync(configFile, configString, 'utf8');
    core.debug('Saved config:');
    core.debug(configString);
}
async function loadConfig() {
    const configFile = path.join(configFolder, 'config');
    if (fs.existsSync(configFile)) {
        const configString = fs.readFileSync(configFile, 'utf8');
        core.debug('Loaded config:');
        core.debug(configString);
        return JSON.parse(configString);
    }
    else {
        const config = initConfig();
        core.debug('Initialized config:');
        core.debug(JSON.stringify(config));
        await saveConfig(config);
        return config;
/**
 * Get the config.
 *
 * If this is the first time in a workflow that this is being called then
 * this will parse the config from the user input. The parsed config is then
 * stored to a known location. On the second and further calls, this will
 * return the contents of the parsed config from the known location.
 */
async function getConfig() {
    const configFile = getPathToParsedConfigFile();
    if (!fs.existsSync(configFile)) {
        throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
    }
    const configString = fs.readFileSync(configFile, 'utf8');
    core.debug('Loaded config:');
    core.debug(configString);
    return JSON.parse(configString);
}
exports.loadConfig = loadConfig;
exports.getConfig = getConfig;
//# sourceMappingURL=config-utils.js.map
lib/config-utils.js.map: File diff suppressed because one or more lines are too long
440 lib/config-utils.test.js generated Normal file
@@ -0,0 +1,440 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const sinon_1 = __importDefault(require("sinon"));
const api = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
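// Test helpers: setInput drives the action's inputs through the same INPUT_*
// environment variables the runner uses, and the mock* functions stub out the
// GitHub API client.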
function setInput(name, value) {
|
||||
// Transformation copied from
|
||||
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
|
||||
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
|
||||
if (value !== undefined) {
|
||||
process.env[envVar] = value;
|
||||
}
|
||||
else {
|
||||
delete process.env[envVar];
|
||||
}
|
||||
}
|
||||
function mockGetContents(content) {
|
||||
// Passing an auth token is required, so we just use a dummy value
|
||||
let client = new github.GitHub('123');
|
||||
const response = {
|
||||
data: content
|
||||
};
|
||||
const spyGetContents = sinon_1.default.stub(client.repos, "getContents").resolves(response);
|
||||
sinon_1.default.stub(api, "getApiClient").value(() => client);
|
||||
return spyGetContents;
|
||||
}
|
||||
function mockListLanguages(languages) {
|
||||
// Passing an auth token is required, so we just use a dummy value
|
||||
let client = new github.GitHub('123');
|
||||
const response = {
|
||||
data: {},
|
||||
};
|
||||
for (const language of languages) {
|
||||
response.data[language] = 123;
|
||||
}
|
||||
sinon_1.default.stub(client.repos, "listLanguages").resolves(response);
|
||||
sinon_1.default.stub(api, "getApiClient").value(() => client);
|
||||
}
|
||||
ava_1.default("load empty config", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
setInput('config-file', undefined);
|
||||
setInput('languages', 'javascript,python');
|
||||
CodeQL.setCodeQL({
|
||||
resolveQueries: async function () {
|
||||
return {
|
||||
byLanguage: {},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
const config = await configUtils.initConfig();
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig());
|
||||
});
|
||||
});
|
||||
ava_1.default("loading config saves config", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
setInput('config-file', undefined);
|
||||
setInput('languages', 'javascript,python');
|
||||
CodeQL.setCodeQL({
|
||||
resolveQueries: async function () {
|
||||
return {
|
||||
byLanguage: {},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
// Sanity check the saved config file does not already exist
|
||||
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile()));
|
||||
// Sanity check that getConfig throws before we have called initConfig
|
||||
await t.throwsAsync(configUtils.getConfig);
|
||||
const config1 = await configUtils.initConfig();
|
||||
// The saved config file should now exist
|
||||
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile()));
|
||||
// And that same newly-initialised config should now be returned by getConfig
|
||||
const config2 = await configUtils.getConfig();
|
||||
t.deepEqual(config1, config2);
|
||||
});
|
||||
});
|
||||
ava_1.default("load input outside of workspace", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
setInput('config-file', '../input');
|
||||
try {
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
|
||||
}
|
||||
});
|
||||
});
|
||||
ava_1.default("load non-local input with invalid repo syntax", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
// no filename given, just a repo
|
||||
setInput('config-file', 'octo-org/codeql-config@main');
|
||||
try {
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
|
||||
}
|
||||
});
|
||||
});
ava_1.default("load non-existent input", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        t.false(fs.existsSync(path.join(tmpDir, 'input')));
        setInput('config-file', 'input');
        setInput('languages', 'javascript');
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
        }
    });
});
ava_1.default("load non-empty input", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        CodeQL.setCodeQL({
            resolveQueries: async function () {
                return {
                    byLanguage: {
                        'javascript': {
                            '/foo/a.ql': {},
                            '/bar/b.ql': {},
                        },
                    },
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        // Just create a generic config object with non-default values for all fields
        const inputFileContents = `
            name: my config
            disable-default-queries: true
            queries:
              - uses: ./foo
            paths-ignore:
              - a
              - b
            paths:
              - c/d`;
        fs.mkdirSync(path.join(tmpDir, 'foo'));
        // And the config we expect it to parse to
        const expectedConfig = {
            languages: ['javascript'],
            queries: { 'javascript': ['/foo/a.ql', '/bar/b.ql'] },
            pathsIgnore: ['a', 'b'],
            paths: ['c/d'],
            originalUserInput: {
                name: 'my config',
                'disable-default-queries': true,
                queries: [{ uses: './foo' }],
                'paths-ignore': ['a', 'b'],
                paths: ['c/d'],
            },
        };
        fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
        setInput('config-file', 'input');
        setInput('languages', 'javascript');
        const actualConfig = await configUtils.initConfig();
        // Should exactly equal the object we constructed earlier
        t.deepEqual(actualConfig, expectedConfig);
    });
});
ava_1.default("default queries are used", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        // Check that the default behaviour is to add the default queries.
        // In this case a config file is specified but does not include
        // the disable-default-queries field.
        // We determine this by whether CodeQL.resolveQueries is called
        // with the correct arguments.
        const resolveQueriesArgs = [];
        CodeQL.setCodeQL({
            resolveQueries: async function (queries, extraSearchPath) {
                resolveQueriesArgs.push({ queries, extraSearchPath });
                return {
                    byLanguage: {
                        'javascript': {
                            'foo.ql': {},
                        },
                    },
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        // The important point of this config is that it doesn't specify
        // the disable-default-queries field.
        // Any other details are hopefully irrelevant for this test.
        const inputFileContents = `
            paths:
              - foo`;
        fs.mkdirSync(path.join(tmpDir, 'foo'));
        fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
        setInput('config-file', 'input');
        setInput('languages', 'javascript');
        await configUtils.initConfig();
        // Check resolveQueries was called correctly
        t.deepEqual(resolveQueriesArgs.length, 1);
        t.deepEqual(resolveQueriesArgs[0].queries, ['javascript-code-scanning.qls']);
        t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined);
    });
});
ava_1.default("API client used when reading remote config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        CodeQL.setCodeQL({
            resolveQueries: async function () {
                return {
                    byLanguage: {
                        'javascript': {
                            'foo.ql': {},
                        },
                    },
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        const inputFileContents = `
            name: my config
            disable-default-queries: true
            queries:
              - uses: ./
              - uses: ./foo
              - uses: foo/bar@dev
            paths-ignore:
              - a
              - b
            paths:
              - c/d`;
        const dummyResponse = {
            content: Buffer.from(inputFileContents).toString("base64"),
        };
        const spyGetContents = mockGetContents(dummyResponse);
        // Create checkout directory for remote queries repository
        fs.mkdirSync(path.join(tmpDir, 'foo/bar'), { recursive: true });
        setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
        setInput('languages', 'javascript');
        await configUtils.initConfig();
        t.assert(spyGetContents.called);
    });
});
ava_1.default("Remote config handles the case where a directory is provided", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        const dummyResponse = []; // directories are returned as arrays
        mockGetContents(dummyResponse);
        const repoReference = 'octo-org/codeql-config/config.yaml@main';
        setInput('config-file', repoReference);
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
        }
    });
});
ava_1.default("Invalid format of remote config handled correctly", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        const dummyResponse = {
            // note no "content" property here
        };
        mockGetContents(dummyResponse);
        const repoReference = 'octo-org/codeql-config/config.yaml@main';
        setInput('config-file', repoReference);
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
        }
    });
});
ava_1.default("No detected languages", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        mockListLanguages([]);
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
        }
    });
});
ava_1.default("Unknown languages", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        setInput('languages', 'ruby,english');
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(['ruby', 'english'])));
        }
    });
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
    ava_1.default("load invalid input - " + testName, async (t) => {
        return await util.withTmpDir(async (tmpDir) => {
            process.env['RUNNER_TEMP'] = tmpDir;
            process.env['GITHUB_WORKSPACE'] = tmpDir;
            CodeQL.setCodeQL({
                resolveQueries: async function () {
                    return {
                        byLanguage: {},
                        noDeclaredLanguage: {},
                        multipleDeclaredLanguages: {},
                    };
                },
            });
            const inputFile = path.join(tmpDir, 'input');
            fs.writeFileSync(inputFile, inputFileContents, 'utf8');
            setInput('config-file', 'input');
            setInput('languages', 'javascript');
            try {
                await configUtils.initConfig();
                throw new Error('initConfig did not throw error');
            }
            catch (err) {
                t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
            }
        });
    });
}
doInvalidInputTest('name invalid type', `
    name:
      - foo: bar`, configUtils.getNameInvalid);
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest('queries uses invalid type', `
    queries:
      - uses:
          - hello: world`, configUtils.getQueryUsesInvalid);
function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
    // Invalid contents of a "queries.uses" field.
    // Should fail with the expected error message.
    const inputFileContents = `
        name: my config
        queries:
          - name: foo
            uses: ` + input;
    doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
}
// Various "uses" fields, and the errors they should produce
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
const validPaths = [
    'foo',
    'foo/',
    'foo/**',
    'foo/**/',
    'foo/**/**',
    'foo/**/bar/**/baz',
    '**/',
    '**/foo',
    '/foo',
];
const invalidPaths = [
    'a/***/b',
    'a/**b',
    'a/b**',
    '**',
];
ava_1.default('path validations', t => {
    // Dummy values to pass to validateAndSanitisePath
    const propertyName = 'paths';
    const configFile = './.github/codeql/config.yml';
    for (const path of validPaths) {
        t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile));
    }
    for (const path of invalidPaths) {
        t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile));
    }
});
ava_1.default('path sanitisation', t => {
    // Dummy values to pass to validateAndSanitisePath
    const propertyName = 'paths';
    const configFile = './.github/codeql/config.yml';
    // Valid paths are not modified
    t.deepEqual(configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile), 'foo/bar');
    // Trailing stars are stripped
    t.deepEqual(configUtils.validateAndSanitisePath('foo/**', propertyName, configFile), 'foo/');
});
//# sourceMappingURL=config-utils.test.js.map
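The setInput helper used throughout these tests is likewise defined outside this excerpt. @actions/core resolves core.getInput('foo') from the INPUT_FOO environment variable, so a test helper can fake workflow inputs by setting or clearing that variable; a minimal sketch under that assumption:
// Sketch of a setInput test helper (assumes @actions/core's convention of
// reading inputs from INPUT_* environment variables).
function setInput(name, value) {
    // Name transformation mirrors @actions/core's getInput: spaces become
    // underscores and the name is upper-cased.
    const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    if (value !== undefined) {
        process.env[envVar] = value;
    }
    else {
        delete process.env[envVar];
    }
}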
1
lib/config-utils.test.js.map
Normal file
File diff suppressed because one or more lines are too long
33
lib/external-queries.js
generated
@@ -12,22 +12,23 @@ const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const util = __importStar(require("./util"));
async function checkoutExternalQueries(config) {
    const folder = util.getRequiredEnvParam('RUNNER_WORKSPACE');
    for (const externalQuery of config.externalQueries) {
        core.info('Checking out ' + externalQuery.repository);
        const checkoutLocation = path.join(folder, externalQuery.repository);
        if (!fs.existsSync(checkoutLocation)) {
            const repoURL = 'https://github.com/' + externalQuery.repository + '.git';
            await exec.exec('git', ['clone', repoURL, checkoutLocation]);
            await exec.exec('git', [
                '--work-tree=' + checkoutLocation,
                '--git-dir=' + checkoutLocation + '/.git',
                'checkout', externalQuery.ref,
            ]);
        }
        config.additionalQueries.push(path.join(checkoutLocation, externalQuery.path));
/**
 * Check out repository at the given ref, and return the directory of the checkout.
 */
async function checkoutExternalRepository(repository, ref) {
    const folder = util.getRequiredEnvParam('RUNNER_TEMP');
    core.info('Checking out ' + repository);
    const checkoutLocation = path.join(folder, repository);
    if (!fs.existsSync(checkoutLocation)) {
        const repoURL = 'https://github.com/' + repository + '.git';
        await exec.exec('git', ['clone', repoURL, checkoutLocation]);
        await exec.exec('git', [
            '--work-tree=' + checkoutLocation,
            '--git-dir=' + checkoutLocation + '/.git',
            'checkout', ref,
        ]);
    }
    return checkoutLocation;
}
exports.checkoutExternalQueries = checkoutExternalQueries;
exports.checkoutExternalRepository = checkoutExternalRepository;
//# sourceMappingURL=external-queries.js.map
@@ -1 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,CAAC;IAE5D,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA+B;AAE/B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAAC,UAAkB,EAAE,GAAW;IAC9E,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,UAAU,CAAC,CAAC;IAExC,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;IACvD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,UAAU,GAAG,MAAM,CAAC;QAC5D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;QAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACrB,cAAc,GAAG,gBAAgB;YACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;YACzC,UAAU,EAAE,GAAG;SAChB,CAAC,CAAC;KACJ;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAjBD,gEAiBC"}
13
lib/external-queries.test.js
generated
@@ -13,18 +13,15 @@ Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const externalQueries = __importStar(require("./external-queries"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("checkoutExternalQueries", async (t) => {
    let config = new configUtils.Config();
    config.externalQueries = [
        new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
    ];
    await util.withTmpDir(async (tmpDir) => {
        process.env["RUNNER_WORKSPACE"] = tmpDir;
        await externalQueries.checkoutExternalQueries(config);
        // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
        process.env["RUNNER_TEMP"] = tmpDir;
        await externalQueries.checkoutExternalRepository("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b");
        // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch
        t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
    });
});
@@ -1 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,6CAA+B;AAE/B,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACtC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACrB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAChG,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACjC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QACzC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IACjF,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"}
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,0BAA0B,CAAC,kBAAkB,EAAE,0CAA0C,CAAC,CAAC;QAEjH,mGAAmG;QACnG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
156
lib/finalize-db.js
generated
@@ -8,135 +8,103 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const externalQueries = __importStar(require("./external-queries"));
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
    const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
    if (scannedLanguages) {
        for (const language of scannedLanguages.split(',')) {
async function sendStatusReport(startedAt, queriesStats, uploadStats, error) {
    var _a, _b, _c;
    const status = ((_a = queriesStats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined ? 'failure' : 'success';
    const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
    const statusReport = {
        ...statusReportBase,
        ...(queriesStats || {}),
        ...(uploadStats || {}),
    };
    await util.sendStatusReport(statusReport);
}
async function createdDBForScannedLanguages(databaseFolder, config) {
    const codeql = codeql_1.getCodeQL();
    for (const language of config.languages) {
        if (codeql_1.isScannedLanguage(language)) {
            core.startGroup('Extracting ' + language);
            // Get extractor location
            let extractorPath = '';
            await exec.exec(codeqlCmd, ['resolve', 'extractor', '--format=json', '--language=' + language], {
                silent: true,
                listeners: {
                    stdout: (data) => { extractorPath += data.toString(); },
                    stderr: (data) => { process.stderr.write(data); }
                }
            });
            // Set trace command
            const ext = process.platform === 'win32' ? '.cmd' : '.sh';
            const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
            // Run trace command
            await exec.exec(codeqlCmd, ['database', 'trace-command', path.join(databaseFolder, language), '--', traceCommand]);
            await codeql.extractScannedLanguage(path.join(databaseFolder, language), language);
            core.endGroup();
        }
    }
}
async function finalizeDatabaseCreation(codeqlCmd, databaseFolder) {
    await createdDBForScannedLanguages(codeqlCmd, databaseFolder);
    const languages = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES] || '';
    for (const language of languages.split(',')) {
async function finalizeDatabaseCreation(databaseFolder, config) {
    await createdDBForScannedLanguages(databaseFolder, config);
    const codeql = codeql_1.getCodeQL();
    for (const language of config.languages) {
        core.startGroup('Finalizing ' + language);
        await exec.exec(codeqlCmd, ['database', 'finalize', path.join(databaseFolder, language)]);
        await codeql.finalizeDatabase(path.join(databaseFolder, language));
        core.endGroup();
    }
}
async function resolveQueryLanguages(codeqlCmd, config) {
    let res = new Map();
    if (config.additionalQueries.length !== 0) {
        let resolveQueriesOutput = '';
        const options = {
            listeners: {
                stdout: (data) => {
                    resolveQueriesOutput += data.toString();
                }
            }
        };
        await exec.exec(codeqlCmd, [
            'resolve',
            'queries',
            ...config.additionalQueries,
            '--format=bylanguage'
        ], options);
        const resolveQueriesOutputObject = JSON.parse(resolveQueriesOutput);
        for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
            res[language] = Object.keys(queries);
        }
        const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
        const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
        if (noDeclaredLanguageQueries.length !== 0) {
            throw new Error('Some queries do not declare a language, their qlpack.yml file is missing or is invalid');
        }
        const multipleDeclaredLanguages = resolveQueriesOutputObject.multipleDeclaredLanguages;
        const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
        if (multipleDeclaredLanguagesQueries.length !== 0) {
            throw new Error('Some queries declare multiple languages, their qlpack.yml file is missing or is invalid');
        }
    }
    return res;
}
// Runs queries and creates sarif files in the given folder
async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
    const queriesPerLanguage = await resolveQueryLanguages(codeqlCmd, config);
    for (let database of fs.readdirSync(databaseFolder)) {
        core.startGroup('Analyzing ' + database);
        const queries = [];
        if (!config.disableDefaultQueries) {
            queries.push(database + '-code-scanning.qls');
async function runQueries(databaseFolder, sarifFolder, config) {
    const codeql = codeql_1.getCodeQL();
    for (let language of fs.readdirSync(databaseFolder)) {
        core.startGroup('Analyzing ' + language);
        const queries = config.queries[language] || [];
        if (queries.length === 0) {
            throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
        }
        try {
            // Pass the queries to codeql using a file instead of using the command
            // line to avoid command line length restrictions, particularly on windows.
            const querySuite = path.join(databaseFolder, language + '-queries.qls');
            const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
            fs.writeFileSync(querySuite, querySuiteContents);
            core.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
            const sarifFile = path.join(sarifFolder, language + '.sarif');
            await codeql.databaseAnalyze(path.join(databaseFolder, language), sarifFile, querySuite);
            core.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
            core.endGroup();
        }
        catch (e) {
            // For now the fields about query performance are not populated
            return {
                analyze_failure_language: language,
            };
        }
        queries.push(...(queriesPerLanguage[database] || []));
        const sarifFile = path.join(sarifFolder, database + '.sarif');
        await exec.exec(codeqlCmd, [
            'database',
            'analyze',
            path.join(databaseFolder, database),
            '--format=sarif-latest',
            '--output=' + sarifFile,
            '--no-sarif-add-snippets',
            ...queries
        ]);
        core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
        core.endGroup();
    }
    return {};
}
async function run() {
    const startedAt = new Date();
    let queriesStats = undefined;
    let uploadStats = undefined;
    try {
        if (util.should_abort('finish', true) || !await util.reportActionStarting('finish')) {
        util.prepareLocalRunEnvironment();
        if (!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
            return;
        }
        const config = await configUtils.loadConfig();
        const config = await configUtils.getConfig();
        core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
        delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
        const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
        const databaseFolder = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_DATABASE_DIR);
        const databaseFolder = util.getCodeQLDatabasesDir();
        const sarifFolder = core.getInput('output');
        await io.mkdirP(sarifFolder);
        fs.mkdirSync(sarifFolder, { recursive: true });
        core.info('Finalizing database creation');
        await finalizeDatabaseCreation(codeqlCmd, databaseFolder);
        await externalQueries.checkoutExternalQueries(config);
        await finalizeDatabaseCreation(databaseFolder, config);
        core.info('Analyzing database');
        await runQueries(codeqlCmd, databaseFolder, sarifFolder, config);
        queriesStats = await runQueries(databaseFolder, sarifFolder, config);
        if ('true' === core.getInput('upload')) {
            if (!await upload_lib.upload(sarifFolder)) {
                await util.reportActionFailed('failed', 'upload');
                return;
            }
            uploadStats = await upload_lib.upload(sarifFolder);
        }
    }
    catch (error) {
        core.setFailed(error.message);
        await util.reportActionFailed('finish', error.message, error.stack);
        console.log(error);
        await sendStatusReport(startedAt, queriesStats, uploadStats, error);
        return;
    }
    await util.reportActionSucceeded('finish');
    await sendStatusReport(startedAt, queriesStats, uploadStats);
}
run().catch(e => {
    core.setFailed("analyze action failed: " + e);
File diff suppressed because one or more lines are too long
2
lib/finalize-db.test.js
generated
Normal file
@@ -0,0 +1,2 @@
"use strict";
//# sourceMappingURL=finalize-db.test.js.map
1
lib/finalize-db.test.js.map
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"finalize-db.test.js","sourceRoot":"","sources":["../src/finalize-db.test.ts"],"names":[],"mappings":""}
25
lib/fingerprints.js
generated
@@ -146,10 +146,10 @@ function locationUpdateCallback(result, location) {
        result.partialFingerprints.primaryLocationLineHash = hash;
    }
    else if (existingFingerprint !== hash) {
        core.warning("Calculated fingerprint of " + hash +
            " for file " + location.physicalLocation.artifactLocation.uri +
            " line " + lineNumber +
            ", but found existing inconsistent fingerprint value " + existingFingerprint);
        core.warning('Calculated fingerprint of ' + hash +
            ' for file ' + location.physicalLocation.artifactLocation.uri +
            ' line ' + lineNumber +
            ', but found existing inconsistent fingerprint value ' + existingFingerprint);
    }
};
}
@@ -164,14 +164,14 @@ function resolveUriToFile(location, artifacts) {
        location.index < 0 ||
        location.index >= artifacts.length ||
        typeof artifacts[location.index].location !== 'object') {
        core.debug('Ignoring location as index "' + location.index + '" is invalid');
        core.debug(`Ignoring location as index "${location.index}" is invalid`);
        return undefined;
    }
    location = artifacts[location.index].location;
}
// Get the URI and decode
if (typeof location.uri !== 'string') {
    core.debug('Ignoring location as uri "' + location.uri + '" is invalid');
    core.debug(`Ignoring location as uri "${location.uri}" is invalid`);
    return undefined;
}
let uri = decodeURIComponent(location.uri);
@@ -181,13 +181,13 @@ function resolveUriToFile(location, artifacts) {
    uri = uri.substring(fileUriPrefix.length);
}
if (uri.indexOf('://') !== -1) {
    core.debug('Ignoring location URI "' + uri + "' as the scheme is not recognised");
    core.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
    return undefined;
}
// Discard any absolute paths that aren't in the src root
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
    core.debug('Ignoring location URI "' + uri + "' as it is outside of the src root");
    core.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
    return undefined;
}
// Just assume a relative path is relative to the src root.
@@ -198,7 +198,7 @@ function resolveUriToFile(location, artifacts) {
}
// Check the file exists
if (!fs.existsSync(uri)) {
    core.debug("Unable to compute fingerprint for non-existent file: " + uri);
    core.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
    return undefined;
}
return uri;
@@ -207,6 +207,7 @@ exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
function addFingerprints(sarifContents) {
    var _a, _b;
    let sarif = JSON.parse(sarifContents);
    // Gather together results for the same file and construct
    // callbacks to accept hashes for that file and update the location
@@ -217,10 +218,8 @@ function addFingerprints(sarifContents) {
    for (const result of run.results || []) {
        // Check the primary location is defined correctly and is in the src root
        const primaryLocation = (result.locations || [])[0];
        if (!primaryLocation ||
            !primaryLocation.physicalLocation ||
            !primaryLocation.physicalLocation.artifactLocation) {
            core.debug("Unable to compute fingerprint for invalid location: " + JSON.stringify(primaryLocation));
        if (!((_b = (_a = primaryLocation) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation)) {
            core.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
            continue;
        }
        const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
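Based on the signature and comment above, addFingerprints takes SARIF text in and returns updated SARIF text out, so a caller would use it as a simple string transform (the file name here is an example):
// Illustrative round trip: read a SARIF file, add primaryLocationLineHash
// partial fingerprints to each result, and write the updated contents back.
const sarifIn = fs.readFileSync("results.sarif", "utf8");
const sarifOut = addFingerprints(sarifIn);
fs.writeFileSync("results.sarif", sarifOut, "utf8");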
File diff suppressed because one or more lines are too long
2
lib/fingerprints.test.js
generated
@@ -14,6 +14,8 @@ const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const fingerprints = __importStar(require("./fingerprints"));
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
function testHash(t, input, expectedHashes) {
    let index = 0;
    let callback = function (lineNumber, hash) {
File diff suppressed because one or more lines are too long
28
lib/setup-tools.js
generated
@@ -10,6 +10,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const toolcache = __importStar(require("@actions/tool-cache"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
class CodeQLSetup {
    constructor(codeqlDist) {
        this.dist = codeqlDist;
@@ -19,7 +20,7 @@ class CodeQLSetup {
        if (process.platform === 'win32') {
            this.platform = 'win64';
            if (this.cmd.endsWith('codeql')) {
                this.cmd += ".cmd";
                this.cmd += ".exe";
            }
        }
        else if (process.platform === 'linux') {
@@ -35,17 +36,17 @@ class CodeQLSetup {
}
exports.CodeQLSetup = CodeQLSetup;
async function setupCodeQL() {
    const version = '1.0.0';
    const codeqlURL = core.getInput('tools', { required: true });
    try {
        let codeqlFolder = toolcache.find('CodeQL', version);
        const codeqlURL = core.getInput('tools', { required: true });
        const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
        let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
        if (codeqlFolder) {
            core.debug(`CodeQL found in cache ${codeqlFolder}`);
        }
        else {
            const codeqlPath = await toolcache.downloadTool(codeqlURL);
            const codeqlExtracted = await toolcache.extractTar(codeqlPath);
            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
        }
        return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
    }
@@ -55,4 +56,21 @@ async function setupCodeQL() {
    }
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url) {
    const match = url.match(/\/codeql-bundle-(.*)\//);
    if (match === null || match.length < 2) {
        throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
    }
    let version = match[1];
    if (!semver.valid(version)) {
        core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
        version = '0.0.0-' + version;
    }
    const s = semver.clean(version);
    if (!s) {
        throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
    }
    return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
//# sourceMappingURL=setup-tools.js.map
@@ -1 +1 @@
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAE7B,MAAa,WAAW;IAMpB,YAAY,UAAkB;QAC1B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAC9B,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC7B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACtB;SACJ;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACrC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC7B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACtC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SAC3B;aAAM;YACH,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC/D;IACL,CAAC;CACJ;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC7B,MAAM,OAAO,GAAG,OAAO,CAAC;IACxB,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7D,IAAI;QACA,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACrD,IAAI,YAAY,EAAE;YACd,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACvD;aAAM;YACH,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC;SAC/E;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE7D;IAAC,OAAO,CAAC,EAAE;QACR,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAChE;AACL,CAAC;AAnBD,kCAmBC"}
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAC7B,+CAAiC;AAEjC,MAAa,WAAW;IAMtB,YAAY,UAAkB;QAC5B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC/B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACpB;SACF;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACvC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC3B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACxC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SACzB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC7D;IACH,CAAC;CACF;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC/B,IAAI;QACF,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAExD,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAC9D,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACrD;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,gBAAgB,CAAC,CAAC;SACtF;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE3D;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAC9D;AACH,CAAC;AAnBD,kCAmBC;AAED,SAAgB,mBAAmB,CAAC,GAAW;IAE7C,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;IAClD,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,wBAAwB,GAAG,iCAAiC,CAAC,CAAC;KAC/E;IAED,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAEvB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;QAC1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,OAAO,gEAAgE,OAAO,GAAG,CAAC,CAAC;QAChH,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC;KAC9B;IAED,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,iDAAiD,OAAO,UAAU,CAAC,CAAC;KAC/G;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AApBD,kDAoBC"}
60
lib/setup-tools.test.js
generated
Normal file
@@ -0,0 +1,60 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const setupTools = __importStar(require("./setup-tools"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
    await util.withTmpDir(async (tmpDir) => {
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
        process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
        const versions = ['20200601', '20200610'];
        for (let i = 0; i < versions.length; i++) {
            const version = versions[i];
            nock_1.default('https://example.com')
                .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
                .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
            process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
            await setupTools.setupCodeQL();
            t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
        }
        const cachedVersions = toolcache.findAllVersions('CodeQL');
        t.is(cachedVersions.length, 2);
    });
});
ava_1.default('parse codeql bundle url version', t => {
    const tests = {
        '20200601': '0.0.0-20200601',
        '20200601.0': '0.0.0-20200601.0',
        '20200601.0.0': '20200601.0.0',
        '1.2.3': '1.2.3',
        '1.2.3-alpha': '1.2.3-alpha',
        '1.2.3-beta.1': '1.2.3-beta.1',
    };
    for (const [version, expectedVersion] of Object.entries(tests)) {
        const url = `https://github.com/.../codeql-bundle-${version}/...`;
        try {
            const parsedVersion = setupTools.getCodeQLURLVersion(url);
            t.deepEqual(parsedVersion, expectedVersion);
        }
        catch (e) {
            t.fail(e.message);
        }
    }
});
//# sourceMappingURL=setup-tools.test.js.map
1
lib/setup-tools.test.js.map
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.test.js","sourceRoot":"","sources":["../src/setup-tools.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,0DAA4C;AAC5C,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,UAAU,CAAC,WAAW,EAAE,CAAC;YAE/B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YAC1D,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}
123
lib/setup-tracer.js
generated
@@ -9,13 +9,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const setuptools = __importStar(require("./setup-tools"));
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
    ,
@@ -28,12 +26,7 @@ const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
    'SEMMLE_JAVA_TOOL_OPTIONS'
]);
async function tracerConfig(codeql, database, compilerSpec) {
    const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
    let envFile = path.resolve(database, 'working', 'env.tmp');
    await exec.exec(codeql.cmd, ['database', 'trace-command', database,
        ...compilerSpecArg,
        process.execPath, path.resolve(__dirname, 'tracer-env.js'), envFile]);
    const env = JSON.parse(fs.readFileSync(envFile, 'utf-8'));
    const env = await codeql.getTracerEnv(database, compilerSpec);
    const config = env['ODASA_TRACER_CONFIGURATION'];
    const info = { spec: config, env: {} };
    // Extract critical tracer variables from the environment
@@ -63,7 +56,7 @@ function concatTracerConfigs(configs) {
    const env = {};
    let copyExecutables = false;
    let envSize = 0;
    for (let v of Object.values(configs)) {
    for (const v of configs) {
        for (let e of Object.entries(v.env)) {
            const name = e[0];
            const value = e[1];
@@ -100,12 +93,13 @@ function concatTracerConfigs(configs) {
        totalCount += count;
        totalLines.push(...lines.slice(2));
    }
    const newLogFilePath = path.resolve(util.workspaceFolder(), 'compound-build-tracer.log');
    const spec = path.resolve(util.workspaceFolder(), 'compound-spec');
    const tempFolder = path.resolve(util.workspaceFolder(), 'compound-temp');
    const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
    const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
    const spec = path.resolve(tempFolder, 'compound-spec');
    const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
    const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
    if (copyExecutables) {
        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = tempFolder;
        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
        envSize += 1;
    }
    fs.writeFileSync(spec, newSpecContent.join('\n'));
@@ -125,28 +119,51 @@ function concatTracerConfigs(configs) {
    fs.writeFileSync(envPath, buffer);
    return { env, spec };
}
async function sendSuccessStatusReport(startedAt, config) {
|
||||
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);
|
||||
const languages = config.languages.join(',');
|
||||
const workflowLanguages = core.getInput('languages', { required: false });
|
||||
const paths = (config.originalUserInput.paths || []).join(',');
|
||||
const pathsIgnore = (config.originalUserInput['paths-ignore'] || []).join(',');
|
||||
const disableDefaultQueries = config.originalUserInput['disable-default-queries'] ? languages : '';
|
||||
const queries = (config.originalUserInput.queries || []).map(q => q.uses).join(',');
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
languages: languages,
|
||||
workflow_languages: workflowLanguages,
|
||||
paths: paths,
|
||||
paths_ignore: pathsIgnore,
|
||||
disable_default_queries: disableDefaultQueries,
|
||||
queries: queries,
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
let config;
|
||||
let codeql;
|
||||
try {
|
||||
if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
|
||||
util.prepareLocalRunEnvironment();
|
||||
if (!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
// The config file MUST be parsed in the init action
|
||||
const config = await configUtils.loadConfig();
|
||||
core.startGroup('Load language configuration');
|
||||
const languages = await util.getLanguages();
|
||||
// If the languages parameter was not given and no languages were
|
||||
// detected then fail here as this is a workflow configuration error.
|
||||
if (languages.length === 0) {
|
||||
core.setFailed("Did not detect any languages to analyze. Please update input in workflow.");
|
||||
return;
|
||||
}
|
||||
core.endGroup();
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config, languages);
|
||||
const sourceRoot = path.resolve();
|
||||
core.startGroup('Setup CodeQL tools');
|
||||
const codeqlSetup = await setuptools.setupCodeQL();
|
||||
await exec.exec(codeqlSetup.cmd, ['version', '--format=json']);
|
||||
codeql = await codeql_1.setupCodeQL();
|
||||
await codeql.printVersion();
|
||||
core.endGroup();
|
||||
core.startGroup('Load language configuration');
|
||||
config = await configUtils.initConfig();
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
core.endGroup();
|
||||
}
|
||||
catch (e) {
|
||||
core.setFailed(e.message);
|
||||
console.log(e);
|
||||
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const sourceRoot = path.resolve();
|
||||
// Forward Go flags
|
||||
const goFlags = process.env['GOFLAGS'];
|
||||
if (goFlags) {
|
||||
@@ -156,57 +173,49 @@ async function run() {
|
||||
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
|
||||
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
|
||||
core.exportVariable('CODEQL_RAM', codeqlRam);
|
||||
const databaseFolder = path.resolve(util.workspaceFolder(), 'codeql_databases');
|
||||
await io.mkdirP(databaseFolder);
|
||||
let tracedLanguages = {};
|
||||
let scannedLanguages = [];
|
||||
const databaseFolder = util.getCodeQLDatabasesDir();
|
||||
fs.mkdirSync(databaseFolder, { recursive: true });
|
||||
let tracedLanguageConfigs = [];
|
||||
// TODO: replace this code once CodeQL supports multi-language tracing
|
||||
for (let language of languages) {
|
||||
for (let language of config.languages) {
|
||||
const languageDatabase = path.join(databaseFolder, language);
|
||||
// Init language database
|
||||
await exec.exec(codeqlSetup.cmd, ['database', 'init', languageDatabase, '--language=' + language, '--source-root=' + sourceRoot]);
|
||||
await codeql.databaseInit(languageDatabase, language, sourceRoot);
|
||||
// TODO: add better detection of 'traced languages' instead of using a hard coded list
|
||||
if (['cpp', 'java', 'csharp'].includes(language)) {
|
||||
const config = await tracerConfig(codeqlSetup, languageDatabase);
|
||||
tracedLanguages[language] = config;
|
||||
}
|
||||
else {
|
||||
scannedLanguages.push(language);
|
||||
if (codeql_1.isTracedLanguage(language)) {
|
||||
const config = await tracerConfig(codeql, languageDatabase);
|
||||
tracedLanguageConfigs.push(config);
|
||||
}
|
||||
}
|
||||
const tracedLanguageKeys = Object.keys(tracedLanguages);
|
||||
if (tracedLanguageKeys.length > 0) {
|
||||
const mainTracerConfig = concatTracerConfigs(tracedLanguages);
|
||||
if (tracedLanguageConfigs.length > 0) {
|
||||
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs);
|
||||
if (mainTracerConfig.spec) {
|
||||
for (let entry of Object.entries(mainTracerConfig.env)) {
|
||||
core.exportVariable(entry[0], entry[1]);
|
||||
}
|
||||
core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
|
||||
if (process.platform === 'darwin') {
|
||||
core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeqlSetup.tools, 'osx64', 'libtrace.dylib'));
|
||||
core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeql.getDir(), 'tools', 'osx64', 'libtrace.dylib'));
|
||||
}
|
||||
else if (process.platform === 'win32') {
|
||||
await exec.exec('powershell', [path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
|
||||
path.resolve(codeqlSetup.tools, 'win64', 'tracer.exe')], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
|
||||
await exec.exec('powershell', [
|
||||
path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
|
||||
path.resolve(codeql.getDir(), 'tools', 'win64', 'tracer.exe'),
|
||||
], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
|
||||
}
|
||||
else {
|
||||
core.exportVariable('LD_PRELOAD', path.join(codeqlSetup.tools, 'linux64', '${LIB}trace.so'));
|
||||
core.exportVariable('LD_PRELOAD', path.join(codeql.getDir(), 'tools', 'linux64', '${LIB}trace.so'));
|
||||
}
|
||||
}
|
||||
}
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES, scannedLanguages.join(','));
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));
|
||||
// TODO: make this a "private" environment variable of the action
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_CMD, codeqlSetup.cmd);
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
await util.reportActionFailed('init', error.message, error.stack);
|
||||
console.log(error);
|
||||
await util.sendStatusReport(await util.createStatusReportBase('init', 'failure', startedAt, error.message, error.stack));
|
||||
return;
|
||||
}
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
|
||||
await util.reportActionSucceeded('init');
|
||||
await sendSuccessStatusReport(startedAt, config);
|
||||
}
|
||||
run().catch(e => {
|
||||
core.setFailed("init action failed: " + e);
File diff suppressed because one or more lines are too long
10
lib/shared-environment.js
generated
@@ -1,18 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_ACTION_CMD = 'CODEQL_ACTION_CMD';
exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather than the init action.
exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
// Populated when the init action completes successfully
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
exports.CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
//# sourceMappingURL=shared-environment.js.map
|
||||
@@ -1 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,iBAAiB,GAAG,mBAAmB,CAAC;AACxC,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AACpD,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
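The new `CODEQL_WORKFLOW_STARTED_AT` variable replaces the per-action `CODEQL_ACTION_STARTED_AT`: whichever action runs first records the workflow start time, and later actions reuse it. A minimal sketch of how a consuming action might read it, assuming a fallback to the current time when unset (only the variable name is taken from the diff above; the helper is hypothetical):

```js
const core = require('@actions/core');

// Read the recorded workflow start time, or record one now if this is
// the first action to run in the workflow. (Sketch, not the action's code.)
function getWorkflowStartedAt() {
    let startedAt = process.env['CODEQL_WORKFLOW_STARTED_AT'];
    if (startedAt === undefined) {
        startedAt = new Date().toISOString();
        core.exportVariable('CODEQL_WORKFLOW_STARTED_AT', startedAt);
    }
    return startedAt;
}
```

This mirrors the defaulting logic that appears later in this diff in `createStatusReportBase`.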
22 lib/test-utils.js generated Normal file
@@ -0,0 +1,22 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function silenceDebugOutput(test) {
const typedTest = test;
typedTest.beforeEach(t => {
const processStdoutWrite = process.stdout.write.bind(process.stdout);
t.context.write = processStdoutWrite;
process.stdout.write = (str, encoding, cb) => {
// Core library will directly call process.stdout.write for commands
// We don't want :: commands to be executed by the runner during tests
if (!str.match(/^::/)) {
processStdoutWrite(str, encoding, cb);
}
return true;
};
});
typedTest.afterEach(t => {
process.stdout.write = t.context.write;
});
}
exports.silenceDebugOutput = silenceDebugOutput;
//# sourceMappingURL=test-utils.js.map
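A hedged usage sketch of `silenceDebugOutput` in an ava test file, assuming the helper is imported from the compiled `lib/test-utils` module shown above; the test name and body are illustrative only:

```js
const test = require('ava');
const { silenceDebugOutput } = require('./test-utils');

// Install the stdout hook so "::" workflow commands emitted during
// tests are swallowed instead of being interpreted by the runner.
silenceDebugOutput(test);

test('does not leak workflow commands', t => {
    // This line starts with "::" and is therefore filtered by the hook.
    process.stdout.write('::debug::suppressed during tests\n');
    t.pass();
});
```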
1 lib/test-utils.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"test-utils.js","sourceRoot":"","sources":["../src/test-utils.ts"],"names":[],"mappings":";;AAEA,SAAgB,kBAAkB,CAAC,IAAwB;IACzD,MAAM,SAAS,GAAG,IAAmC,CAAC;IAEtD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACrB,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG,kBAAkB,CAAC;QACrC,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,GAAQ,EAAE,QAAc,EAAE,EAA0B,EAAE,EAAE;YAC5E,oEAAoE;YACpE,sEAAsE;YACtE,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACnB,kBAAkB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC;aACzC;YACD,OAAO,IAAI,CAAC;QAChB,CAAC,CAAC;IACN,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;QACpB,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3C,CAAC,CAAC,CAAC;AACL,CAAC;AAnBD,gDAmBC"}
75 lib/testing-utils.js generated Normal file
@@ -0,0 +1,75 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const sinon_1 = __importDefault(require("sinon"));
const CodeQL = __importStar(require("./codeql"));
function wrapOutput(context) {
// Function signature taken from Socket.write.
// Note there are two overloads:
// write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean;
// write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean;
return (chunk, encoding, cb) => {
// Work out which method overload we are in
if (cb === undefined && typeof encoding === 'function') {
cb = encoding;
encoding = undefined;
}
// Record the output
if (typeof chunk === 'string') {
context.testOutput += chunk;
}
else {
context.testOutput += new TextDecoder(encoding || 'utf-8').decode(chunk);
}
// Satisfy contract by calling callback when done
if (cb !== undefined && typeof cb === 'function') {
cb();
}
return true;
};
}
function setupTests(test) {
const typedTest = test;
typedTest.beforeEach(t => {
// Set an empty CodeQL object so that all method calls will fail
// unless the test explicitly sets one up.
CodeQL.setCodeQL({});
// Replace stdout and stderr so we can record output during tests
t.context.testOutput = "";
const processStdoutWrite = process.stdout.write.bind(process.stdout);
t.context.stdoutWrite = processStdoutWrite;
process.stdout.write = wrapOutput(t.context);
const processStderrWrite = process.stderr.write.bind(process.stderr);
t.context.stderrWrite = processStderrWrite;
process.stderr.write = wrapOutput(t.context);
// Many tests modify environment variables. Take a copy now so that
// we reset them after the test to keep tests independent of each other.
// process.env only has string fields, so a shallow copy is fine.
t.context.env = {};
Object.assign(t.context.env, process.env);
});
typedTest.afterEach.always(t => {
// Restore stdout and stderr
// The captured output is only replayed if the test failed
process.stdout.write = t.context.stdoutWrite;
process.stderr.write = t.context.stderrWrite;
if (!t.passed) {
process.stdout.write(t.context.testOutput);
}
// Undo any modifications made by sinon
sinon_1.default.restore();
// Undo any modifications to the env
process.env = t.context.env;
});
}
exports.setupTests = setupTests;
//# sourceMappingURL=testing-utils.js.map
1 lib/testing-utils.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AAInC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"}
@@ -1 +1 @@
{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC3C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACpF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KACpB;CACJ;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}
{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC7C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACtF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KAClB;CACF;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}
192 lib/upload-lib.js generated
@@ -11,28 +11,15 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const http = __importStar(require("@actions/http-client"));
const auth = __importStar(require("@actions/http-client/auth"));
const io = __importStar(require("@actions/io"));
const file_url_1 = __importDefault(require("file-url"));
const fs = __importStar(require("fs"));
const jsonschema = __importStar(require("jsonschema"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
const api = __importStar(require("./api-client"));
const fingerprints = __importStar(require("./fingerprints"));
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
// Construct the location of the sentinel file for detecting multiple uploads.
// The returned location should be writable.
async function getSentinelFilePath() {
// Use the temp dir instead of placing next to the sarif file because of
// issues with docker actions. The directory containing the sarif file
// may not be writable by us.
const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
await io.mkdirP(uploadsTmpDir);
// Hash the absolute path so we'll behave correctly in the unlikely
// scenario a file is referenced twice with different paths.
return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
}
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
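The hunk ends at the signature, so the body of `combineSarifFiles` is not shown here. A minimal sketch of what such a combiner could look like, assuming the input files share a SARIF version and that merging simply concatenates their `runs` arrays (both assumptions, not taken from this diff; `fs` is imported at the top of the module as shown above):

```js
function combineSarifFiles(sarifFiles) {
    let combinedSarif = {
        version: null,
        runs: []
    };
    for (const sarifFile of sarifFiles) {
        const sarifObject = JSON.parse(fs.readFileSync(sarifFile, 'utf8'));
        // Assumption: all files use the same SARIF version; take the first seen.
        combinedSarif.version = combinedSarif.version || sarifObject.version;
        combinedSarif.runs.push(...sarifObject.runs);
    }
    return JSON.stringify(combinedSarif);
}
```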
@@ -61,37 +48,37 @@ async function uploadPayload(payload) {
// If in test mode we don't want to upload the results
const testMode = process.env['TEST_MODE'] === 'true' || false;
if (testMode) {
return true;
return;
}
const githubToken = core.getInput('token');
const ph = new auth.BearerCredentialHandler(githubToken);
const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';
const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
// Make up to 4 attempts to upload, and sleep for these
// number of seconds between each attempt.
// We don't want to backoff too much to avoid wasting action
// minutes, but just waiting a little bit could maybe help.
const backoffPeriods = [1, 5, 15];
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
const res = await client.put(url, payload);
core.debug('response status: ' + res.message.statusCode);
const statusCode = res.message.statusCode;
const response = await api.getApiClient().request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
owner: owner,
repo: repo,
data: payload,
}));
core.debug('response status: ' + response.status);
const statusCode = response.status;
if (statusCode === 202) {
core.info("Successfully uploaded results");
return true;
return;
}
const requestID = res.message.headers["x-github-request-id"];
const requestID = response.headers["x-github-request-id"];
// On any other status code that's not 5xx mark the upload as failed
if (!statusCode || statusCode < 500 || statusCode >= 600) {
core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + await res.readBody());
return false;
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
}
// On a 5xx status code we may retry the request
if (attempt < backoffPeriods.length) {
// Log the failure as a warning but don't mark the action as failed yet
core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
' seconds: (' + statusCode + ') ' + await res.readBody());
' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
// Sleep for the backoff period
await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
continue;
@@ -100,11 +87,12 @@ async function uploadPayload(payload) {
// If the upload fails with 5xx then we assume it is a temporary problem
// and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + await res.readBody());
return false;
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
}
}
return false;
// This case shouldn't ever happen as the final iteration of the loop
// will always throw an error instead of exiting to here.
throw new Error('Upload failed');
}
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
@@ -115,8 +103,7 @@ async function upload(input) {
.filter(f => f.endsWith(".sarif"))
.map(f => path.resolve(input, f));
if (sarifFiles.length === 0) {
core.setFailed("No SARIF files found to upload in \"" + input + "\".");
return false;
throw new Error("No SARIF files found to upload in \"" + input + "\".");
}
return await uploadFiles(sarifFiles);
}
@@ -125,63 +112,100 @@ async function upload(input) {
}
}
exports.upload = upload;
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) {
let numResults = 0;
for (const run of JSON.parse(sarif).runs) {
numResults += run.results.length;
}
return numResults;
}
exports.countResultsInSarif = countResultsInSarif;
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
function validateSarifFileSchema(sarifFilePath) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
core.startGroup("Error details: " + error.stack);
core.info(JSON.stringify(error, null, 2));
core.endGroup();
}
// Set the main error message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
const sarifErrors = result.errors.map(e => "- " + e.stack);
throw new Error("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + sarifErrors.join("\n"));
}
}
exports.validateSarifFileSchema = validateSarifFileSchema;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles) {
core.startGroup("Uploading results");
let succeeded = false;
try {
// Check if an upload has happened before. If so then abort.
// This is intended to catch when the finish and upload-sarif actions
// are used together, and then the upload-sarif action is invoked twice.
const sentinelFile = await getSentinelFilePath();
if (fs.existsSync(sentinelFile)) {
core.info("Aborting as an upload has already happened from this job");
return false;
}
const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRef();
const analysisKey = await util.getAnalysisKey();
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = file_url_1.default(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
return false;
}
let matrix = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const toolNames = util.getToolNames(sarifPayload);
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt,
"tool_names": toolNames,
});
// Make the upload
succeeded = await uploadPayload(payload);
// Mark that we have made an upload
fs.writeFileSync(sentinelFile, '');
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
catch (error) {
core.setFailed(error.message);
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file);
}
const commitOid = await util.getCommitOid();
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRef();
const analysisKey = await util.getAnalysisKey();
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = file_url_1.default(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
}
let matrix = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const toolNames = util.getToolNames(sarifPayload);
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt,
"tool_names": toolNames,
});
// Log some useful debug info about the upload
const rawUploadSizeBytes = sarifPayload.length;
core.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
const zippedUploadSizeBytes = zipped_sarif.length;
core.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
const numResultInSarif = countResultsInSarif(sarifPayload);
core.debug("Number of results in upload: " + numResultInSarif);
if (!util.isLocalRun()) {
// Make the upload
await uploadPayload(payload);
}
else {
core.debug("Not uploading because this is a local run.");
}
core.endGroup();
return succeeded;
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
};
}
//# sourceMappingURL=upload-lib.js.map
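Because this rendered diff interleaves removed and added lines without `+`/`-` markers, the sentinel logic above is easy to misread: the file-based sentinel (`getSentinelFilePath`, `fs.writeFileSync(sentinelFile, '')`) is the old mechanism being removed, and the `CODEQL_UPLOAD_SARIF` environment variable is its replacement. Distilled to a sketch (the names and messages are taken from the diff; the isolated control flow is a reconstruction and therefore approximate):

```js
// New duplicate-upload guard: an env var scoped to the current job.
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
    throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
// Mark that an upload has started so a second invocation in this job aborts.
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
```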
File diff suppressed because one or more lines are too long
25 lib/upload-lib.test.js generated Normal file
@@ -0,0 +1,25 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile));
});
ava_1.default('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
t.throws(() => uploadLib.validateSarifFileSchema(inputFile));
});
//# sourceMappingURL=upload-lib.test.js.map
1 lib/upload-lib.test.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AAClE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/D,CAAC,CAAC,CAAC"}
22 lib/upload-sarif.js generated
@@ -10,21 +10,27 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendSuccessStatusReport(startedAt, uploadStats) {
const statusReportBase = await util.createStatusReportBase('upload-sarif', 'success', startedAt);
const statusReport = {
...statusReportBase,
...uploadStats,
};
await util.sendStatusReport(statusReport);
}
async function run() {
if (util.should_abort('upload-sarif', false) || !await util.reportActionStarting('upload-sarif')) {
const startedAt = new Date();
if (!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
return;
}
try {
if (await upload_lib.upload(core.getInput('sarif_file'))) {
await util.reportActionSucceeded('upload-sarif');
}
else {
await util.reportActionFailed('upload-sarif', 'upload');
}
const uploadStats = await upload_lib.upload(core.getInput('sarif_file'));
await sendSuccessStatusReport(startedAt, uploadStats);
}
catch (error) {
core.setFailed(error.message);
await util.reportActionFailed('upload-sarif', error.message, error.stack);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'failure', startedAt, error.message, error.stack));
return;
}
}
@@ -1 +1 @@
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,GAAG;IACd,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,EAAE;QAC9F,OAAO;KACV;IAED,IAAI;QACA,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE;YACtD,MAAM,IAAI,CAAC,qBAAqB,CAAC,cAAc,CAAC,CAAC;SACpD;aAAM;YACH,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;SAC3D;KACJ;IAAC,OAAO,KAAK,EAAE;QACZ,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1E,OAAO;KACV;AACL,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACZ,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACnB,CAAC,CAAC,CAAC"}
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAI/B,KAAK,UAAU,uBAAuB,CAAC,SAAe,EAAE,WAA0C;IAChG,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACjG,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAI,WAAW;KAChB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;QAChH,OAAO;KACR;IAED,IAAI;QACF,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,CAAC;QACzE,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KAEvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAC3D,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;QAChB,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
389 lib/util.js generated
@@ -6,171 +6,111 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const http = __importStar(require("@actions/http-client"));
const auth = __importStar(require("@actions/http-client/auth"));
const octokit = __importStar(require("@octokit/rest"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
/**
* Should the current action be aborted?
*
* This method should be called at the start of all CodeQL actions and they
* should abort cleanly if this returns true without failing the action.
* This method will call `core.setFailed` if necessary.
* The API URL for github.com.
*/
function should_abort(actionName, requireInitActionHasRun) {
// Check that required aspects of the environment are present
const ref = process.env['GITHUB_REF'];
if (ref === undefined) {
core.setFailed('GITHUB_REF must be set.');
return true;
}
// Should abort if called on a merge commit for a pull request.
if (ref.startsWith('refs/pull/')) {
core.warning('The CodeQL ' + actionName + ' action is intended for workflows triggered on `push` events, '
+ 'but the current workflow is running on a pull request. Aborting.');
return true;
}
// If the init action is required, then check that it completed successfully.
if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
return true;
}
return false;
}
exports.should_abort = should_abort;
exports.GITHUB_DOTCOM_API_URL = "https://api.github.com";
/**
* Resolve the path to the workspace folder.
* Get the API URL for the GitHub instance we are connected to.
* May be for github.com or for an enterprise instance.
*/
function workspaceFolder() {
let workspaceFolder = process.env['RUNNER_WORKSPACE'];
if (!workspaceFolder)
workspaceFolder = path.resolve('..');
return workspaceFolder;
function getInstanceAPIURL() {
return process.env["GITHUB_API_URL"] || exports.GITHUB_DOTCOM_API_URL;
}
exports.workspaceFolder = workspaceFolder;
exports.getInstanceAPIURL = getInstanceAPIURL;
/**
* Are we running against a GitHub Enterprise instance, as opposed to github.com.
*/
function isEnterprise() {
return getInstanceAPIURL() !== exports.GITHUB_DOTCOM_API_URL;
}
exports.isEnterprise = isEnterprise;
/**
* Get an environment parameter, but throw an error if it is not set.
*/
function getRequiredEnvParam(paramName) {
const value = process.env[paramName];
if (value === undefined) {
if (value === undefined || value.length === 0) {
throw new Error(paramName + ' environment variable must be set');
}
core.debug(paramName + '=' + value);
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
function isLocalRun() {
return !!process.env.CODEQL_LOCAL_RUN
&& process.env.CODEQL_LOCAL_RUN !== 'false'
&& process.env.CODEQL_LOCAL_RUN !== '0';
}
exports.isLocalRun = isLocalRun;
/**
* Gets the set of languages in the current repository
* Ensures all required environment variables are set in the context of a local run.
*/
async function getLanguagesInRepo() {
var _a;
// Translate between GitHub's API names for languages and ours
const codeqlLanguages = {
'C': 'cpp',
'C++': 'cpp',
'C#': 'csharp',
'Go': 'go',
'Java': 'java',
'JavaScript': 'javascript',
'TypeScript': 'javascript',
'Python': 'python',
};
let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
if (repo_nwo) {
let owner = repo_nwo[0];
let repo = repo_nwo[1];
core.debug(`GitHub repo ${owner} ${repo}`);
let ok = new octokit.Octokit({
auth: core.getInput('token'),
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" })
});
const response = await ok.request("GET /repos/:owner/:repo/languages", ({
owner,
repo
}));
core.debug("Languages API response: " + JSON.stringify(response));
// The GitHub API is going to return languages in order of popularity,
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
let languages = new Set();
for (let lang in response.data) {
if (lang in codeqlLanguages) {
languages.add(codeqlLanguages[lang]);
function prepareLocalRunEnvironment() {
if (!isLocalRun()) {
return;
}
core.debug('Action is running locally.');
if (!process.env.GITHUB_JOB) {
core.exportVariable('GITHUB_JOB', 'UNKNOWN-JOB');
}
}
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
/**
* Gets the SHA of the commit that is currently checked out.
*/
async function getCommitOid() {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
// the workflow has changed the current commit to the head commit instead of
// the merge commit, which must mean that git is available.
// Even if this does go wrong, it's not a huge problem for the alerts to be
// reported on the merge commit.
try {
let commitOid = '';
await exec.exec('git', ['rev-parse', 'HEAD'], {
silent: true,
listeners: {
stdout: (data) => { commitOid += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
}
return [...languages];
});
return commitOid.trim();
}
else {
return [];
catch (e) {
core.info("Failed to call git to get current commit. Continuing with data from environment: " + e);
return getRequiredEnvParam('GITHUB_SHA');
}
}
/**
* Get the languages to analyse.
*
* The result is obtained from the environment parameter CODEQL_ACTION_LANGUAGES
* if that has been set, otherwise it is obtained from the action input parameter
* 'languages' if that has been set, otherwise it is deduced as all languages in the
* repo that can be analysed.
*
* If the languages are obtained from either of the second choices, the
* CODEQL_ACTION_LANGUAGES environment variable will be exported with the
* deduced list.
*/
async function getLanguages() {
// Obtain from CODEQL_ACTION_LANGUAGES if set
const langsVar = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES];
if (langsVar) {
return langsVar.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
}
// Obtain from action input 'languages' if set
let languages = core.getInput('languages', { required: false })
.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
core.info("Languages from configuration: " + JSON.stringify(languages));
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo();
core.info("Automatically detected languages: " + JSON.stringify(languages));
}
core.exportVariable(sharedEnv.CODEQL_ACTION_LANGUAGES, languages.join(','));
return languages;
}
exports.getLanguages = getLanguages;
exports.getCommitOid = getCommitOid;
/**
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath() {
if (isLocalRun()) {
return 'LOCAL';
}
const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = getRequiredEnvParam('GITHUB_RUN_ID');
const ok = new octokit.Octokit({
auth: core.getInput('token'),
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: 'debug' })
});
const runsResponse = await ok.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
const run_id = Number(getRequiredEnvParam('GITHUB_RUN_ID'));
const apiClient = api.getApiClient();
const runsResponse = await apiClient.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
owner,
repo,
run_id
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await ok.request('GET ' + workflowUrl);
const workflowResponse = await apiClient.request('GET ' + workflowUrl);
return workflowResponse.data.path;
}
/**
@@ -181,14 +121,15 @@ async function getWorkflowPath() {
* the github API, but after that the result will be cached.
*/
async function getAnalysisKey() {
let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
const analysisKeyEnvVar = 'CODEQL_ACTION_ANALYSIS_KEY';
let analysisKey = process.env[analysisKeyEnvVar];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam('GITHUB_JOB');
analysisKey = workflowPath + ':' + jobName;
core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
core.exportVariable(analysisKeyEnvVar, analysisKey);
return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
@@ -196,8 +137,20 @@ exports.getAnalysisKey = getAnalysisKey;
* Get the ref currently being analyzed.
*/
function getRef() {
// it's in the form "refs/heads/master"
return getRequiredEnvParam('GITHUB_REF');
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam('GITHUB_REF');
// For pull request refs we want to convert from the 'merge' ref
// to the 'head' ref, as that is what we want to analyse.
// There should have been some code earlier in the workflow to do
// the checkout, but we have no way of verifying that here.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
if (pull_ref_regex.test(ref)) {
return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
}
else {
return ref;
}
}
exports.getRef = getRef;
/**
@@ -205,10 +158,11 @@ exports.getRef = getRef;
*
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
* @param status The status. Must be 'success', 'failure', or 'starting'
* @param startedAt The time this action started executing.
* @param cause Cause of failure (only supply if status is 'failure')
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReport(actionName, status, cause, exception) {
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = process.env['GITHUB_SHA'] || '';
const ref = getRef();
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
@@ -218,19 +172,23 @@ async function createStatusReport(actionName, status, cause, exception) {
}
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
const jobName = process.env['GITHUB_JOB'] || '';
const languages = (await getLanguages()).sort().join(',');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT] || new Date().toISOString();
core.exportVariable(sharedEnv.CODEQL_ACTION_STARTED_AT, startedAt);
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
let statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
job_name: jobName,
languages: languages,
analysis_key: analysis_key,
commit_oid: commitOid,
ref: ref,
action_name: actionName,
action_oid: "unknown",
started_at: startedAt,
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status: status
};
// Add optional parameters
@@ -240,7 +198,7 @@ async function createStatusReport(actionName, status, cause, exception) {
if (exception) {
statusReport.exception = exception;
}
if (status === 'success' || status === 'failure') {
if (status === 'success' || status === 'failure' || status === 'aborted') {
statusReport.completed_at = new Date().toISOString();
}
let matrix = core.getInput('matrix');
@@ -249,72 +207,53 @@ async function createStatusReport(actionName, status, cause, exception) {
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
/**
* Send a status report to the code_scanning/analysis/status endpoint.
*
* Returns the status code of the response to the status request, or
* undefined if the given statusReport is undefined or no response was
* received.
* Optionally checks the response from the API endpoint and sets the action
* as failed if the status report failed. This is only expected to be used
* when sending a 'starting' report.
*
* Returns whether sending the status report was successful or not.
*/
async function sendStatusReport(statusReport) {
var _a;
async function sendStatusReport(statusReport, ignoreFailures) {
if (isEnterprise()) {
core.debug("Not sending status report to GitHub Enterprise");
return true;
}
if (isLocalRun()) {
core.debug("Not sending status report because this is a local run");
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug('Sending status report: ' + statusReportJSON);
const githubToken = core.getInput('token');
const ph = new auth.BearerCredentialHandler(githubToken);
const client = new http.HttpClient('Code Scanning : Status Report', [ph]);
const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY']
+ '/code-scanning/analysis/status';
const res = await client.put(url, statusReportJSON);
return (_a = res.message) === null || _a === void 0 ? void 0 : _a.statusCode;
}
/**
* Send a status report that an action is starting.
*
* If the action is `init` then this also records the start time in the environment,
* and ensures that the analysed languages are also recorded in the environment.
*
* Returns true unless a problem occurred and the action should abort.
*/
async function reportActionStarting(action) {
const statusCode = await sendStatusReport(await createStatusReport(action, 'starting'));
// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusCode === 403) {
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
return false;
}
if (statusCode === 404) {
core.setFailed('Not authorized to use the CodeQL code scanning feature on this repo.');
return false;
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const statusResponse = await api.getApiClient().request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
owner: owner,
repo: repo,
data: statusReportJSON,
});
if (!ignoreFailures) {
// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusResponse.status === 403) {
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
return false;
}
if (statusResponse.status === 404) {
core.setFailed('Not authorized to use the CodeQL code scanning feature on this repo.');
return false;
}
}
return true;
}
exports.reportActionStarting = reportActionStarting;
/**
* Report that an action has failed.
*
* Note that the started_at date is always that of the `init` action, since
* this is likely to give a more useful duration when inspecting events.
*/
async function reportActionFailed(action, cause, exception) {
await sendStatusReport(await createStatusReport(action, 'failure', cause, exception));
}
exports.reportActionFailed = reportActionFailed;
/**
* Report that an action has succeeded.
*
* Note that the started_at date is always that of the `init` action, since
* this is likely to give a more useful duration when inspecting events.
*/
async function reportActionSucceeded(action) {
await sendStatusReport(await createStatusReport(action, 'success'));
}
exports.reportActionSucceeded = reportActionSucceeded;
exports.sendStatusReport = sendStatusReport;
/**
* Get the array of all the tool names contained in the given sarif contents.
*
@@ -337,8 +276,70 @@ exports.getToolNames = getToolNames;
// Mostly intended for use within tests.
async function withTmpDir(body) {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
await body(tmpDir);
const realSubdir = path.join(tmpDir, 'real');
fs.mkdirSync(realSubdir);
const symlinkSubdir = path.join(tmpDir, 'symlink');
fs.symlinkSync(realSubdir, symlinkSubdir, 'dir');
const result = await body(symlinkSubdir);
fs.rmdirSync(tmpDir, { recursive: true });
return result;
}
exports.withTmpDir = withTmpDir;
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus 256 MB.
*
* @returns string
*/
function getMemoryFlag() {
let memoryToUseMegaBytes;
const memoryToUseString = core.getInput("ram");
if (memoryToUseString) {
memoryToUseMegaBytes = Number(memoryToUseString);
if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
}
}
else {
const totalMemoryBytes = os.totalmem();
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const systemReservedMemoryMegaBytes = 256;
memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
}
return "--ram=" + Math.floor(memoryToUseMegaBytes);
}
exports.getMemoryFlag = getMemoryFlag;
/**
* Get the codeql `--threads` value specified for the `threads` input. The value
* defaults to 1. The value will be capped to the number of available CPUs.
*
* @returns string
*/
function getThreadsFlag() {
let numThreads = 1;
const numThreadsString = core.getInput("threads");
if (numThreadsString) {
numThreads = Number(numThreadsString);
if (Number.isNaN(numThreads)) {
throw new Error(`Invalid threads setting "${numThreadsString}", specified.`);
}
const maxThreads = os.cpus().length;
if (numThreads > maxThreads) {
numThreads = maxThreads;
}
const minThreads = -maxThreads;
if (numThreads < minThreads) {
numThreads = minThreads;
}
}
return `--threads=${numThreads}`;
}
exports.getThreadsFlag = getThreadsFlag;
/**
* Get the directory where CodeQL databases should be placed.
*/
function getCodeQLDatabasesDir() {
return path.resolve(getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
}
exports.getCodeQLDatabasesDir = getCodeQLDatabasesDir;
//# sourceMappingURL=util.js.map
File diff suppressed because one or more lines are too long
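A worked example of the new `getRef` behaviour from the util.js diff above, assuming a pull_request event; the input/output pair follows directly from the regex and replacement shown there:

```js
// On a pull_request event GITHUB_REF points at the synthetic merge commit:
//   refs/pull/123/merge
// getRef() rewrites it to the head ref that the workflow is expected to
// have checked out:
const ref = 'refs/pull/123/merge';
console.log(ref.replace(/refs\/pull\/(\d+)\/merge/, 'refs/pull/$1/head'));
// -> refs/pull/123/head
```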
74 lib/util.test.js generated
@@ -12,10 +12,84 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('getToolNames', t => {
const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
const toolNames = util.getToolNames(input);
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
ava_1.default('getMemoryFlag() should return the correct --ram flag', t => {
const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
const tests = {
"": `--ram=${totalMem - 256}`,
"512": "--ram=512",
};
for (const [input, expectedFlag] of Object.entries(tests)) {
process.env['INPUT_RAM'] = input;
const flag = util.getMemoryFlag();
t.deepEqual(flag, expectedFlag);
}
});
ava_1.default('getMemoryFlag() throws if the ram input is < 0 or NaN', t => {
for (const input of ["-1", "hello!"]) {
process.env['INPUT_RAM'] = input;
t.throws(util.getMemoryFlag);
}
});
ava_1.default('getThreadsFlag() should return the correct --threads flag', t => {
const numCpus = os.cpus().length;
const tests = {
"0": "--threads=0",
"1": "--threads=1",
[`${numCpus + 1}`]: `--threads=${numCpus}`,
[`${-numCpus - 1}`]: `--threads=${-numCpus}`
};
for (const [input, expectedFlag] of Object.entries(tests)) {
process.env['INPUT_THREADS'] = input;
const flag = util.getThreadsFlag();
t.deepEqual(flag, expectedFlag);
}
});
ava_1.default('getThreadsFlag() throws if the threads input is not an integer', t => {
process.env['INPUT_THREADS'] = "hello!";
t.throws(util.getThreadsFlag);
});
ava_1.default('getRef() throws on the empty string', t => {
process.env["GITHUB_REF"] = "";
t.throws(util.getRef);
});
ava_1.default('isLocalRun() runs correctly', t => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = '';
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'false';
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = '0';
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'true';
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'hucairz';
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
ava_1.default('prepareEnvironment() when a local run', t => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = 'false';
process.env.GITHUB_JOB = 'YYY';
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
process.env.CODEQL_LOCAL_RUN = 'true';
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
process.env.GITHUB_JOB = '';
util.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, 'UNKNOWN-JOB');
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
//# sourceMappingURL=util.test.js.map
@@ -1 +1 @@
{"version":3,"file":"util.test.js","sourceRoot":"","sources":["../src/util.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AAEzB,6CAA+B;AAE/B,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACvB,MAAM,KAAK,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,GAAG,mCAAmC,EAAE,MAAM,CAAC,CAAC;IACvF,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC"}
{"version":3,"file":"util.test.js","sourceRoot":"","sources":["../src/util.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,uCAAyB;AAEzB,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACvB,MAAM,KAAK,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,GAAG,mCAAmC,EAAE,MAAM,CAAC,CAAC;IACvF,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,sDAAsD,EAAE,CAAC,CAAC,EAAE;IAE/D,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC;IAE3D,MAAM,KAAK,GAAG;QACZ,EAAE,EAAE,SAAS,QAAQ,GAAG,GAAG,EAAE;QAC7B,KAAK,EAAE,WAAW;KACnB,CAAC;IAEF,KAAK,MAAM,CAAC,KAAK,EAAE,YAAY,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAEzD,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC;QAEjC,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;QAClC,CAAC,CAAC,SAAS,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;KACjC;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,uDAAuD,EAAE,CAAC,CAAC,EAAE;IAChE,KAAK,MAAM,KAAK,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,EAAE;QACpC,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC;QACjC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;KAC9B;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,2DAA2D,EAAE,CAAC,CAAC,EAAE;IAEpE,MAAM,OAAO,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC;IAEjC,MAAM,KAAK,GAAG;QACZ,GAAG,EAAE,aAAa;QAClB,GAAG,EAAE,aAAa;QAClB,CAAC,GAAG,OAAO,GAAG,CAAC,EAAE,CAAC,EAAE,aAAa,OAAO,EAAE;QAC1C,CAAC,GAAG,CAAC,OAAO,GAAG,CAAC,EAAE,CAAC,EAAE,aAAa,CAAC,OAAO,EAAE;KAC7C,CAAC;IAEF,KAAK,MAAM,CAAC,KAAK,EAAE,YAAY,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAEzD,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,GAAG,KAAK,CAAC;QAErC,MAAM,IAAI,GAAG,IAAI,CAAC,cAAc,EAAE,CAAC;QACnC,CAAC,CAAC,SAAS,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;KACjC;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gEAAgE,EAAE,CAAC,CAAC,EAAE;IACzE,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,GAAG,QAAQ,CAAC;IACxC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;AAChC,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE;IAC9C,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;IAC/B,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACxB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,6BAA6B,EAAE,CAAC,CAAC,EAAE;IACtC,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;IAElD,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,EAAE,CAAC;IAClC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAE7B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,OAAO,CAAC;IACvC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAE7B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,GAAG,CAAC;IACnC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAE7B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,MAAM,CAAC;IACtC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAE5B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,SAAS,CAAC;IACzC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAE5B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,YAAY,CAAC;AAC9C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,uCAAuC,EAAE,CAAC,CAAC,EAAE;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;IAElD,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,OAAO,CAAC;IACvC,OAAO,CAAC,GAAG,CAAC,UAAU,GAAG,KAAK,CAAC;IAE/B,IAAI,CAAC,0BAA0B,EAAE,CAAC;IAElC,YAAY;IACZ,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;IAE3C,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,MAAM,CAAC;IAEtC,IAAI,CAAC,0BAA0B,EAAE,CAAC;IAElC,YAAY;IACZ,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;IAE3C,OAAO,CAAC,GAAG,CAAC,UAAU,GAAG,EAAE,CAAC;IAE5B,IAAI,CAAC,0BAA0B,EAAE,CAAC;IAElC,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAEnD,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,YAAY,CAAC;AAC9C,CAAC,CAAC,CAAC"}
node_modules/.bin/semver (2 lines, generated, vendored)
@@ -1 +1 @@
-../semver/bin/semver
+../semver/bin/semver.js
node_modules/@actions/github/README.md (74 lines, generated, vendored, new file)
@@ -0,0 +1,74 @@
# `@actions/github`

> A hydrated Octokit client.

## Usage

Returns an authenticated Octokit client that follows the machine [proxy settings](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners). See https://octokit.github.io/rest.js for the API.

```js
const github = require('@actions/github');
const core = require('@actions/core');

async function run() {
  // This should be a token with access to your repository scoped in as a secret.
  // The YML workflow will need to set myToken with the GitHub Secret Token
  // myToken: ${{ secrets.GITHUB_TOKEN }}
  // https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token#about-the-github_token-secret
  const myToken = core.getInput('myToken');

  const octokit = new github.GitHub(myToken);

  const { data: pullRequest } = await octokit.pulls.get({
    owner: 'octokit',
    repo: 'rest.js',
    pull_number: 123,
    mediaType: {
      format: 'diff'
    }
  });

  console.log(pullRequest);
}

run();
```

You can pass client options, as specified by [Octokit](https://octokit.github.io/rest.js/), as a second argument to the `GitHub` constructor.

You can also make GraphQL requests. See https://github.com/octokit/graphql.js for the API.

```js
const result = await octokit.graphql(query, variables);
```

Finally, you can get the context of the current action:

```js
const github = require('@actions/github');

const context = github.context;

const newIssue = await octokit.issues.create({
  ...context.repo,
  title: 'New issue!',
  body: 'Hello Universe!'
});
```

## Webhook payload typescript definitions

The npm module `@octokit/webhooks` provides type definitions for the response payloads. You can cast the payload to these types for better type information.

First, install the npm module `npm install @octokit/webhooks`

Then, assert the type based on the eventName
```ts
import * as core from '@actions/core'
import * as github from '@actions/github'
import * as Webhooks from '@octokit/webhooks'
if (github.context.eventName === 'push') {
  const pushPayload = github.context.payload as Webhooks.WebhookPayloadPush
  core.info(`The head commit is: ${pushPayload.head}`)
}
```
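The README above mentions that client options can be passed as a second argument to the `GitHub` constructor but gives no example. A minimal sketch of that call, matching the constructor overloads declared in the vendored `github.d.ts` further down in this diff; the `baseUrl` and `userAgent` values are illustrative assumptions, not taken from the source:

```ts
import { GitHub } from '@actions/github';

// Any Octokit.Options field except `auth` may be passed here; auth is
// derived from the token argument instead.
const octokit = new GitHub('my-token', {
  baseUrl: 'https://github.example.com/api/v3', // hypothetical GHES REST endpoint
  userAgent: 'my-workflow-step',
});
```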
node_modules/@actions/github/lib/context.d.ts (26 lines, generated, vendored, new file)
@@ -0,0 +1,26 @@
import { WebhookPayload } from './interfaces';
export declare class Context {
    /**
     * Webhook payload object that triggered the workflow
     */
    payload: WebhookPayload;
    eventName: string;
    sha: string;
    ref: string;
    workflow: string;
    action: string;
    actor: string;
    /**
     * Hydrate the context from the environment
     */
    constructor();
    get issue(): {
        owner: string;
        repo: string;
        number: number;
    };
    get repo(): {
        owner: string;
        repo: string;
    };
}
node_modules/@actions/github/lib/context.js (46 lines, generated, vendored, new file)
@@ -0,0 +1,46 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = require("fs");
const os_1 = require("os");
class Context {
    /**
     * Hydrate the context from the environment
     */
    constructor() {
        this.payload = {};
        if (process.env.GITHUB_EVENT_PATH) {
            if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
                this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
            }
            else {
                const path = process.env.GITHUB_EVENT_PATH;
                process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
    }
    get issue() {
        const payload = this.payload;
        return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
    }
    get repo() {
        if (process.env.GITHUB_REPOSITORY) {
            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
            return { owner, repo };
        }
        if (this.payload.repository) {
            return {
                owner: this.payload.repository.owner.login,
                repo: this.payload.repository.name
            };
        }
        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
    }
}
exports.Context = Context;
//# sourceMappingURL=context.js.map
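As the vendored `context.js` above shows, `Context` hydrates itself from `GITHUB_*` environment variables at construction time, and the `repo` getter prefers `GITHUB_REPOSITORY` before falling back to `payload.repository`. A minimal sketch of that fallback behavior; the environment and payload values are illustrative:

```ts
import { Context } from '@actions/github/lib/context';

// Case 1: GITHUB_REPOSITORY wins when it is set.
process.env.GITHUB_REPOSITORY = 'octocat/hello-world'; // illustrative value
console.log(new Context().repo); // { owner: 'octocat', repo: 'hello-world' }

// Case 2: without the variable, `repo` falls back to the event payload
// (and throws if neither source is available).
delete process.env.GITHUB_REPOSITORY;
const ctx = new Context();
ctx.payload = { repository: { name: 'hello-world', owner: { login: 'octocat' } } };
console.log(ctx.repo); // { owner: 'octocat', repo: 'hello-world' }
```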
node_modules/@actions/github/lib/context.js.map (1 line, generated, vendored, new file)
@@ -0,0 +1 @@
{"version":3,"file":"context.js","sourceRoot":"","sources":["../src/context.ts"],"names":[],"mappings":";;AAEA,2BAA2C;AAC3C,2BAAsB;AAEtB,MAAa,OAAO;IAalB;;OAEG;IACH;QACE,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;QACjB,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,IAAI,eAAU,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,KAAK,CACvB,iBAAY,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAChE,CAAA;aACF;iBAAM;gBACL,MAAM,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAA;gBAC1C,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,IAAI,kBAAkB,QAAG,EAAE,CAAC,CAAA;aACvE;SACF;QACD,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,iBAA2B,CAAA;QACxD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,eAAyB,CAAA;QACrD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,aAAuB,CAAA;QACjD,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,YAAsB,CAAA;IACjD,CAAC;IAED,IAAI,KAAK;QACP,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAA;QAE5B,uCACK,IAAI,CAAC,IAAI,KACZ,MAAM,EAAE,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,YAAY,IAAI,OAAO,CAAC,CAAC,MAAM,IAClE;IACH,CAAC;IAED,IAAI,IAAI;QACN,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;YAC9D,OAAO,EAAC,KAAK,EAAE,IAAI,EAAC,CAAA;SACrB;QAED,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YAC3B,OAAO;gBACL,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK;gBAC1C,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI;aACnC,CAAA;SACF;QAED,MAAM,IAAI,KAAK,CACb,kFAAkF,CACnF,CAAA;IACH,CAAC;CACF;AA9DD,0BA8DC"}
node_modules/@actions/github/lib/github.d.ts (27 lines, generated, vendored, new file)
@@ -0,0 +1,27 @@
import { graphql as GraphQL } from '@octokit/graphql/dist-types/types';
import { Octokit } from '@octokit/rest';
import * as Context from './context';
export declare const context: Context.Context;
export declare class GitHub extends Octokit {
    graphql: GraphQL;
    /**
     * Sets up the REST client and GraphQL client with auth and proxy support.
     * The parameter `token` or `opts.auth` must be supplied. The GraphQL client
     * authorization is not setup when `opts.auth` is a function or object.
     *
     * @param token Auth token
     * @param opts Octokit options
     */
    constructor(token: string, opts?: Omit<Octokit.Options, 'auth'>);
    constructor(opts: Octokit.Options);
    /**
     * Disambiguates the constructor overload parameters
     */
    private static disambiguate;
    private static getOctokitOptions;
    private static getGraphQL;
    private static getAuthString;
    private static getProxyAgent;
    private static getApiBaseUrl;
    private static getGraphQLBaseUrl;
}
node_modules/@actions/github/lib/github.js (108 lines, generated, vendored, new file)
@@ -0,0 +1,108 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts
const graphql_1 = require("@octokit/graphql");
const rest_1 = require("@octokit/rest");
const Context = __importStar(require("./context"));
const httpClient = __importStar(require("@actions/http-client"));
// We need this in order to extend Octokit
rest_1.Octokit.prototype = new rest_1.Octokit();
exports.context = new Context.Context();
class GitHub extends rest_1.Octokit {
    constructor(token, opts) {
        super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts)));
        this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts));
    }
    /**
     * Disambiguates the constructor overload parameters
     */
    static disambiguate(token, opts) {
        return [
            typeof token === 'string' ? token : '',
            typeof token === 'object' ? token : opts || {}
        ];
    }
    static getOctokitOptions(args) {
        const token = args[0];
        const options = Object.assign({}, args[1]); // Shallow clone - don't mutate the object provided by the caller
        // Base URL - GHES or Dotcom
        options.baseUrl = options.baseUrl || this.getApiBaseUrl();
        // Auth
        const auth = GitHub.getAuthString(token, options);
        if (auth) {
            options.auth = auth;
        }
        // Proxy
        const agent = GitHub.getProxyAgent(options.baseUrl, options);
        if (agent) {
            // Shallow clone - don't mutate the object provided by the caller
            options.request = options.request ? Object.assign({}, options.request) : {};
            // Set the agent
            options.request.agent = agent;
        }
        return options;
    }
    static getGraphQL(args) {
        const defaults = {};
        defaults.baseUrl = this.getGraphQLBaseUrl();
        const token = args[0];
        const options = args[1];
        // Authorization
        const auth = this.getAuthString(token, options);
        if (auth) {
            defaults.headers = {
                authorization: auth
            };
        }
        // Proxy
        const agent = GitHub.getProxyAgent(defaults.baseUrl, options);
        if (agent) {
            defaults.request = { agent };
        }
        return graphql_1.graphql.defaults(defaults);
    }
    static getAuthString(token, options) {
        // Validate args
        if (!token && !options.auth) {
            throw new Error('Parameter token or opts.auth is required');
        }
        else if (token && options.auth) {
            throw new Error('Parameters token and opts.auth may not both be specified');
        }
        return typeof options.auth === 'string' ? options.auth : `token ${token}`;
    }
    static getProxyAgent(destinationUrl, options) {
        var _a;
        if (!((_a = options.request) === null || _a === void 0 ? void 0 : _a.agent)) {
            if (httpClient.getProxyUrl(destinationUrl)) {
                const hc = new httpClient.HttpClient();
                return hc.getAgent(destinationUrl);
            }
        }
        return undefined;
    }
    static getApiBaseUrl() {
        return process.env['GITHUB_API_URL'] || 'https://api.github.com';
    }
    static getGraphQLBaseUrl() {
        let url = process.env['GITHUB_GRAPHQL_URL'] || 'https://api.github.com/graphql';
        // Shouldn't be a trailing slash, but remove if so
        if (url.endsWith('/')) {
            url = url.substr(0, url.length - 1);
        }
        // Remove trailing "/graphql"
        if (url.toUpperCase().endsWith('/GRAPHQL')) {
            url = url.substr(0, url.length - '/graphql'.length);
        }
        return url;
    }
}
exports.GitHub = GitHub;
//# sourceMappingURL=github.js.map
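`getGraphQLBaseUrl` above normalizes `GITHUB_GRAPHQL_URL` by trimming a trailing slash and then a trailing `/graphql` segment before handing the base URL to the GraphQL client. A standalone sketch of the same trimming; the function name and sample URL are assumptions for illustration, not exports of the vendored module:

```ts
// Hypothetical free function mirroring GitHub.getGraphQLBaseUrl above.
function normalizeGraphQLBaseUrl(raw: string): string {
  let url = raw;
  // Shouldn't be a trailing slash, but remove if so.
  if (url.endsWith('/')) {
    url = url.substring(0, url.length - 1);
  }
  // Remove a trailing "/graphql" segment, case-insensitively.
  if (url.toUpperCase().endsWith('/GRAPHQL')) {
    url = url.substring(0, url.length - '/graphql'.length);
  }
  return url;
}

console.log(normalizeGraphQLBaseUrl('https://github.example.com/api/graphql/'));
// -> 'https://github.example.com/api'
```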
node_modules/@actions/github/lib/github.js.map (1 line, generated, vendored, new file)
@@ -0,0 +1 @@
{"version":3,"file":"github.js","sourceRoot":"","sources":["../src/github.ts"],"names":[],"mappings":";;;;;;;;;AAAA,gGAAgG;AAChG,8CAAwC;AAUxC,wCAAqC;AACrC,mDAAoC;AAEpC,iEAAkD;AAElD,0CAA0C;AAC1C,cAAO,CAAC,SAAS,GAAG,IAAI,cAAO,EAAE,CAAA;AAEpB,QAAA,OAAO,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,CAAA;AAE5C,MAAa,MAAO,SAAQ,cAAO;IAiBjC,YAAY,KAA+B,EAAE,IAAsB;QACjE,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,MAAM,CAAC,YAAY,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC,CAAA;QAEjE,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,YAAY,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,CAAA;IACpE,CAAC;IAED;;OAEG;IACK,MAAM,CAAC,YAAY,CACzB,KAA+B,EAC/B,IAAsB;QAEtB,OAAO;YACL,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;YACtC,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE;SAC/C,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,iBAAiB,CAC9B,IAA+B;QAE/B,MAAM,KAAK,GAAG,IAAI,CAAC,CAAC,CAAC,CAAA;QACrB,MAAM,OAAO,qBAAO,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA,CAAC,iEAAiE;QAE9F,4BAA4B;QAC5B,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,IAAI,CAAC,aAAa,EAAE,CAAA;QAEzD,OAAO;QACP,MAAM,IAAI,GAAG,MAAM,CAAC,aAAa,CAAC,KAAK,EAAE,OAAO,CAAC,CAAA;QACjD,IAAI,IAAI,EAAE;YACR,OAAO,CAAC,IAAI,GAAG,IAAI,CAAA;SACpB;QAED,QAAQ;QACR,MAAM,KAAK,GAAG,MAAM,CAAC,aAAa,CAAC,OAAO,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;QAC5D,IAAI,KAAK,EAAE;YACT,iEAAiE;YACjE,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,mBAAK,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE,CAAA;YAE7D,gBAAgB;YAChB,OAAO,CAAC,OAAO,CAAC,KAAK,GAAG,KAAK,CAAA;SAC9B;QAED,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,MAAM,CAAC,UAAU,CAAC,IAA+B;QACvD,MAAM,QAAQ,GAA6B,EAAE,CAAA;QAC7C,QAAQ,CAAC,OAAO,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAA;QAC3C,MAAM,KAAK,GAAG,IAAI,CAAC,CAAC,CAAC,CAAA;QACrB,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAA;QAEvB,gBAAgB;QAChB,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,EAAE,OAAO,CAAC,CAAA;QAC/C,IAAI,IAAI,EAAE;YACR,QAAQ,CAAC,OAAO,GAAG;gBACjB,aAAa,EAAE,IAAI;aACpB,CAAA;SACF;QAED,QAAQ;QACR,MAAM,KAAK,GAAG,MAAM,CAAC,aAAa,CAAC,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;QAC7D,IAAI,KAAK,EAAE;YACT,QAAQ,CAAC,OAAO,GAAG,EAAC,KAAK,EAAC,CAAA;SAC3B;QAED,OAAO,iBAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAA;IACnC,CAAC;IAEO,MAAM,CAAC,aAAa,CAC1B,KAAa,EACb,OAAwB;QAExB,gBAAgB;QAChB,IAAI,CAAC,KAAK,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;SAC5D;aAAM,IAAI,KAAK,IAAI,OAAO,CAAC,IAAI,EAAE;YAChC,MAAM,IAAI,KAAK,CACb,0DAA0D,CAC3D,CAAA;SACF;QAED,OAAO,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,KAAK,EAAE,CAAA;IAC3E,CAAC;IAEO,MAAM,CAAC,aAAa,CAC1B,cAAsB,EACtB,OAAwB;;QAExB,IAAI,QAAC,OAAO,CAAC,OAAO,0CAAE,KAAK,CAAA,EAAE;YAC3B,IAAI,UAAU,CAAC,WAAW,CAAC,cAAc,CAAC,EAAE;gBAC1C,MAAM,EAAE,GAAG,IAAI,UAAU,CAAC,UAAU,EAAE,CAAA;gBACtC,OAAO,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAA;aACnC;SACF;QAED,OAAO,SAAS,CAAA;IAClB,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,OAAO,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,IAAI,wBAAwB,CAAA;IAClE,CAAC;IAEO,MAAM,CAAC,iBAAiB;QAC9B,IAAI,GAAG,GACL,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,IAAI,gCAAgC,CAAA;QAEvE,kDAAkD;QAClD,IAAI,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACrB,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;SACpC;QAED,6BAA6B;QAC7B,IAAI,GAAG,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;YAC1C,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,MAAM,GAAG,UAAU,CAAC,MAAM,CAAC,CAAA;SACpD;QACD,OAAO,GAAG,CAAA;IACZ,CAAC;CACF;AAxID,wBAwIC"}
node_modules/@actions/github/lib/interfaces.d.ts (36 lines, generated, vendored, new file)
@@ -0,0 +1,36 @@
export interface PayloadRepository {
    [key: string]: any;
    full_name?: string;
    name: string;
    owner: {
        [key: string]: any;
        login: string;
        name?: string;
    };
    html_url?: string;
}
export interface WebhookPayload {
    [key: string]: any;
    repository?: PayloadRepository;
    issue?: {
        [key: string]: any;
        number: number;
        html_url?: string;
        body?: string;
    };
    pull_request?: {
        [key: string]: any;
        number: number;
        html_url?: string;
        body?: string;
    };
    sender?: {
        [key: string]: any;
        type: string;
    };
    action?: string;
    installation?: {
        id: number;
        [key: string]: any;
    };
}
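`WebhookPayload` above is deliberately loose (`[key: string]: any` throughout) but does guarantee a `number` field on `issue` and `pull_request`. A small sketch of consuming it, assuming the vendored module path; the payload literal is illustrative, since a real step would read `github.context.payload`:

```ts
import { WebhookPayload } from '@actions/github/lib/interfaces';

const payload: WebhookPayload = {
  action: 'opened',
  issue: { number: 42, html_url: 'https://github.com/octocat/hello-world/issues/42' },
};

if (payload.issue) {
  // `number` is the one field the interface guarantees on `issue`.
  console.log(`Issue #${payload.issue.number} was ${payload.action}`);
}
```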
node_modules/@actions/github/lib/interfaces.js (4 lines, generated, vendored, new file)
@@ -0,0 +1,4 @@
"use strict";
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=interfaces.js.map
node_modules/@actions/github/lib/interfaces.js.map (1 line, generated, vendored, new file)
@@ -0,0 +1 @@
{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":";AAAA,uDAAuD"}
@@ -99,17 +99,14 @@ If your target runtime environments supports async iterators (such as most moder
 
 ```js
 const parameters = {
-  owner: "octocat",
-  repo: "hello-world",
-  since: "2010-10-01",
-  per_page: 100
-};
-for await (const response of octokit.paginate.iterator(
-  "GET /repos/:owner/:repo/issues",
-  parameters
-)) {
+  owner: "octocat",
+  repo: "hello-world",
+  since: "2010-10-01",
+  per_page: 100
+}
+for await (const response of octokit.paginate.iterator("GET /repos/:owner/:repo/issues", parameters)) {
   // do whatever you want with each response, break out of the loop, etc.
-  console.log(response.data.title);
+  console.log(response.data.title)
 }
 ```
 
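The hunk above only reflows the async-iterator example; the API itself is unchanged. For contrast, a hedged sketch of the plugin's non-iterator form, which resolves once every page has been collected; the `octokit` parameter is assumed to be a client with the paginate plugin applied, such as the vendored `GitHub` class:

```ts
async function listAllIssues(octokit: any): Promise<void> {
  // paginate() concatenates the items from every page before resolving.
  const issues = await octokit.paginate('GET /repos/:owner/:repo/issues', {
    owner: 'octocat',
    repo: 'hello-world',
    since: '2010-10-01',
    per_page: 100,
  });
  console.log(`Fetched ${issues.length} issues across all pages`);
}
```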
@@ -2,26 +2,32 @@
 
 Object.defineProperty(exports, '__esModule', { value: true });
 
-const VERSION = "2.0.2";
+const VERSION = "1.1.2";
 
 /**
  * Some “list” response that can be paginated have a different response structure
  *
  * They have a `total_count` key in the response (search also has `incomplete_results`,
  * /installation/repositories also has `repository_selection`), as well as a key with
- * the list of the items which name varies from endpoint to endpoint.
+ * the list of the items which name varies from endpoint to endpoint:
+ *
+ * - https://developer.github.com/v3/search/#example (key `items`)
+ * - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`)
+ * - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`)
+ * - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`)
+ * - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`)
  *
  * Octokit normalizes these responses so that paginated results are always returned following
  * the same structure. One challenge is that if the list response has only one page, no Link
  * header is provided, so this header alone is not sufficient to check wether a response is
- * paginated or not.
- *
- * We check if a "total_count" key is present in the response data, but also make sure that
- * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
- * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
+ * paginated or not. For the exceptions with the namespace, a fallback check for the route
+ * paths has to be added in order to normalize the response. We cannot check for the total_count
+ * property because it also exists in the response of Get the combined status for a specific ref.
  */
+const REGEX = [/^\/search\//, /^\/repos\/[^/]+\/[^/]+\/commits\/[^/]+\/(check-runs|check-suites)([^/]|$)/, /^\/installation\/repositories([^/]|$)/, /^\/user\/installations([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/secrets([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/workflows(\/[^/]+\/runs)?([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/runs(\/[^/]+\/(artifacts|jobs))?([^/]|$)/];
 function normalizePaginatedListResponse(octokit, url, response) {
-  const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
+  const path = url.replace(octokit.request.endpoint.DEFAULTS.baseUrl, "");
+  const responseNeedsNormalization = REGEX.find(regex => regex.test(path));
   if (!responseNeedsNormalization) return; // keep the additional properties intact as there is currently no other way
   // to retrieve the same information.
 
@@ -44,6 +50,13 @@ function normalizePaginatedListResponse(octokit, url, response) {
   }
 
   response.data.total_count = totalCount;
+  Object.defineProperty(response.data, namespaceKey, {
+    get() {
+      octokit.log.warn(`[@octokit/paginate-rest] "response.data.${namespaceKey}" is deprecated for "GET ${path}". Get the results directly from "response.data"`);
+      return Array.from(data);
+    }
+
+  });
 }
 
 function iterator(octokit, route, parameters) {
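Where the removed 2.0.2 code sniffed `total_count` in the response body, the 1.1.2 build above decides whether to normalize by matching the request path against a list of route regexes. A sketch of that check in isolation; `needsNormalization` is a hypothetical name, and only a subset of the vendored patterns is shown:

```ts
// A subset of the route patterns registered by the vendored 1.1.2 build above.
const REGEX: RegExp[] = [
  /^\/search\//,
  /^\/installation\/repositories([^/]|$)/,
  /^\/user\/installations([^/]|$)/,
];

// Mirrors the path test inside normalizePaginatedListResponse
// (the vendored code uses REGEX.find; .some returns the boolean directly).
function needsNormalization(url: string, baseUrl = 'https://api.github.com'): boolean {
  const path = url.replace(baseUrl, '');
  return REGEX.some((regex) => regex.test(path));
}

console.log(needsNormalization('https://api.github.com/search/issues?q=bug')); // true
console.log(needsNormalization('https://api.github.com/repos/octocat/hello-world/issues')); // false
```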
node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map (1 line, generated, vendored, new file): diff suppressed because one or more lines are too long.
@@ -3,19 +3,33 @@
  *
  * They have a `total_count` key in the response (search also has `incomplete_results`,
  * /installation/repositories also has `repository_selection`), as well as a key with
- * the list of the items which name varies from endpoint to endpoint.
+ * the list of the items which name varies from endpoint to endpoint:
+ *
+ * - https://developer.github.com/v3/search/#example (key `items`)
+ * - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`)
+ * - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`)
+ * - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`)
+ * - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`)
  *
  * Octokit normalizes these responses so that paginated results are always returned following
  * the same structure. One challenge is that if the list response has only one page, no Link
  * header is provided, so this header alone is not sufficient to check wether a response is
- * paginated or not.
- *
- * We check if a "total_count" key is present in the response data, but also make sure that
- * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
- * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
+ * paginated or not. For the exceptions with the namespace, a fallback check for the route
+ * paths has to be added in order to normalize the response. We cannot check for the total_count
+ * property because it also exists in the response of Get the combined status for a specific ref.
  */
+const REGEX = [
+    /^\/search\//,
+    /^\/repos\/[^/]+\/[^/]+\/commits\/[^/]+\/(check-runs|check-suites)([^/]|$)/,
+    /^\/installation\/repositories([^/]|$)/,
+    /^\/user\/installations([^/]|$)/,
+    /^\/repos\/[^/]+\/[^/]+\/actions\/secrets([^/]|$)/,
+    /^\/repos\/[^/]+\/[^/]+\/actions\/workflows(\/[^/]+\/runs)?([^/]|$)/,
+    /^\/repos\/[^/]+\/[^/]+\/actions\/runs(\/[^/]+\/(artifacts|jobs))?([^/]|$)/
+];
 export function normalizePaginatedListResponse(octokit, url, response) {
-    const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
+    const path = url.replace(octokit.request.endpoint.DEFAULTS.baseUrl, "");
+    const responseNeedsNormalization = REGEX.find(regex => regex.test(path));
     if (!responseNeedsNormalization)
         return;
     // keep the additional properties intact as there is currently no other way
@@ -36,4 +50,10 @@ export function normalizePaginatedListResponse(octokit, url, response) {
     response.data.repository_selection = repositorySelection;
   }
   response.data.total_count = totalCount;
+  Object.defineProperty(response.data, namespaceKey, {
+      get() {
+          octokit.log.warn(`[@octokit/paginate-rest] "response.data.${namespaceKey}" is deprecated for "GET ${path}". Get the results directly from "response.data"`);
+          return Array.from(data);
+      }
+  });
 }
node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js (1 line, generated, vendored, new file)
@@ -0,0 +1 @@
export const VERSION = "1.1.2";
@@ -3,16 +3,20 @@
  *
  * They have a `total_count` key in the response (search also has `incomplete_results`,
  * /installation/repositories also has `repository_selection`), as well as a key with
- * the list of the items which name varies from endpoint to endpoint.
+ * the list of the items which name varies from endpoint to endpoint:
+ *
+ * - https://developer.github.com/v3/search/#example (key `items`)
+ * - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`)
+ * - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`)
+ * - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`)
+ * - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`)
  *
  * Octokit normalizes these responses so that paginated results are always returned following
  * the same structure. One challenge is that if the list response has only one page, no Link
  * header is provided, so this header alone is not sufficient to check wether a response is
- * paginated or not.
- *
- * We check if a "total_count" key is present in the response data, but also make sure that
- * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
- * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
+ * paginated or not. For the exceptions with the namespace, a fallback check for the route
+ * paths has to be added in order to normalize the response. We cannot check for the total_count
+ * property because it also exists in the response of Get the combined status for a specific ref.
  */
 import { Octokit } from "@octokit/core";
 import { OctokitResponse } from "./types";
node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts (1 line, generated, vendored, new file)
@@ -0,0 +1 @@
export declare const VERSION = "1.1.2";
@@ -1,23 +1,37 @@
-const VERSION = "2.0.2";
+const VERSION = "1.1.2";
 
 /**
  * Some “list” response that can be paginated have a different response structure
  *
  * They have a `total_count` key in the response (search also has `incomplete_results`,
  * /installation/repositories also has `repository_selection`), as well as a key with
- * the list of the items which name varies from endpoint to endpoint.
+ * the list of the items which name varies from endpoint to endpoint:
+ *
+ * - https://developer.github.com/v3/search/#example (key `items`)
+ * - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`)
+ * - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`)
+ * - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`)
+ * - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`)
  *
  * Octokit normalizes these responses so that paginated results are always returned following
  * the same structure. One challenge is that if the list response has only one page, no Link
  * header is provided, so this header alone is not sufficient to check wether a response is
- * paginated or not.
- *
- * We check if a "total_count" key is present in the response data, but also make sure that
- * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
- * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
+ * paginated or not. For the exceptions with the namespace, a fallback check for the route
+ * paths has to be added in order to normalize the response. We cannot check for the total_count
+ * property because it also exists in the response of Get the combined status for a specific ref.
  */
+const REGEX = [
+    /^\/search\//,
+    /^\/repos\/[^/]+\/[^/]+\/commits\/[^/]+\/(check-runs|check-suites)([^/]|$)/,
+    /^\/installation\/repositories([^/]|$)/,
+    /^\/user\/installations([^/]|$)/,
+    /^\/repos\/[^/]+\/[^/]+\/actions\/secrets([^/]|$)/,
+    /^\/repos\/[^/]+\/[^/]+\/actions\/workflows(\/[^/]+\/runs)?([^/]|$)/,
+    /^\/repos\/[^/]+\/[^/]+\/actions\/runs(\/[^/]+\/(artifacts|jobs))?([^/]|$)/
+];
 function normalizePaginatedListResponse(octokit, url, response) {
-    const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
+    const path = url.replace(octokit.request.endpoint.DEFAULTS.baseUrl, "");
+    const responseNeedsNormalization = REGEX.find(regex => regex.test(path));
     if (!responseNeedsNormalization)
         return;
     // keep the additional properties intact as there is currently no other way
@@ -38,6 +52,12 @@ function normalizePaginatedListResponse(octokit, url, response) {
     response.data.repository_selection = repositorySelection;
   }
   response.data.total_count = totalCount;
+  Object.defineProperty(response.data, namespaceKey, {
+      get() {
+          octokit.log.warn(`[@octokit/paginate-rest] "response.data.${namespaceKey}" is deprecated for "GET ${path}". Get the results directly from "response.data"`);
+          return Array.from(data);
+      }
+  });
 }
 
 function iterator(octokit, route, parameters) {
node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map (1 line, generated, vendored, new file): diff suppressed because one or more lines are too long.
@@ -1,7 +1,7 @@
 {
   "name": "@octokit/plugin-paginate-rest",
   "description": "Octokit plugin to paginate REST API endpoint responses",
-  "version": "2.0.2",
+  "version": "1.1.2",
   "license": "MIT",
   "files": [
     "dist-*/",
@@ -22,18 +22,18 @@
   "devDependencies": {
     "@octokit/core": "^2.0.0",
     "@pika/pack": "^0.5.0",
-    "@pika/plugin-build-node": "^0.9.0",
-    "@pika/plugin-build-web": "^0.9.0",
-    "@pika/plugin-ts-standard-pkg": "^0.9.0",
+    "@pika/plugin-build-node": "^0.8.1",
+    "@pika/plugin-build-web": "^0.8.1",
+    "@pika/plugin-ts-standard-pkg": "^0.8.1",
     "@types/fetch-mock": "^7.3.1",
-    "@types/jest": "^25.1.0",
+    "@types/jest": "^24.0.18",
     "@types/node": "^13.1.0",
-    "fetch-mock": "^9.0.0",
+    "fetch-mock": "^8.0.0",
     "jest": "^24.9.0",
     "prettier": "^1.18.2",
     "semantic-release": "^17.0.0",
     "semantic-release-plugin-update-version-in-files": "^1.0.0",
-    "ts-jest": "^25.1.0",
+    "ts-jest": "^24.1.0",
     "typescript": "^3.7.2"
   },
   "publishConfig": {
node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/README.md (2717 lines, generated, vendored, new file): diff suppressed because it is too large.
node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js (13196 lines, generated, vendored, new file): diff suppressed because it is too large.
node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map (1 line, generated, vendored, new file): diff suppressed because one or more lines are too long.
Some files were not shown because too many files have changed in this diff.