Mirror of https://github.com/github/codeql-action.git (synced 2025-12-12 10:44:43 +08:00)

Compare commits: pre...python-set (330 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | f65b956f54 |  |
|  | 42f07e2f87 |  |
|  | d190a018d6 |  |
|  | 4d4cc33d97 |  |
|  | 1c95faf847 |  |
|  | 9769e4a6df |  |
|  | 315a9f4b3c |  |
|  | 0446cb0aff |  |
|  | 29cf06569d |  |
|  | ee63f4ee4b |  |
|  | 5b4f4e40af |  |
|  | 58a0034549 |  |
|  | c7c1aa8045 |  |
|  | b673c57b89 |  |
|  | d138b00811 |  |
|  | b86c3701ed |  |
|  | 7bb6ac6c60 |  |
|  | d1d80761ef |  |
|  | 7a78ec0a54 |  |
|  | da3d6d25eb |  |
|  | c3dcf26eaf |  |
|  | 189b6ef4bf |  |
|  | 1a4c658bbf |  |
|  | ec154779ac |  |
|  | ca775cfb2e |  |
|  | fb9f2af49f |  |
|  | 60126bfb39 |  |
|  | 24367a89b5 |  |
|  | 70980b9f32 |  |
|  | bf5b437adb |  |
|  | b6efd2e6de |  |
|  | 8a6b404471 |  |
|  | d781c667b1 |  |
|  | 56417be251 |  |
|  | abf6f239fa |  |
|  | 9fb69dda17 |  |
|  | dcebdd6441 |  |
|  | 56e74b9096 |  |
|  | 13ee335beb |  |
|  | 07caa0f5cf |  |
|  | f77ab09bf4 |  |
|  | 8d908eeab3 |  |
|  | cfcff89771 |  |
|  | fe3dbb7e64 |  |
|  | 1aeb7665e7 |  |
|  | 0086c2ecdb |  |
|  | 9da537eb33 |  |
|  | 5ab09ae291 |  |
|  | c41d287cae |  |
|  | 8947510a57 |  |
|  | 5d84e87b3d |  |
|  | 9bc459c5f1 |  |
|  | 77e9a735f6 |  |
|  | 57a57713c3 |  |
|  | a0bf50cb7b |  |
|  | 72803c4251 |  |
|  | eaf6649611 |  |
|  | 55a6f9e0a8 |  |
|  | dfed1f7eea |  |
|  | 580e603e94 |  |
|  | de7ff148e5 |  |
|  | 480467971e |  |
|  | e2a8f32427 |  |
|  | 260a93fe06 |  |
|  | dc2678801a |  |
|  | c953f77bb6 |  |
|  | aa6c2c5bda |  |
|  | a52f1a55ed |  |
|  | 1bb294af6b |  |
|  | 25a0a6baed |  |
|  | 4b37db72e4 |  |
|  | 04b2540e30 |  |
|  | e0299c3c04 |  |
|  | 0e3f8311ed |  |
|  | ca76a2ca94 |  |
|  | aad14bf2cb |  |
|  | a08742f199 |  |
|  | 6afe41036b |  |
|  | ee4cc86b19 |  |
|  | 0607771cc2 |  |
|  | 151d531bd0 |  |
|  | 51becd2cf8 |  |
|  | a66f2b0b11 |  |
|  | 504c8cfc6f |  |
|  | a0d4330434 |  |
|  | 7c00663f08 |  |
|  | f8c87948ab |  |
|  | 9566d8c220 |  |
|  | 366d8a32d1 |  |
|  | bb9ed79f3d |  |
|  | 17548064f9 |  |
|  | ef507971e7 |  |
|  | 96d02d50f7 |  |
|  | 0fdc2c71e4 |  |
|  | 28944b580b |  |
|  | 388403b46e |  |
|  | 32c9898fa4 |  |
|  | 56292b1fa3 |  |
|  | 50a2815790 |  |
|  | a19d19e0a3 |  |
|  | 153a598a97 |  |
|  | f4cf65ca2d |  |
|  | b0af5695e6 |  |
|  | 43c1bea680 |  |
|  | 6846c702da |  |
|  | 559e2600c1 |  |
|  | 5bb9e6e131 |  |
|  | 464ce1b43a |  |
|  | 8530f5b76a |  |
|  | a67896b792 |  |
|  | b3ffa760ab |  |
|  | 3871ca717b |  |
|  | a091618158 |  |
|  | 04adf2bf60 |  |
|  | 74c48f71fa |  |
|  | b8ac06a9c8 |  |
|  | 7581ac8b17 |  |
|  | af252d2f0d |  |
|  | cb384e776b |  |
|  | 403832b950 |  |
|  | 52e52435f7 |  |
|  | bc21c8f6f3 |  |
|  | 9a784b1f57 |  |
|  | b3c9d6f3a9 |  |
|  | f009c4c924 |  |
|  | 852d99d8e2 |  |
|  | 052d39e909 |  |
|  | 107d8ffc4c |  |
|  | 88231094bf |  |
|  | 211ad30f72 |  |
|  | 350bf488da |  |
|  | a1d945f14f |  |
|  | bd4042802d |  |
|  | 02776246bf |  |
|  | 66be268a09 |  |
|  | 56f06c77fd |  |
|  | 98f8945cfb |  |
|  | a30a5ba788 |  |
|  | 9133b2b54d |  |
|  | 8e098cbb87 |  |
|  | 7ae8c32cbe |  |
|  | beedd317d2 |  |
|  | 8a67191278 |  |
|  | 1ce7f98898 |  |
|  | 6d413dd723 |  |
|  | 31996935e6 |  |
|  | bcb5b28954 |  |
|  | 8622312249 |  |
|  | c0c67ce80f |  |
|  | bc9591a12b |  |
|  | dcba70915d |  |
|  | 2758bd30c8 |  |
|  | f4001a0790 |  |
|  | d55f711b71 |  |
|  | 2845a93f4c |  |
|  | 74f864bee1 |  |
|  | 38c231113e |  |
|  | 34c941dc31 |  |
|  | 5eccb79587 |  |
|  | 11a9af0387 |  |
|  | 6d036cef6f |  |
|  | f9768ac4ba |  |
|  | 3ff198f23b |  |
|  | ff8fe44e0c |  |
|  | 3f2a60be8a |  |
|  | 4c6749115a |  |
|  | 608ed15968 |  |
|  | 14f179f70b |  |
|  | dc4009c7ed |  |
|  | 14d602cced |  |
|  | 24096a1cb3 |  |
|  | 74d434c5ca |  |
|  | fff3de9938 |  |
|  | 1aae76b906 |  |
|  | 013c02758e |  |
|  | 0b53ebbc36 |  |
|  | 6de3e1cde4 |  |
|  | c9d0312cb7 |  |
|  | 0cdf645694 |  |
|  | d00417a341 |  |
|  | 7928587bdf |  |
|  | 87ecd0d0cc |  |
|  | 7e2e297e07 |  |
|  | b97097aaed |  |
|  | 8a8a49d3c5 |  |
|  | fcb696ec59 |  |
|  | c2d2dfdcdd |  |
|  | 042ab541fd |  |
|  | 19faafba94 |  |
|  | 476c8a44ba |  |
|  | f9ef310b75 |  |
|  | 6bd7f17e0e |  |
|  | 582fd14a81 |  |
|  | 8425341ae0 |  |
|  | 1f2cca021a |  |
|  | fa9e0ac2a6 |  |
|  | a9de5b50d7 |  |
|  | af4edf6546 |  |
|  | 5a97f7e980 |  |
|  | d4fb7fc762 |  |
|  | 00ebedc522 |  |
|  | 840dc5ee9a |  |
|  | c1add46efa |  |
|  | e35c90f53d |  |
|  | 6db8182349 |  |
|  | 202704856d |  |
|  | 5ea736059a |  |
|  | b4610ac367 |  |
|  | a0d60d5d9e |  |
|  | f18fffbea8 |  |
|  | 655c4497ce |  |
|  | d7a2025f2d |  |
|  | 22501fd7c8 |  |
|  | 07e22b1f4a |  |
|  | 3c2191ffdd |  |
|  | 28abced8ca |  |
|  | 50dcaaf00d |  |
|  | 30f7117e6a |  |
|  | 28a878efc3 |  |
|  | d518039a6b |  |
|  | 855f965205 |  |
|  | 2909e97a32 |  |
|  | 4997c3ff4d |  |
|  | 0bd4da3a6c |  |
|  | 98ad2fc49d |  |
|  | 3ca3147cd4 |  |
|  | 96da037d49 |  |
|  | da1e237d1e |  |
|  | 054f867322 |  |
|  | 1e600686e7 |  |
|  | cd1625a162 |  |
|  | 8788e5aa59 |  |
|  | 8fb9090674 |  |
|  | 10a2fd615f |  |
|  | 8b71cf3e5f |  |
|  | ae301902e1 |  |
|  | ddee374101 |  |
|  | 080dc8c3f0 |  |
|  | 6d1f969b1c |  |
|  | ff40939f66 |  |
|  | 7b32c3c950 |  |
|  | 90c07ef21d |  |
|  | 852b9186d6 |  |
|  | 63f52e71c0 |  |
|  | 3a883af8a6 |  |
|  | 886b7d3e6e |  |
|  | 4e12efc7c3 |  |
|  | 5c5f422edb |  |
|  | 97ef91227e |  |
|  | 25e5256866 |  |
|  | 5ec6b7524f |  |
|  | b366432cb3 |  |
|  | fa0a733046 |  |
|  | 0e6df42024 |  |
|  | 58c1abf92e |  |
|  | 6507fba7ec |  |
|  | aa54af7018 |  |
|  | 74c9991849 |  |
|  | f49335fc3b |  |
|  | d7b9f5a097 |  |
|  | 572c8bbc0c |  |
|  | 0347b72305 |  |
|  | 27cc8b23fe |  |
|  | 584df475ca |  |
|  | cd95d34497 |  |
|  | 88c1b7fb89 |  |
|  | 51b42fcf78 |  |
|  | 015ead73d9 |  |
|  | c351304778 |  |
|  | 96901ac7d8 |  |
|  | cc471c2014 |  |
|  | c88fb695ab |  |
|  | ec4d38a9a5 |  |
|  | 15bd158ded |  |
|  | 256c63a715 |  |
|  | a76042ab4a |  |
|  | 1477a43cc8 |  |
|  | f17ebc80bd |  |
|  | c0d9de18c0 |  |
|  | 52cd1f2261 |  |
|  | 3455736978 |  |
|  | f668f5fc74 |  |
|  | 3aa3d6a2b6 |  |
|  | 538cbdd614 |  |
|  | 49575f87c4 |  |
|  | 5a800ccbfa |  |
|  | cc2c18d6a8 |  |
|  | 4c11b3d9bf |  |
|  | a511aca9f1 |  |
|  | c3847056c5 |  |
|  | 189a899282 |  |
|  | c5ecb82753 |  |
|  | 4dc964d906 |  |
|  | dc27ff90bd |  |
|  | cf266cbf27 |  |
|  | 1f29db50bb |  |
|  | c979850d28 |  |
|  | baa9c9e0df |  |
|  | d966ea2f52 |  |
|  | 6bab450a9a |  |
|  | 583f8a923c |  |
|  | b73b259103 |  |
|  | 4fff14bba4 |  |
|  | ab918b676b |  |
|  | 290b34d5df |  |
|  | dcd81b5847 |  |
|  | d90fca396a |  |
|  | 5218f937b3 |  |
|  | 984552a36e |  |
|  | d46c1c7f29 |  |
|  | 43e27012da |  |
|  | 3d3dccf92d |  |
|  | 8ff10b4a6b |  |
|  | d68eb11bae |  |
|  | 4e9886ad2b |  |
|  | 1fe0932cc2 |  |
|  | 5bceb2be38 |  |
|  | 129ce28897 |  |
|  | a23cb1d61a |  |
|  | 0c4fc16b49 |  |
|  | b6a0306228 |  |
|  | e52e34ba17 |  |
|  | cffc0f7b4e |  |
|  | 5d2700f9cb |  |
|  | 1da651c219 |  |
|  | 26e955cfa3 |  |
|  | 546d5a8843 |  |
|  | 43de3a9949 |  |
|  | 7963db13d8 |  |
|  | f237316c5a |  |
.editorconfig (new file, 10 lines)
@@ -0,0 +1,10 @@
root = true

[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[*.ts]
indent_style = space
indent_size = 2
.github/ISSUE_TEMPLATE/config.yml (new file, vendored, 5 lines)
@@ -0,0 +1,5 @@
blank_issues_enabled: true
contact_links:
  - name: Contact GitHub Support
    url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
    about: Contact Support about code scanning
.github/codeql/codeql-config.yml (vendored, 12 lines changed)
@@ -1,4 +1,14 @@
 name: "CodeQL config"
 queries:
   - name: Run custom queries
     uses: ./queries
+  # Run all extra query suites, both because we want to
+  # and because it'll act as extra testing. This is why
+  # we include both even though one is a superset of the
+  # other, because we're testing the parsing logic and
+  # that the suites exist in the codeql bundle.
+  - uses: security-extended
+  - uses: security-and-quality
+paths-ignore:
+  - tests
+  - lib
.github/pull_request_template.md (vendored, 3 lines changed)
@@ -1,7 +1,4 @@
 ### Merge / deployment checklist
 
-- Run test builds as necessary. Can be on this repository or elsewhere as needed in order to test the change - please include links to tests in other repos!
-  - [ ] CodeQL using init/analyze actions
-  - [ ] 3rd party tool using upload action
 - [ ] Confirm this change is backwards compatible with existing workflows.
 - [ ] Confirm the [readme](https://github.com/github/codeql-action/blob/master/README.md) has been updated if necessary.
.github/update-release-branch.py (new file, vendored, 178 lines)
@@ -0,0 +1,178 @@
import datetime
from github import Github
import random
import requests
import subprocess
import sys

# The branch being merged from.
# This is the one that contains day-to-day development work.
MAIN_BRANCH = 'main'
# The branch being merged into.
# This is the release branch that users reference.
LATEST_RELEASE_BRANCH = 'v1'
# Name of the remote
ORIGIN = 'origin'

# Runs git with the given args and returns the stdout.
# Raises an error if git does not exit successfully.
def run_git(*args):
  cmd = ['git', *args]
  p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  if (p.returncode != 0):
    raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
  return p.stdout.decode('ascii')

# Returns true if the given branch exists on the origin remote
def branch_exists_on_remote(branch_name):
  return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''

# Opens a PR from the given branch to the release branch
def open_pr(repo, all_commits, short_main_sha, branch_name):
  # Sort the commits into the pull requests that introduced them,
  # and any commits that don't have a pull request
  pull_requests = []
  commits_without_pull_requests = []
  for commit in all_commits:
    pr = get_pr_for_commit(repo, commit)

    if pr is None:
      commits_without_pull_requests.append(commit)
    elif not any(p for p in pull_requests if p.number == pr.number):
      pull_requests.append(pr)

  print('Found ' + str(len(pull_requests)) + ' pull requests')
  print('Found ' + str(len(commits_without_pull_requests)) + ' commits not in a pull request')

  # Sort PRs and commits by age
  pull_requests = sorted(pull_requests, key=lambda pr: pr.number)
  commits_without_pull_requests = sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)

  # Start constructing the body text
  body = 'Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH

  conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
  body += '\n\nConductor for this PR is @' + conductor

  # List all PRs merged
  if len(pull_requests) > 0:
    body += '\n\nContains the following pull requests:'
    for pr in pull_requests:
      merger = get_merger_of_pr(repo, pr)
      body += '\n- #' + str(pr.number)
      body += ' - ' + pr.title
      body += ' (@' + merger + ')'

  # List all commits not part of a PR
  if len(commits_without_pull_requests) > 0:
    body += '\n\nContains the following commits not from a pull request:'
    for commit in commits_without_pull_requests:
      body += '\n- ' + commit.sha
      body += ' - ' + get_truncated_commit_message(commit)
      body += ' (@' + commit.author.login + ')'

  title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH

  # Create the pull request
  pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
  print('Created PR #' + str(pr.number))

  # Assign the conductor
  pr.add_to_assignees(conductor)
  print('Assigned PR to ' + conductor)

# Gets the person who should be in charge of the mergeback PR
def get_conductor(repo, pull_requests, other_commits):
  # If there are any PRs then use whoever merged the last one
  if len(pull_requests) > 0:
    return get_merger_of_pr(repo, pull_requests[-1])

  # Otherwise take the author of the latest commit
  return other_commits[-1].author.login

# Gets a list of the SHAs of all commits that have happened on main
# since the release branched off.
# This will not include any commits that exist on the release branch
# that aren't on main.
def get_commit_difference(repo):
  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MAIN_BRANCH).strip().split('\n')

  # Convert to full-fledged commit objects
  commits = [repo.get_commit(c) for c in commits]

  # Filter out merge commits for PRs
  return list(filter(lambda c: not is_pr_merge_commit(c), commits))

# Is the given commit the automatic merge commit from when merging a PR
def is_pr_merge_commit(commit):
  return commit.committer.login == 'web-flow' and len(commit.parents) > 1

# Gets a copy of the commit message that should display nicely
def get_truncated_commit_message(commit):
  message = commit.commit.message.split('\n')[0]
  if len(message) > 60:
    return message[:57] + '...'
  else:
    return message

# Converts a commit into the PR that introduced it to the main branch.
# Returns the PR object, or None if no PR could be found.
def get_pr_for_commit(repo, commit):
  prs = commit.get_pulls()

  if prs.totalCount > 0:
    # In the case that there are multiple PRs, return the earliest one
    prs = list(prs)
    prs = sorted(prs, key=lambda pr: int(pr.number))
    return prs[0]
  else:
    return None

# Get the person who merged the pull request.
# For most cases this will be the same as the author, but for PRs opened
# by external contributors getting the merger will get us the GitHub
# employee who reviewed and merged the PR.
def get_merger_of_pr(repo, pr):
  return repo.get_commit(pr.merge_commit_sha).author.login

def main():
  if len(sys.argv) != 3:
    raise Exception('Usage: update-release-branch.py <github token> <repository nwo>')
  github_token = sys.argv[1]
  repository_nwo = sys.argv[2]

  repo = Github(github_token).get_repo(repository_nwo)

  # Print what we intend to do
  print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
  short_main_sha = run_git('rev-parse', '--short', MAIN_BRANCH).strip()
  print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)

  # See if there are any commits to merge in
  commits = get_commit_difference(repo)
  if len(commits) == 0:
    print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
    return

  # The branch name is based off of the name of branch being merged into
  # and the SHA of the branch being merged from. Thus if the branch already
  # exists we can assume we don't need to recreate it.
  new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_main_sha
  print('Branch name is ' + new_branch_name)

  # Check if the branch already exists. If so we can abort as this script
  # has already run on this combination of branches.
  if branch_exists_on_remote(new_branch_name):
    print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
    return

  # Create the new branch and push it to the remote
  print('Creating branch ' + new_branch_name)
  run_git('checkout', '-b', new_branch_name, MAIN_BRANCH)
  run_git('push', ORIGIN, new_branch_name)

  # Open a PR to update the branch
  open_pr(repo, commits, short_main_sha, new_branch_name)

if __name__ == '__main__':
  main()
.github/workflows/codeql.yml (vendored, 17 lines changed)
@@ -1,6 +1,6 @@
 name: "CodeQL action"
 
-on: [push]
+on: [push, pull_request]
 
 jobs:
   build:
@@ -10,8 +10,19 @@ jobs:
     runs-on: ${{ matrix.os }}
 
     steps:
-    - uses: actions/checkout@v1
+    - uses: actions/checkout@v2
+      with:
+        # Must fetch at least the immediate parents so that if this is
+        # a pull request then we can checkout the head of the pull request.
+        fetch-depth: 2
+
+    # If this run was triggered by a pull request event then checkout
+    # the head of the pull request instead of the merge commit.
+    - run: git checkout HEAD^2
+      if: ${{ github.event_name == 'pull_request' }}
+
     - uses: ./init
       with:
+        languages: javascript
         config-file: ./.github/codeql/codeql-config.yml
     - uses: ./analyze
.github/workflows/integration-testing.yml (vendored, 162 lines changed)
@@ -1,22 +1,152 @@
 name: "Integration Testing"
 
-on: [push]
+on: [push, pull_request]
 
 jobs:
-  dispatch-events:
-    if: github.event.repository.full_name == 'github/codeql-action'
+  multi-language-repo_test-autodetect-languages:
     runs-on: ubuntu-latest
-    steps:
-    - name: Send repository dispatch events
-      run: |
-        curl -X POST \
-        -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
-        -H "Accept: application/vnd.github.everest-preview+json" \
-        https://api.github.com/repos/Anthophila/amazon-cognito-js-copy/dispatches \
-        -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
-
-        curl -X POST \
-        -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
-        -H "Accept: application/vnd.github.everest-preview+json" \
-        https://api.github.com/repos/Anthophila/electron-test-action/dispatches \
-        -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+    - run: |
+        cd "$CODEQL_ACTION_DATABASE_DIR"
+        # List all directories as there will be precisely one directory per database
+        # but there may be other files in this directory such as query suites.
+        if [ "$(ls -d */ | wc -l)" != 6 ] || \
+           [[ ! -d cpp ]] || \
+           [[ ! -d csharp ]] || \
+           [[ ! -d go ]] || \
+           [[ ! -d java ]] || \
+           [[ ! -d javascript ]] || \
+           [[ ! -d python ]]; then
+          echo "Did not find expected number of databases. Database dir contains: $(ls)"
+          exit 1
+        fi
+
+  multi-language-repo_test-custom-queries-and-remote-config:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest, macos-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+      with:
+        languages: cpp,csharp,java,javascript,python
+        config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+
+  # Currently is not possible to analyze Go in conjunction with other languages in macos
+  multi-language-repo_test-go-custom-queries:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest, macos-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+    - uses: actions/setup-go@v2
+      if: ${{ matrix.os == 'macos-latest' }}
+      with:
+        go-version: '^1.13.1'
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+      with:
+        languages: go
+        config-file: ./.github/codeql/custom-queries.yml
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+
+  multi-language-repo_rubocop:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - name: Set up Ruby
+      uses: ruby/setup-ruby@v1
+      with:
+        ruby-version: 2.6
+    - name: Install Code Scanning integration
+      run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
+    - name: Install dependencies
+      run: bundle install
+    - name: Rubocop run
+      run: |
+        bash -c "
+        bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
+        [[ $? -ne 2 ]]
+        "
+    - uses: ./../action/upload-sarif
+      with:
+        sarif_file: rubocop.sarif
+      env:
+        TEST_MODE: true
+
+  test-proxy:
+    runs-on: ubuntu-latest
+    container:
+      image: ubuntu:18.04
+      options: --dns 127.0.0.1
+    services:
+      squid-proxy:
+        image: datadog/squid:latest
+        ports:
+        - 3128:3128
+    env:
+      https_proxy: http://squid-proxy:3128
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
+    - uses: ./../action/init
+      with:
+        languages: javascript
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
.github/workflows/js-uptodate-check.yml (deleted, vendored, 27 lines)
@@ -1,27 +0,0 @@
name: "Check generated JavaScript"

on: [pull_request]

jobs:
  check-js:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: Check generated JavaScript
      run: |
        # Sanity check that repo is clean to start with
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then this workflow needs attention...
          >&2 echo "Failed: Repo should be clean before testing!"
          exit 1
        fi
        # Generate the JavaScript files
        npm run-script build
        # Check that repo is still clean
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then the PR needs attention
          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
          exit 1
        fi
        echo "Success: JavaScript files are up to date"
.github/workflows/npm-test.yml (deleted, vendored, 12 lines)
@@ -1,12 +0,0 @@
name: "npm run-script test"

on: [push]

jobs:
  npm-test:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: npm run-script test
      run: npm run-script test
.github/workflows/pr-checks.yml (new file, vendored, 71 lines)
@@ -0,0 +1,71 @@
name: "PR checks"

on: [push, pull_request]

jobs:
  tslint:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: tslint
      run: npm run-script lint

  check-js:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: Check generated JavaScript
      run: |
        # Sanity check that repo is clean to start with
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then this workflow needs attention...
          >&2 echo "Failed: Repo should be clean before testing!"
          exit 1
        fi
        # Generate the JavaScript files
        npm run-script build
        # Check that repo is still clean
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then the PR needs attention
          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
          git status
          exit 1
        fi
        echo "Success: JavaScript files are up to date"

  check-node-modules:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: Check node modules up to date
      run: |
        # Sanity check that repo is clean to start with
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then this workflow needs attention...
          >&2 echo "Failed: Repo should be clean before testing!"
          exit 1
        fi

        # Reinstall modules and then clean to remove absolute paths
        # Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
        npm ci
        npm run removeNPMAbsolutePaths
        # Check that repo is still clean
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then the PR needs attention
          >&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
          git status
          exit 1
        fi
        echo "Success: node_modules are up to date"

  npm-test:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: npm run-script test
      run: npm run-script test
.github/workflows/ts-lint.yml (deleted, vendored, 12 lines)
@@ -1,12 +0,0 @@
name: "TSLint"

on: [push]

jobs:
  tslint:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: tslint
      run: npm run-script lint
.github/workflows/update-release-branch.yml (new file, vendored, 32 lines)
@@ -0,0 +1,32 @@
name: Update release branch
on:
  schedule:
    - cron: 0 9 * * 1
  repository_dispatch:
    # Example of how to trigger this:
    # curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
    # Replace <token> with a personal access token from this page: https://github.com/settings/tokens
    types: [update-release-branch]
  workflow_dispatch:

jobs:
  update:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          # Need full history so we calculate diffs
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.5

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install PyGithub==1.51 requests

      - name: Update release branch
        run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
.vscode/launch.json (new file, 25 lines)
@@ -0,0 +1,25 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "type": "node",
            "request": "launch",
            "name": "Debug AVA test file",
            "runtimeExecutable": "${workspaceFolder}/node_modules/.bin/ava",
            "runtimeArgs": [
                "${file}",
                "--break",
                "--serial",
                "--timeout=20m"
            ],
            "port": 9229,
            "outputCapture": "std",
            "skipFiles": [
                "<node_internals>/**/*.js"
            ]
        }
    ]
}
.vscode/settings.json (new file, 10 lines)
@@ -0,0 +1,10 @@
{
    "files.exclude": {
        // include the defaults from VS Code
        "**/.git": true,
        "**/.DS_Store": true,

        // transpiled JavaScript
        "lib": true,
    }
}
CONTRIBUTING.md
@@ -10,13 +10,34 @@ Contributions to this project are [released](https://help.github.com/articles/gi
 Please note that this project is released with a [Contributor Code of Conduct][code-of-conduct]. By participating in this project you agree to abide by its terms.
 
+## Development and Testing
+
+Before you start, ensure that you have a recent version of node installed. You can see which version of node is used by the action in `init/action.yml`.
+
+### Common tasks
+
+* Transpile the TypeScript to JavaScript: `npm run build`. Note that the JavaScript files are committed to git.
+* Run tests: `npm run test`. You’ll need to ensure that the JavaScript files are up-to-date first by running the command above.
+* Run the linter: `npm run lint`.
+
+This project also includes configuration to run tests from VSCode (with support for breakpoints) - open the test file you wish to run and choose "Debug AVA test file" from the Run menu in the Run panel.
+
+### Running the action
+
+To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests.
+
+### Integration tests
+
+As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you’re making, you may want to add a test to this file or extend an existing one.
+
 ## Submitting a pull request
 
 1. [Fork][fork] and clone the repository
 2. Create a new branch: `git checkout -b my-branch-name`
 3. Make your change, add tests, and make sure the tests still pass
 4. Push to your fork and [submit a pull request][pr]
-5. Pat your self on the back and wait for your pull request to be reviewed and merged.
+5. Pat yourself on the back and wait for your pull request to be reviewed and merged.
+
+If you're a GitHub staff member, you can merge your own PR once it's approved; for external contributors, GitHub staff will merge your PR once it's approved.
 
 Here are a few things you can do that will increase the likelihood of your pull request being accepted:
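For reference, a new integration test typically follows the same shape as the existing jobs in `.github/workflows/integration-testing.yml`. A minimal sketch of a fragment to add under that file's `jobs:` key; the job name and language choice are illustrative, not part of the actual workflow, while the checkout-and-move steps mirror the pattern used by every existing job:

```yaml
# Hypothetical job name; the "Move codeql-action" step copies the pattern
# already used by the jobs in integration-testing.yml.
multi-language-repo_my-new-test:
  runs-on: ubuntu-latest
  steps:
    - uses: actions/checkout@v2
    # Move the action out of the workspace, then swap in the test fixture repo.
    - name: Move codeql-action
      shell: bash
      run: |
        mkdir ../action
        mv * .github ../action/
        mv ../action/tests/multi-language-repo/{*,.github} .
    - uses: ./../action/init
      with:
        languages: javascript   # illustrative choice
    - name: Build code
      shell: bash
      run: ./build.sh
    - uses: ./../action/analyze
      env:
        TEST_MODE: true
```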
README.md (113 lines changed)
@@ -1,9 +1,17 @@
 # CodeQL Action
 
-This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/semmle/ql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
+This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
+
+## License
+
+This project is released under the [MIT License](LICENSE).
+
+The underlying CodeQL CLI, used in this action, is licensed under the [GitHub CodeQL Terms and Conditions](https://securitylab.github.com/tools/codeql/license). As such, this action may be used on open source projects hosted on GitHub, and on private repositories that are owned by an organisation with GitHub Advanced Security enabled.
 
 ## Usage
 
+This is a short walkthrough, but for more information read [configuring code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning).
+
 To get code scanning results from CodeQL analysis on your repo you can use the following workflow as a template:
 
 ```yaml
@@ -12,21 +20,29 @@ name: "Code Scanning - Action"
 
 on:
   push:
+  pull_request:
   schedule:
     - cron: '0 0 * * 0'
 
 jobs:
   CodeQL-Build:
-
-    strategy:
-      fail-fast: false
-
+
     # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
     runs-on: ubuntu-latest
 
     steps:
     - name: Checkout repository
       uses: actions/checkout@v2
+      with:
+        # Must fetch at least the immediate parents so that if this is
+        # a pull request then we can checkout the head of the pull request.
+        # Only include this option if you are running this workflow on pull requests.
+        fetch-depth: 2
+
+    # If this run was triggered by a pull request event then checkout
+    # the head of the pull request instead of the merge commit.
+    # Only include this step if you are running this workflow on pull requests.
+    - run: git checkout HEAD^2
+      if: ${{ github.event_name == 'pull_request' }}
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
@@ -72,24 +88,9 @@ If you prefer to integrate this within an existing CI workflow, it should end up
     uses: github/codeql-action/analyze@v1
 ```
 
-### Actions triggers
+### Configuration file
 
-The CodeQL action should be run on `push` events, and on a `schedule`. `Push` events allow us to do a detailed analysis of the delta in a pull request, while the `schedule` event ensures that GitHub regularly scans the repository for the latest vulnerabilities, even if the repository becomes inactive. This action does not support the `pull_request` event.
-
-### Configuration
-
-You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses).
-
-You can disable the default queries using `disable-default-queries: true`.
-
-You can choose to ignore some files or folders from the analysis, or include additional files/folders for analysis. This *only* works for Javascript and Python analysis.
-Identifying potential files for extraction:
-
-- Scans each folder that's defined as `paths` in turn, traversing subfolders, and looking for relevant files.
-- If it finds a subfolder that's defined as `paths-ignore`, stop traversing.
-- If a file or folder is both in `paths` and `paths-ignore`, the `paths-ignore` is ignored.
-
-Use the `config-file` parameter of the init action to enable the configuration file. For example:
+Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
 
 ```yaml
 - uses: github/codeql-action/init@v1
@@ -97,72 +98,8 @@ Use the `config-file` parameter of the init action to enable the configuration f
     config-file: ./.github/codeql/codeql-config.yml
 ```
 
-A config file looks like this:
-
-```yaml
-name: "My CodeQL config"
-
-disable-default-queries: true
-
-queries:
-  - name: In-repo queries (Runs the queries located in the my-queries folder of the repo)
-    uses: ./my-queries
-  - name: External Javascript QL pack (Runs a QL pack located in an external repo)
-    uses: /Semmle/ql/javascript/ql/src/Electron@master
-  - name: External query (Runs a single query located in an external QL pack)
-    uses: Semmle/ql/javascript/ql/src/AngularJS/DeadAngularJSEventListener.ql@master
-  - name: Select query suite (Runs a query suites)
-    uses: ./codeql-querypacks/complex-python-querypack/rootAndBar.qls
-
-paths:
-  - src/util.ts
-
-paths-ignore:
-  - src
-  - lib
-```
+The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
 
 ## Troubleshooting
 
-### Trouble with Go dependencies
-
-#### If you use a vendor directory
-
-Try passing
-
-```yaml
-env:
-  GOFLAGS: "-mod=vendor"
-```
-
-to `github/codeql-action/analyze`.
-
-### If you do not use a vendor directory
-
-Dependencies on public repositories should just work. If you have dependencies on private repositories, one option is to use `git config` and a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) to authenticate when downloading dependencies. Add a section like
-
-```yaml
-steps:
-- name: Configure git private repo access
-  env:
-    TOKEN: ${{ secrets.GITHUB_PAT }}
-  run: |
-    git config --global url."https://${TOKEN}@github.com/foo/bar".insteadOf "https://github.com/foo/bar"
-    git config --global url."https://${TOKEN}@github.com/foo/baz".insteadOf "https://github.com/foo/baz"
-```
-
-before any codeql actions. A similar thing can also be done with an SSH key or deploy key.
-
-### C# using dotnet version 2 on linux
-
-This currently requires invoking `dotnet` with the `/p:UseSharedCompilation=false` flag. For example:
-
-```shell
-dotnet build /p:UseSharedCompilation=false
-```
-
-Version 3 does not require the additional flag.
-
-## License
-
-This project is released under the [MIT License](LICENSE).
+Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
action.yml (deleted, 32 lines)
@@ -1,32 +0,0 @@
name: 'CodeQL'
description: 'CodeQL analysis'
author: 'GitHub'
inputs:
  tools:
    description: URL of CodeQL tools
    required: false
    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
  languages:
    description: The languages to be analysed
    required: false
  token:
    default: ${{ github.token }}
  config-file:
    description: Path of the config file to use
    required: false
  check_name:
    description: The name of the check run to add text to.
  output:
    description: The path of the directory in which to save the SARIF results
    required: false
    default: '../results'
  upload:
    description: Upload the SARIF file
    required: false
    default: true
  matrix:
    default: ${{ toJson(matrix) }}
runs:
  using: 'node12'
  pre: './lib/setup-tracer.js'
  main: './lib/finalize-db.js'
analyze/action.yml
@@ -4,6 +4,7 @@ author: 'GitHub'
 inputs:
   check_name:
     description: The name of the check run to add text to.
+    required: false
   output:
     description: The path of the directory in which to save the SARIF results
     required: false
@@ -11,7 +12,18 @@ inputs:
   upload:
     description: Upload the SARIF file
     required: false
-    default: true
+    default: "true"
+  ram:
+    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
+    required: false
+  threads:
+    description: The number of threads to be used by CodeQL.
+    required: false
+    default: "1"
+  checkout_path:
+    description: "The path at which the analyzed repository was checked out. Used to relativeize any absolute paths in the uploaded SARIF file."
+    required: false
+    default: ${{ github.workspace }}
   token:
     default: ${{ github.token }}
   matrix:
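The new `ram` and `threads` inputs above can be set from a workflow step. A minimal sketch, assuming these hunks are from the analyze action's definition (the capture lost the file header; the input set suggests it) and using illustrative values:

```yaml
# Hypothetical values; ram is in MB per the description above, and
# threads defaults to "1" per the action definition.
- uses: github/codeql-action/analyze@v1
  with:
    ram: "4096"
    threads: "2"
```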
init/action.yml
@@ -5,15 +5,21 @@ inputs:
   tools:
     description: URL of CodeQL tools
     required: false
-    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
+    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200630/codeql-bundle.tar.gz
   languages:
     description: The languages to be analysed
     required: false
   token:
     default: ${{ github.token }}
+  matrix:
+    default: ${{ toJson(matrix) }}
   config-file:
     description: Path of the config file to use
     required: false
+  setup-python-dependencies:
+    description: Try to auto-install your python dependencies
+    required: true
+    default: 'true'
 runs:
   using: 'node12'
   main: '../lib/setup-tracer.js'
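Similarly, the `setup-python-dependencies` input added above defaults to `'true'`, so a workflow that manages its own Python dependencies could opt out. A sketch assuming a Python-only setup; the language choice is illustrative:

```yaml
# Opt out of automatic Python dependency installation
# (input shown in the hunk above).
- uses: github/codeql-action/init@v1
  with:
    languages: python
    setup-python-dependencies: 'false'
```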
jest.config.js (deleted, 11 lines)
@@ -1,11 +0,0 @@
-module.exports = {
-  clearMocks: true,
-  moduleFileExtensions: ['js', 'ts'],
-  testEnvironment: 'node',
-  testMatch: ['**/*.test.ts'],
-  testRunner: 'jest-circus/runner',
-  transform: {
-    '^.+\\.ts$': 'ts-jest'
-  },
-  verbose: true
-}
lib/analysis-paths.js (generated, 48 lines changed)
@@ -8,20 +8,52 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
-function includeAndExcludeAnalysisPaths(config, languages) {
+function isInterpretedLanguage(language) {
+    return language === 'javascript' || language === 'python';
+}
+// Matches a string containing only characters that are legal to include in paths on windows.
+exports.legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
+// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
+function buildIncludeExcludeEnvVar(paths) {
+    // Ignore anything containing a *
+    paths = paths.filter(p => p.indexOf('*') === -1);
+    // Some characters are illegal in path names in windows
+    if (process.platform === 'win32') {
+        paths = paths.filter(p => p.match(exports.legalWindowsPathCharactersRegex));
+    }
+    return paths.join('\n');
+}
+function includeAndExcludeAnalysisPaths(config) {
+    // The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
+    // control which files/directories are traversed when scanning.
+    // This allows including files that otherwise would not be scanned, or
+    // excluding and not traversing entire file subtrees.
+    // It does not understand globs or double-globs because that would require it to
+    // traverse the entire file tree to determine which files are matched.
+    // Any paths containing "*" are not included in these.
     if (config.paths.length !== 0) {
-        core.exportVariable('LGTM_INDEX_INCLUDE', config.paths.join('\n'));
+        core.exportVariable('LGTM_INDEX_INCLUDE', buildIncludeExcludeEnvVar(config.paths));
     }
     if (config.pathsIgnore.length !== 0) {
-        core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
+        core.exportVariable('LGTM_INDEX_EXCLUDE', buildIncludeExcludeEnvVar(config.pathsIgnore));
     }
-    function isInterpretedLanguage(language) {
-        return language === 'javascript' && language === 'python';
-    }
-    // Index include/exclude only work in javascript and python
-    // If some other language is detected/configured show a warning
-    if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) && !languages.every(isInterpretedLanguage)) {
+    // The 'LGTM_INDEX_FILTERS' environment variable controls which files are
+    // extracted or ignored. It does not control which directories are traversed.
+    // This does understand the glob and double-glob syntax.
+    const filters = [];
+    filters.push(...config.paths.map(p => 'include:' + p));
+    filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
+    if (filters.length !== 0) {
+        core.exportVariable('LGTM_INDEX_FILTERS', filters.join('\n'));
+    }
+    // Index include/exclude/filters only work in javascript and python.
+    // If any other languages are detected/configured then show a warning.
+    if ((config.paths.length !== 0 ||
+        config.pathsIgnore.length !== 0 ||
+        filters.length !== 0) &&
+        !config.languages.every(isInterpretedLanguage)) {
         core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
     }
 }
 exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
+//# sourceMappingURL=analysis-paths.js.map
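To make the include/exclude/filters split above concrete: per the code, literal paths feed `LGTM_INDEX_INCLUDE`/`LGTM_INDEX_EXCLUDE`, while every entry, globs included, becomes an `include:`/`exclude:` line in `LGTM_INDEX_FILTERS`. A sketch with illustrative paths:

```yaml
# Hypothetical codeql-config.yml excerpt.
paths:
  - src            # -> LGTM_INDEX_INCLUDE "src" and LGTM_INDEX_FILTERS "include:src"
  - '**/generated' # contains '*': dropped from LGTM_INDEX_INCLUDE,
                   # kept in LGTM_INDEX_FILTERS as "include:**/generated"
paths-ignore:
  - tests          # -> LGTM_INDEX_EXCLUDE "tests" and LGTM_INDEX_FILTERS "exclude:tests"
```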
lib/analysis-paths.js.map (new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAClD,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}
lib/analysis-paths.test.js (generated, new file, 41 lines)
@@ -0,0 +1,41 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const analysisPaths = __importStar(require("./analysis-paths"));
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("emptyPaths", async (t) => {
    const config = {
        languages: [],
        queries: {},
        pathsIgnore: [],
        paths: [],
    };
    analysisPaths.includeAndExcludeAnalysisPaths(config);
    t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
    t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
    t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
});
ava_1.default("nonEmptyPaths", async (t) => {
    const config = {
        languages: [],
        queries: {},
        paths: ['path1', 'path2', '**/path3'],
        pathsIgnore: ['path4', 'path5', 'path6/**'],
    };
    analysisPaths.includeAndExcludeAnalysisPaths(config);
    t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
    t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
    t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
});
//# sourceMappingURL=analysis-paths.test.js.map
1 lib/analysis-paths.test.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;KACV,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;KAC5C,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}
22 lib/api-client.js generated Normal file
@@ -0,0 +1,22 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github"));
const console_log_level_1 = __importDefault(require("console-log-level"));
exports.getApiClient = function () {
    return new github.GitHub(core.getInput('token'), {
        userAgent: "CodeQL Action",
        log: console_log_level_1.default({ level: "debug" })
    });
};
//# sourceMappingURL=api-client.js.map
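For orientation, and not part of the commit: a rough sketch of how this helper is consumed elsewhere. The request shape is copied from getLanguagesInRepo in lib/config-utils.js further down; the fetchRepoLanguages wrapper name is made up for the example.

// Hypothetical wrapper (illustration only); the request mirrors
// getLanguagesInRepo in lib/config-utils.js below.
const api = require('./lib/api-client');
async function fetchRepoLanguages(owner, repo) {
    // getApiClient() reads the 'token' action input and returns an
    // authenticated @actions/github client with debug-level logging.
    const response = await api.getApiClient().request("GET /repos/:owner/:repo/languages", { owner, repo });
    return response.data;
}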
1 lib/api-client.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEnC,QAAA,YAAY,GAAG;IAC1B,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB;QACE,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC"}
28 lib/autobuild.js generated
@@ -8,8 +8,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
-const exec = __importStar(require("@actions/exec"));
-const path = __importStar(require("path"));
+const codeql_1 = require("./codeql");
 const sharedEnv = __importStar(require("./shared-environment"));
 const util = __importStar(require("./util"));
 async function run() {
@@ -22,35 +21,30 @@ async function run() {
         // We want pick the dominant language in the repo from the ones we're able to build
         // The languages are sorted in order specified by user or by lines of code if we got
         // them from the GitHub API, so try to build the first language on the list.
-        const language = (_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')[0];
+        const autobuildLanguages = ((_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];
+        const language = autobuildLanguages[0];
         if (!language) {
             core.info("None of the languages in this project require extra build steps");
             return;
         }
         core.debug(`Detected dominant traced language: ${language}`);
+        if (autobuildLanguages.length > 1) {
+            core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
+        }
         core.startGroup(`Attempting to automatically build ${language} code`);
-        // TODO: share config accross actions better via env variables
-        const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
-        const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
-        const autobuildCmd = path.join(path.dirname(codeqlCmd), language, 'tools', cmdName);
-        // Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
-        // This is because of an issue with Azure pipelines timing out connections after 4 minutes
-        // and Maven not properly handling closed connections
-        // Otherwise long build processes will timeout when pulling down Java packages
-        // https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
-        let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
-        process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
-        await exec.exec(autobuildCmd);
+        const codeQL = codeql_1.getCodeQL();
+        await codeQL.runAutobuild(language);
         core.endGroup();
     }
     catch (error) {
-        core.setFailed(error.message);
+        core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
         await util.reportActionFailed('autobuild', error.message, error.stack);
         return;
     }
     await util.reportActionSucceeded('autobuild');
 }
 run().catch(e => {
-    core.setFailed("autobuild action failed: " + e);
+    core.setFailed("autobuild action failed. " + e);
     console.log(e);
 });
+//# sourceMappingURL=autobuild.js.map
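A minimal sketch, not part of the diff, of the JAVA_TOOL_OPTIONS adjustment that this commit moves from here into runAutobuild in lib/codeql.js below; the '-Xmx2g' starting value is an invented example.

// Illustration only: the keep-alive flags are appended to whatever the
// user already has in JAVA_TOOL_OPTIONS, exactly as runAutobuild does.
process.env['JAVA_TOOL_OPTIONS'] = '-Xmx2g';
const javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
// -> '-Xmx2g -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false'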
1 lib/autobuild.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAqC;AACrC,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
226 lib/codeql.js generated Normal file
@@ -0,0 +1,226 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
const util = __importStar(require("./util"));
/**
 * Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
 * Can be overridden in tests using `setCodeQL`.
 */
let cachedCodeQL = undefined;
/**
 * Environment variable used to store the location of the CodeQL CLI executable.
 * Value is set by setupCodeQL and read by getCodeQL.
 */
const CODEQL_ACTION_CMD = "CODEQL_ACTION_CMD";
async function setupCodeQL() {
    try {
        const codeqlURL = core.getInput('tools', { required: true });
        const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
        let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
        if (codeqlFolder) {
            core.debug(`CodeQL found in cache ${codeqlFolder}`);
        }
        else {
            const codeqlPath = await toolcache.downloadTool(codeqlURL);
            const codeqlExtracted = await toolcache.extractTar(codeqlPath);
            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
        }
        let codeqlCmd = path.join(codeqlFolder, 'codeql', 'codeql');
        if (process.platform === 'win32') {
            codeqlCmd += ".exe";
        }
        else if (process.platform !== 'linux' && process.platform !== 'darwin') {
            throw new Error("Unsupported plaform: " + process.platform);
        }
        cachedCodeQL = getCodeQLForCmd(codeqlCmd);
        core.exportVariable(CODEQL_ACTION_CMD, codeqlCmd);
        return cachedCodeQL;
    }
    catch (e) {
        core.error(e);
        throw new Error("Unable to download and extract CodeQL CLI");
    }
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url) {
    const match = url.match(/\/codeql-bundle-(.*)\//);
    if (match === null || match.length < 2) {
        throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
    }
    let version = match[1];
    if (!semver.valid(version)) {
        core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
        version = '0.0.0-' + version;
    }
    const s = semver.clean(version);
    if (!s) {
        throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
    }
    return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
function getCodeQL() {
    if (cachedCodeQL === undefined) {
        const codeqlCmd = util.getRequiredEnvParam(CODEQL_ACTION_CMD);
        cachedCodeQL = getCodeQLForCmd(codeqlCmd);
    }
    return cachedCodeQL;
}
exports.getCodeQL = getCodeQL;
function resolveFunction(partialCodeql, methodName) {
    if (typeof partialCodeql[methodName] !== 'function') {
        const dummyMethod = () => {
            throw new Error('CodeQL ' + methodName + ' method not correctly defined');
        };
        return dummyMethod;
    }
    return partialCodeql[methodName];
}
/**
 * Set the functionality for CodeQL methods. Only for use in tests.
 *
 * Accepts a partial object and any undefined methods will be implemented
 * to immediately throw an exception indicating which method is missing.
 */
function setCodeQL(partialCodeql) {
    cachedCodeQL = {
        getDir: resolveFunction(partialCodeql, 'getDir'),
        printVersion: resolveFunction(partialCodeql, 'printVersion'),
        getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
        databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
        runAutobuild: resolveFunction(partialCodeql, 'runAutobuild'),
        extractScannedLanguage: resolveFunction(partialCodeql, 'extractScannedLanguage'),
        finalizeDatabase: resolveFunction(partialCodeql, 'finalizeDatabase'),
        resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
        databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
    };
}
exports.setCodeQL = setCodeQL;
function getCodeQLForCmd(cmd) {
    return {
        getDir: function () {
            return path.dirname(cmd);
        },
        printVersion: async function () {
            await exec.exec(cmd, [
                'version',
                '--format=json'
            ]);
        },
        getTracerEnv: async function (databasePath, compilerSpec) {
            let envFile = path.resolve(databasePath, 'working', 'env.tmp');
            const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
            await exec.exec(cmd, [
                'database',
                'trace-command',
                databasePath,
                ...compilerSpecArg,
                process.execPath,
                path.resolve(__dirname, 'tracer-env.js'),
                envFile
            ]);
            return JSON.parse(fs.readFileSync(envFile, 'utf-8'));
        },
        databaseInit: async function (databasePath, language, sourceRoot) {
            await exec.exec(cmd, [
                'database',
                'init',
                databasePath,
                '--language=' + language,
                '--source-root=' + sourceRoot,
            ]);
        },
        runAutobuild: async function (language) {
            const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
            const autobuildCmd = path.join(path.dirname(cmd), language, 'tools', cmdName);
            // Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
            // This is because of an issue with Azure pipelines timing out connections after 4 minutes
            // and Maven not properly handling closed connections
            // Otherwise long build processes will timeout when pulling down Java packages
            // https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
            let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
            process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
            await exec.exec(autobuildCmd);
        },
        extractScannedLanguage: async function (databasePath, language) {
            // Get extractor location
            let extractorPath = '';
            await exec.exec(cmd, [
                'resolve',
                'extractor',
                '--format=json',
                '--language=' + language
            ], {
                silent: true,
                listeners: {
                    stdout: (data) => { extractorPath += data.toString(); },
                    stderr: (data) => { process.stderr.write(data); }
                }
            });
            // Set trace command
            const ext = process.platform === 'win32' ? '.cmd' : '.sh';
            const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
            // Run trace command
            await exec.exec(cmd, [
                'database',
                'trace-command',
                databasePath,
                '--',
                traceCommand
            ]);
        },
        finalizeDatabase: async function (databasePath) {
            await exec.exec(cmd, [
                'database',
                'finalize',
                databasePath
            ]);
        },
        resolveQueries: async function (queries, extraSearchPath) {
            const codeqlArgs = [
                'resolve',
                'queries',
                ...queries,
                '--format=bylanguage'
            ];
            if (extraSearchPath !== undefined) {
                codeqlArgs.push('--search-path', extraSearchPath);
            }
            let output = '';
            await exec.exec(cmd, codeqlArgs, {
                listeners: {
                    stdout: (data) => {
                        output += data.toString();
                    }
                }
            });
            return JSON.parse(output);
        },
        databaseAnalyze: async function (databasePath, sarifFile, querySuite) {
            await exec.exec(cmd, [
                'database',
                'analyze',
                util.getMemoryFlag(),
                util.getThreadsFlag(),
                databasePath,
                '--format=sarif-latest',
                '--output=' + sarifFile,
                '--no-sarif-add-snippets',
                querySuite
            ]);
        }
    };
}
//# sourceMappingURL=codeql.js.map
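For orientation, not part of the diff: how getCodeQLURLVersion above normalises bundle URLs. The example.com URLs are placeholders; the expected results are taken from lib/codeql.test.js below.

const codeql = require('./lib/codeql');
// Date-style bundle tags are not valid SemVer, so they become a
// 0.0.0-<tag> pre-release before being used as a tool-cache key:
codeql.getCodeQLURLVersion('https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz');
// -> '0.0.0-20200601'
// A SemVer tag passes through unchanged:
codeql.getCodeQLURLVersion('https://example.com/download/codeql-bundle-1.2.3/codeql-bundle.tar.gz');
// -> '1.2.3'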
1 lib/codeql.js.map Normal file
File diff suppressed because one or more lines are too long
60 lib/codeql.test.js generated Normal file
@@ -0,0 +1,60 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const codeql = __importStar(require("./codeql"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
    await util.withTmpDir(async (tmpDir) => {
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
        process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
        const versions = ['20200601', '20200610'];
        for (let i = 0; i < versions.length; i++) {
            const version = versions[i];
            nock_1.default('https://example.com')
                .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
                .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
            process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
            await codeql.setupCodeQL();
            t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
        }
        const cachedVersions = toolcache.findAllVersions('CodeQL');
        t.is(cachedVersions.length, 2);
    });
});
ava_1.default('parse codeql bundle url version', t => {
    const tests = {
        '20200601': '0.0.0-20200601',
        '20200601.0': '0.0.0-20200601.0',
        '20200601.0.0': '20200601.0.0',
        '1.2.3': '1.2.3',
        '1.2.3-alpha': '1.2.3-alpha',
        '1.2.3-beta.1': '1.2.3-beta.1',
    };
    for (const [version, expectedVersion] of Object.entries(tests)) {
        const url = `https://github.com/.../codeql-bundle-${version}/...`;
        try {
            const parsedVersion = codeql.getCodeQLURLVersion(url);
            t.deepEqual(parsedVersion, expectedVersion);
        }
        catch (e) {
            t.fail(e.message);
        }
    }
});
//# sourceMappingURL=codeql.test.js.map
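For orientation, not part of the diff: the setCodeQL hook from lib/codeql.js is how other tests stub out the CLI; config-utils.test.js below uses it like this.

const CodeQL = require('./lib/codeql');
// Stub only the method the test needs; any other CodeQL method that
// ends up being called will throw and name the missing stub.
CodeQL.setCodeQL({
    resolveQueries: async function () {
        return { byLanguage: {}, noDeclaredLanguage: {}, multipleDeclaredLanguages: {} };
    },
});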
1 lib/codeql.test.js.map Normal file
@@ -0,0 +1 @@
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,MAAM,CAAC,WAAW,EAAE,CAAC;YAE3B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YACtD,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}
614 lib/config-utils.js generated
@@ -12,124 +12,536 @@ const io = __importStar(require("@actions/io"));
 const fs = __importStar(require("fs"));
 const yaml = __importStar(require("js-yaml"));
 const path = __importStar(require("path"));
-class ExternalQuery {
-    constructor(repository, ref) {
-        this.path = '';
-        this.repository = repository;
-        this.ref = ref;
+const api = __importStar(require("./api-client"));
+const codeql_1 = require("./codeql");
+const externalQueries = __importStar(require("./external-queries"));
+const util = __importStar(require("./util"));
+// Property names from the user-supplied config file.
+const NAME_PROPERTY = 'name';
+const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
+const QUERIES_PROPERTY = 'queries';
+const QUERIES_USES_PROPERTY = 'uses';
+const PATHS_IGNORE_PROPERTY = 'paths-ignore';
+const PATHS_PROPERTY = 'paths';
+/**
+ * A list of queries from https://github.com/github/codeql that
+ * we don't want to run. Disabling them here is a quicker alternative to
+ * disabling them in the code scanning query suites. Queries should also
+ * be disabled in the suites, and removed from this list here once the
+ * bundle is updated to make those suite changes live.
+ *
+ * Format is a map from language to an array of path suffixes of .ql files.
+ */
+const DISABLED_BUILTIN_QUERIES = {
+    'csharp': [
+        'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
+        'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
+    ]
+};
+function queryIsDisabled(language, query) {
+    return (DISABLED_BUILTIN_QUERIES[language] || [])
+        .some(disabledQuery => query.endsWith(disabledQuery));
+}
+/**
+ * Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are
+ * both empty and errors if they are not.
+ */
+function validateQueries(resolvedQueries) {
+    const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
+    const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
+    if (noDeclaredLanguageQueries.length !== 0) {
+        throw new Error('The following queries do not declare a language. ' +
+            'Their qlpack.yml files are either missing or is invalid.\n' +
+            noDeclaredLanguageQueries.join('\n'));
+    }
+    const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
+    const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
+    if (multipleDeclaredLanguagesQueries.length !== 0) {
+        throw new Error('The following queries declare multiple languages. ' +
+            'Their qlpack.yml files are either missing or is invalid.\n' +
+            multipleDeclaredLanguagesQueries.join('\n'));
     }
 }
-exports.ExternalQuery = ExternalQuery;
-class Config {
-    constructor() {
-        this.name = "";
-        this.disableDefaultQueries = false;
-        this.additionalQueries = [];
-        this.externalQueries = [];
-        this.pathsIgnore = [];
-        this.paths = [];
+/**
+ * Run 'codeql resolve queries' and add the results to resultMap
+ */
+async function runResolveQueries(resultMap, toResolve, extraSearchPath, errorOnInvalidQueries) {
+    const codeQl = codeql_1.getCodeQL();
+    const resolvedQueries = await codeQl.resolveQueries(toResolve, extraSearchPath);
+    for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
+        if (resultMap[language] === undefined) {
+            resultMap[language] = [];
+        }
+        resultMap[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
     }
-    addQuery(queryUses) {
-        // The logic for parsing the string is based on what actions does for
-        // parsing the 'uses' actions in the workflow file
-        if (queryUses === "") {
-            throw '"uses" value for queries cannot be blank';
-        }
-        if (queryUses.startsWith("./")) {
-            this.additionalQueries.push(queryUses.slice(2));
-            return;
-        }
-        let tok = queryUses.split('@');
-        if (tok.length !== 2) {
-            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
-        }
-        const ref = tok[1];
-        tok = tok[0].split('/');
-        // The first token is the owner
-        // The second token is the repo
-        // The rest is a path, if there is more than one token combine them to form the full path
-        if (tok.length > 3) {
-            tok = [tok[0], tok[1], tok.slice(2).join('/')];
-        }
-        if (tok.length < 2) {
-            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
-        }
-        let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
-        if (tok.length === 3) {
-            external.path = tok[2];
-        }
-        this.externalQueries.push(external);
+    if (errorOnInvalidQueries) {
+        validateQueries(resolvedQueries);
     }
 }
-exports.Config = Config;
-const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
-function initConfig() {
+/**
+ * Get the set of queries included by default.
+ */
+async function addDefaultQueries(languages, resultMap) {
+    const suites = languages.map(l => l + '-code-scanning.qls');
+    await runResolveQueries(resultMap, suites, undefined, false);
+}
+// The set of acceptable values for built-in suites from the codeql bundle
+const builtinSuites = ['security-extended', 'security-and-quality'];
+/**
+ * Determine the set of queries associated with suiteName's suites and add them to resultMap.
+ * Throws an error if suiteName is not a valid builtin suite.
+ */
+async function addBuiltinSuiteQueries(configFile, languages, resultMap, suiteName) {
+    const suite = builtinSuites.find((suite) => suite === suiteName);
+    if (!suite) {
+        throw new Error(getQueryUsesInvalid(configFile, suiteName));
+    }
+    const suites = languages.map(l => l + '-' + suiteName + '.qls');
+    await runResolveQueries(resultMap, suites, undefined, false);
+}
+/**
+ * Retrieve the set of queries at localQueryPath and add them to resultMap.
+ */
+async function addLocalQueries(configFile, resultMap, localQueryPath) {
+    // Resolve the local path against the workspace so that when this is
+    // passed to codeql it resolves to exactly the path we expect it to resolve to.
+    const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
+    let absoluteQueryPath = path.join(workspacePath, localQueryPath);
+    // Check the file exists
+    if (!fs.existsSync(absoluteQueryPath)) {
+        throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
+    }
+    // Call this after checking file exists, because it'll fail if file doesn't exist
+    absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
+    // Check the local path doesn't jump outside the repo using '..' or symlinks
+    if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
+        throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
+    }
+    // Get the root of the current repo to use when resolving query dependencies
+    const rootOfRepo = util.getRequiredEnvParam('GITHUB_WORKSPACE');
+    await runResolveQueries(resultMap, [absoluteQueryPath], rootOfRepo, true);
+}
+/**
+ * Retrieve the set of queries at the referenced remote repo and add them to resultMap.
+ */
+async function addRemoteQueries(configFile, resultMap, queryUses) {
+    let tok = queryUses.split('@');
+    if (tok.length !== 2) {
+        throw new Error(getQueryUsesInvalid(configFile, queryUses));
+    }
+    const ref = tok[1];
+    tok = tok[0].split('/');
+    // The first token is the owner
+    // The second token is the repo
+    // The rest is a path, if there is more than one token combine them to form the full path
+    if (tok.length < 2) {
+        throw new Error(getQueryUsesInvalid(configFile, queryUses));
+    }
+    // Check none of the parts of the repository name are empty
+    if (tok[0].trim() === '' || tok[1].trim() === '') {
+        throw new Error(getQueryUsesInvalid(configFile, queryUses));
+    }
+    const nwo = tok[0] + '/' + tok[1];
+    // Checkout the external repository
+    const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref);
+    const queryPath = tok.length > 2
+        ? path.join(rootOfRepo, tok.slice(2).join('/'))
+        : rootOfRepo;
+    await runResolveQueries(resultMap, [queryPath], rootOfRepo, true);
+}
+/**
+ * Parse a query 'uses' field to a discrete set of query files and update resultMap.
+ *
+ * The logic for parsing the string is based on what actions does for
+ * parsing the 'uses' actions in the workflow file. So it can handle
+ * local paths starting with './', or references to remote repos, or
+ * a finite set of hardcoded terms for builtin suites.
+ */
+async function parseQueryUses(configFile, languages, resultMap, queryUses) {
+    queryUses = queryUses.trim();
+    if (queryUses === "") {
+        throw new Error(getQueryUsesInvalid(configFile));
+    }
+    // Check for the local path case before we start trying to parse the repository name
+    if (queryUses.startsWith("./")) {
+        await addLocalQueries(configFile, resultMap, queryUses.slice(2));
+        return;
+    }
+    // Check for one of the builtin suites
+    if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
+        await addBuiltinSuiteQueries(configFile, languages, resultMap, queryUses);
+        return;
+    }
+    // Otherwise, must be a reference to another repo
+    await addRemoteQueries(configFile, resultMap, queryUses);
+}
+// Regex validating stars in paths or paths-ignore entries.
+// The intention is to only allow ** to appear when immediately
+// preceded and followed by a slash.
+const pathStarsRegex = /.*(?:\*\*[^/].*|\*\*$|[^/]\*\*.*)/;
+// Characters that are supported by filters in workflows, but not by us.
+// See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
+const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/;
+// Checks that a paths of paths-ignore entry is valid, possibly modifying it
+// to make it valid, or if not possible then throws an error.
+function validateAndSanitisePath(originalPath, propertyName, configFile) {
+    // Take a copy so we don't modify the original path, so we can still construct error messages
+    let path = originalPath;
+    // All paths are relative to the src root, so strip off leading slashes.
+    while (path.charAt(0) === '/') {
+        path = path.substring(1);
+    }
+    // Trailing ** are redundant, so strip them off
+    if (path.endsWith('/**')) {
+        path = path.substring(0, path.length - 2);
+    }
+    // An empty path is not allowed as it's meaningless
+    if (path === '') {
+        throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" is not an invalid path. ' +
+            'It is not necessary to include it, and it is not allowed to exclude it.'));
+    }
+    // Check for illegal uses of **
+    if (path.match(pathStarsRegex)) {
+        throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an invalid "**" wildcard. ' +
+            'They must be immediately preceeded and followed by a slash as in "/**/", or come at the start or end.'));
+    }
+    // Check for other regex characters that we don't support.
+    // Output a warning so the user knows, but otherwise continue normally.
+    if (path.match(filterPatternCharactersRegex)) {
+        core.warning(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an unsupported character. ' +
+            'The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.'));
+    }
+    // Ban any uses of backslash for now.
+    // This may not play nicely with project layouts.
+    // This restriction can be lifted later if we determine they are ok.
+    if (path.indexOf('\\') !== -1) {
+        throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an "\\" character. These are not allowed in filters. ' +
+            'If running on windows we recommend using "/" instead for path filters.'));
+    }
+    return path;
+}
+exports.validateAndSanitisePath = validateAndSanitisePath;
+function getNameInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
+}
+exports.getNameInvalid = getNameInvalid;
+function getDisableDefaultQueriesInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
+}
+exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
+function getQueriesInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
+}
+exports.getQueriesInvalid = getQueriesInvalid;
+function getQueryUsesInvalid(configFile, queryUses) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
+        '), a relative path, or be of the form "owner/repo[/path]@ref"' +
+        (queryUses !== undefined ? '\n Found: ' + queryUses : ''));
+}
+exports.getQueryUsesInvalid = getQueryUsesInvalid;
+function getPathsIgnoreInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
+}
+exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
+function getPathsInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
+}
+exports.getPathsInvalid = getPathsInvalid;
+function getLocalPathOutsideOfRepository(configFile, localPath) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
+}
+exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
+function getLocalPathDoesNotExist(configFile, localPath) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
+}
+exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
+function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
+    return 'The configuration file "' + configFile + '" is outside of the workspace';
+}
+exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
+function getConfigFileDoesNotExistErrorMessage(configFile) {
+    return 'The configuration file "' + configFile + '" does not exist';
+}
+exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
+function getConfigFileRepoFormatInvalidMessage(configFile) {
+    let error = 'The configuration file "' + configFile + '" is not a supported remote file reference.';
+    error += ' Expected format <owner>/<repository>/<file-path>@<ref>';
+    return error;
+}
+exports.getConfigFileRepoFormatInvalidMessage = getConfigFileRepoFormatInvalidMessage;
+function getConfigFileFormatInvalidMessage(configFile) {
+    return 'The configuration file "' + configFile + '" could not be read';
+}
+exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage;
+function getConfigFileDirectoryGivenMessage(configFile) {
+    return 'The configuration file "' + configFile + '" looks like a directory, not a file';
+}
+exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
+function getConfigFilePropertyError(configFile, property, error) {
+    return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
+}
+/**
+ * Gets the set of languages in the current repository
+ */
+async function getLanguagesInRepo() {
+    var _a;
+    // Translate between GitHub's API names for languages and ours
+    const codeqlLanguages = {
+        'C': 'cpp',
+        'C++': 'cpp',
+        'C#': 'csharp',
+        'Go': 'go',
+        'Java': 'java',
+        'JavaScript': 'javascript',
+        'TypeScript': 'javascript',
+        'Python': 'python',
+    };
+    let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
+    if (repo_nwo) {
+        let owner = repo_nwo[0];
+        let repo = repo_nwo[1];
+        core.debug(`GitHub repo ${owner} ${repo}`);
+        const response = await api.getApiClient().request("GET /repos/:owner/:repo/languages", ({
+            owner,
+            repo
+        }));
+        core.debug("Languages API response: " + JSON.stringify(response));
+        // The GitHub API is going to return languages in order of popularity,
+        // When we pick a language to autobuild we want to pick the most popular traced language
+        // Since sets in javascript maintain insertion order, using a set here and then splatting it
+        // into an array gives us an array of languages ordered by popularity
+        let languages = new Set();
+        for (let lang in response.data) {
+            if (lang in codeqlLanguages) {
+                languages.add(codeqlLanguages[lang]);
+            }
+        }
+        return [...languages];
+    }
+    else {
+        return [];
+    }
+}
+/**
+ * Get the languages to analyse.
+ *
+ * The result is obtained from the action input parameter 'languages' if that
+ * has been set, otherwise it is deduced as all languages in the repo that
+ * can be analysed.
+ */
+async function getLanguages() {
+    // Obtain from action input 'languages' if set
+    let languages = core.getInput('languages', { required: false })
+        .split(',')
+        .map(x => x.trim())
+        .filter(x => x.length > 0);
+    core.info("Languages from configuration: " + JSON.stringify(languages));
+    if (languages.length === 0) {
+        // Obtain languages as all languages in the repo that can be analysed
+        languages = await getLanguagesInRepo();
+        core.info("Automatically detected languages: " + JSON.stringify(languages));
+    }
+    return languages;
+}
+/**
+ * Get the default config for when the user has not supplied one.
+ */
+async function getDefaultConfig() {
+    const languages = await getLanguages();
+    const queries = {};
+    await addDefaultQueries(languages, queries);
+    return {
+        languages: languages,
+        queries: queries,
+        pathsIgnore: [],
+        paths: []
+    };
+}
+exports.getDefaultConfig = getDefaultConfig;
+/**
+ * Load the config from the given file.
+ */
+async function loadConfig(configFile) {
+    let parsedYAML;
+    if (isLocal(configFile)) {
+        // Treat the config file as relative to the workspace
+        const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
+        configFile = path.resolve(workspacePath, configFile);
+        parsedYAML = getLocalConfig(configFile, workspacePath);
+    }
+    else {
+        parsedYAML = await getRemoteConfig(configFile);
+    }
+    // Validate that the 'name' property is syntactically correct,
+    // even though we don't use the value yet.
+    if (NAME_PROPERTY in parsedYAML) {
+        if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
+            throw new Error(getNameInvalid(configFile));
+        }
+        if (parsedYAML[NAME_PROPERTY].length === 0) {
+            throw new Error(getNameInvalid(configFile));
+        }
+    }
+    const languages = await getLanguages();
+    // If the languages parameter was not given and no languages were
+    // detected then fail here as this is a workflow configuration error.
+    if (languages.length === 0) {
+        throw new Error("Did not detect any languages to analyze. Please update input in workflow.");
+    }
+    const queries = {};
+    const pathsIgnore = [];
+    const paths = [];
+    let disableDefaultQueries = false;
+    if (DISABLE_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
+        if (typeof parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
+            throw new Error(getDisableDefaultQueriesInvalid(configFile));
+        }
+        disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
+    }
+    if (!disableDefaultQueries) {
+        await addDefaultQueries(languages, queries);
+    }
+    if (QUERIES_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
+            throw new Error(getQueriesInvalid(configFile));
+        }
+        for (const query of parsedYAML[QUERIES_PROPERTY]) {
+            if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
+                throw new Error(getQueryUsesInvalid(configFile));
+            }
+            await parseQueryUses(configFile, languages, queries, query[QUERIES_USES_PROPERTY]);
+        }
+    }
+    if (PATHS_IGNORE_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
+            throw new Error(getPathsIgnoreInvalid(configFile));
+        }
+        parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
+            if (typeof path !== "string" || path === '') {
+                throw new Error(getPathsIgnoreInvalid(configFile));
+            }
+            pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
+        });
+    }
+    if (PATHS_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
+            throw new Error(getPathsInvalid(configFile));
+        }
+        parsedYAML[PATHS_PROPERTY].forEach(path => {
+            if (typeof path !== "string" || path === '') {
+                throw new Error(getPathsInvalid(configFile));
+            }
+            paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
+        });
+    }
+    return { languages, queries, pathsIgnore, paths };
+}
+/**
+ * Load and return the config.
+ *
+ * This will parse the config from the user input if present, or generate
+ * a default config. The parsed config is then stored to a known location.
+ */
+async function initConfig() {
     const configFile = core.getInput('config-file');
-    const config = new Config();
+    let config;
     // If no config file was provided create an empty one
     if (configFile === '') {
         core.debug('No configuration file was provided');
-        return config;
+        config = await getDefaultConfig();
     }
-    try {
-        const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
-        if (parsedYAML.name && typeof parsedYAML.name === "string") {
-            config.name = parsedYAML.name;
-        }
-        if (parsedYAML['disable-default-queries'] && typeof parsedYAML['disable-default-queries'] === "boolean") {
-            config.disableDefaultQueries = parsedYAML['disable-default-queries'];
-        }
-        const queries = parsedYAML.queries;
-        if (queries && queries instanceof Array) {
-            queries.forEach(query => {
-                if (query.uses && typeof query.uses === "string") {
-                    config.addQuery(query.uses);
-                }
-            });
-        }
-        const pathsIgnore = parsedYAML['paths-ignore'];
-        if (pathsIgnore && pathsIgnore instanceof Array) {
-            pathsIgnore.forEach(path => {
-                if (typeof path === "string") {
-                    config.pathsIgnore.push(path);
-                }
-            });
-        }
-        const paths = parsedYAML.paths;
-        if (paths && paths instanceof Array) {
-            paths.forEach(path => {
-                if (typeof path === "string") {
-                    config.paths.push(path);
-                }
-            });
-        }
-    }
-    catch (err) {
-        core.setFailed(err);
+    else {
+        config = await loadConfig(configFile);
     }
+    // Save the config so we can easily access it again in the future
+    await saveConfig(config);
     return config;
 }
+exports.initConfig = initConfig;
+function isLocal(configPath) {
+    // If the path starts with ./, look locally
+    if (configPath.indexOf("./") === 0) {
+        return true;
+    }
+    return (configPath.indexOf("@") === -1);
+}
+function getLocalConfig(configFile, workspacePath) {
+    // Error if the config file is now outside of the workspace
+    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
+        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
+    }
+    // Error if the file does not exist
+    if (!fs.existsSync(configFile)) {
+        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
+    }
+    return yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
+}
+async function getRemoteConfig(configFile) {
+    // retrieve the various parts of the config location, and ensure they're present
+    const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
+    const pieces = format.exec(configFile);
+    // 5 = 4 groups + the whole expression
+    if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
+        throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
+    }
+    const response = await api.getApiClient().repos.getContents({
+        owner: pieces.groups.owner,
+        repo: pieces.groups.repo,
+        path: pieces.groups.path,
+        ref: pieces.groups.ref,
+    });
+    let fileContents;
+    if ("content" in response.data && response.data.content !== undefined) {
+        fileContents = response.data.content;
+    }
+    else if (Array.isArray(response.data)) {
+        throw new Error(getConfigFileDirectoryGivenMessage(configFile));
+    }
+    else {
+        throw new Error(getConfigFileFormatInvalidMessage(configFile));
+    }
+    return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
+}
+/**
+ * Get the directory where the parsed config will be stored.
+ */
+function getPathToParsedConfigFolder() {
+    return util.getRequiredEnvParam('RUNNER_TEMP');
+}
+/**
+ * Get the file path where the parsed config will be stored.
+ */
+function getPathToParsedConfigFile() {
+    return path.join(getPathToParsedConfigFolder(), 'config');
+}
+exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
+/**
+ * Store the given config to the path returned from getPathToParsedConfigFile.
+ */
 async function saveConfig(config) {
     const configString = JSON.stringify(config);
-    await io.mkdirP(configFolder);
-    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
+    await io.mkdirP(getPathToParsedConfigFolder());
+    fs.writeFileSync(getPathToParsedConfigFile(), configString, 'utf8');
     core.debug('Saved config:');
     core.debug(configString);
 }
-async function loadConfig() {
-    const configFile = path.join(configFolder, 'config');
-    if (fs.existsSync(configFile)) {
-        const configString = fs.readFileSync(configFile, 'utf8');
-        core.debug('Loaded config:');
-        core.debug(configString);
-        return JSON.parse(configString);
-    }
-    else {
-        const config = initConfig();
-        core.debug('Initialized config:');
-        core.debug(JSON.stringify(config));
-        await saveConfig(config);
-        return config;
+/**
+ * Get the config.
+ *
+ * If this is the first time in a workflow that this is being called then
+ * this will parse the config from the user input. The parsed config is then
+ * stored to a known location. On the second and further calls, this will
+ * return the contents of the parsed config from the known location.
+ */
+async function getConfig() {
+    const configFile = getPathToParsedConfigFile();
+    if (!fs.existsSync(configFile)) {
+        throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
     }
+    const configString = fs.readFileSync(configFile, 'utf8');
+    core.debug('Loaded config:');
+    core.debug(configString);
+    return JSON.parse(configString);
 }
-exports.loadConfig = loadConfig;
+exports.getConfig = getConfig;
+//# sourceMappingURL=config-utils.js.map
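A minimal sketch, not part of the diff, of the path sanitisation rules enforced by validateAndSanitisePath above; 'my-config.yml' is an invented config-file name used only in the error messages.

const configUtils = require('./lib/config-utils');
// Leading slashes are stripped and a trailing '**' is dropped as redundant:
configUtils.validateAndSanitisePath('/src/app/**', 'paths', 'my-config.yml');
// -> 'src/app/'
// A '**' that is not bounded by slashes is rejected:
configUtils.validateAndSanitisePath('src/**app', 'paths', 'my-config.yml');
// -> throws: ... contains an invalid "**" wildcard ...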
1 lib/config-utils.js.map Normal file
File diff suppressed because one or more lines are too long
387  lib/config-utils.test.js  generated  Normal file
@@ -0,0 +1,387 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const sinon_1 = __importDefault(require("sinon"));
const api = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
function setInput(name, value) {
    // Transformation copied from
    // https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
    const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    if (value !== undefined) {
        process.env[envVar] = value;
    }
    else {
        delete process.env[envVar];
    }
}
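For reference, the INPUT_ mapping above mirrors how @actions/core reads action inputs: spaces become underscores and the name is upper-cased, so hyphens survive unchanged. A quick illustration (the values are made up for the example):

setInput('config-file', 'my-config.yml');
// process.env['INPUT_CONFIG-FILE'] === 'my-config.yml'
setInput('some input', 'x');
// process.env['INPUT_SOME_INPUT'] === 'x'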
function mockGetContents(content) {
    // Passing an auth token is required, so we just use a dummy value
    let client = new github.GitHub('123');
    const response = {
        data: content
    };
    const spyGetContents = sinon_1.default.stub(client.repos, "getContents").resolves(response);
    sinon_1.default.stub(api, "getApiClient").value(() => client);
    return spyGetContents;
}
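This helper stubs the Octokit repos.getContents endpoint with sinon and forces the module-level API client to return the stubbed instance, so the tests below can hand the config loader whatever payload they like. For example (mirroring the remote-config test further down):

const spy = mockGetContents({ content: Buffer.from('name: my config').toString('base64') });
// ... exercise code that fetches a remote config ...
// t.assert(spy.called);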
ava_1.default("load empty config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        setInput('config-file', undefined);
        setInput('languages', 'javascript,python');
        CodeQL.setCodeQL({
            resolveQueries: async function () {
                return {
                    byLanguage: {},
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        const config = await configUtils.initConfig();
        t.deepEqual(config, await configUtils.getDefaultConfig());
    });
});
ava_1.default("loading config saves config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        setInput('config-file', undefined);
        setInput('languages', 'javascript,python');
        CodeQL.setCodeQL({
            resolveQueries: async function () {
                return {
                    byLanguage: {},
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        // Sanity check the saved config file does not already exist
        t.false(fs.existsSync(configUtils.getPathToParsedConfigFile()));
        // Sanity check that getConfig throws before we have called initConfig
        await t.throwsAsync(configUtils.getConfig);
        const config1 = await configUtils.initConfig();
        // The saved config file should now exist
        t.true(fs.existsSync(configUtils.getPathToParsedConfigFile()));
        // And that same newly-initialised config should now be returned by getConfig
        const config2 = await configUtils.getConfig();
        t.deepEqual(config1, config2);
    });
});
ava_1.default("load input outside of workspace", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        setInput('config-file', '../input');
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
        }
    });
});
ava_1.default("load non-local input with invalid repo syntax", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        // no filename given, just a repo
        setInput('config-file', 'octo-org/codeql-config@main');
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
        }
    });
});
ava_1.default("load non-existent input", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        t.false(fs.existsSync(path.join(tmpDir, 'input')));
        setInput('config-file', 'input');
        setInput('languages', 'javascript');
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
        }
    });
});
ava_1.default("load non-empty input", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        CodeQL.setCodeQL({
            resolveQueries: async function () {
                return {
                    byLanguage: {
                        'javascript': {
                            '/foo/a.ql': {},
                            '/bar/b.ql': {},
                        },
                    },
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        // Just create a generic config object with non-default values for all fields
        const inputFileContents = `
      name: my config
      disable-default-queries: true
      queries:
        - uses: ./foo
      paths-ignore:
        - a
        - b
      paths:
        - c/d`;
        fs.mkdirSync(path.join(tmpDir, 'foo'));
        // And the config we expect it to parse to
        const expectedConfig = {
            languages: ['javascript'],
            queries: { 'javascript': ['/foo/a.ql', '/bar/b.ql'] },
            pathsIgnore: ['a', 'b'],
            paths: ['c/d'],
        };
        fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
        setInput('config-file', 'input');
        setInput('languages', 'javascript');
        const actualConfig = await configUtils.initConfig();
        // Should exactly equal the object we constructed earlier
        t.deepEqual(actualConfig, expectedConfig);
    });
});
ava_1.default("default queries are used", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        // Check that the default behaviour is to add the default queries.
        // In this case if a config file is specified but does not include
        // the disable-default-queries field.
        // We determine this by whether CodeQL.resolveQueries is called
        // with the correct arguments.
        const resolveQueriesArgs = [];
        CodeQL.setCodeQL({
            resolveQueries: async function (queries, extraSearchPath) {
                resolveQueriesArgs.push({ queries, extraSearchPath });
                return {
                    byLanguage: {
                        'javascript': {},
                    },
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        // The important point of this config is that it doesn't specify
        // the disable-default-queries field.
        // Any other details are hopefully irrelevant for this test.
        const inputFileContents = `
      paths:
        - foo`;
        fs.mkdirSync(path.join(tmpDir, 'foo'));
        fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
        setInput('config-file', 'input');
        setInput('languages', 'javascript');
        await configUtils.initConfig();
        // Check resolve queries was called correctly
        t.deepEqual(resolveQueriesArgs.length, 1);
        t.deepEqual(resolveQueriesArgs[0].queries, ['javascript-code-scanning.qls']);
        t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined);
    });
});
ava_1.default("API client used when reading remote config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        CodeQL.setCodeQL({
            resolveQueries: async function () {
                return {
                    byLanguage: {},
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        const inputFileContents = `
      name: my config
      disable-default-queries: true
      queries:
        - uses: ./
        - uses: ./foo
        - uses: foo/bar@dev
      paths-ignore:
        - a
        - b
      paths:
        - c/d`;
        const dummyResponse = {
            content: Buffer.from(inputFileContents).toString("base64"),
        };
        const spyGetContents = mockGetContents(dummyResponse);
        // Create checkout directory for remote queries repository
        fs.mkdirSync(path.join(tmpDir, 'foo/bar'), { recursive: true });
        setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
        setInput('languages', 'javascript');
        await configUtils.initConfig();
        t.assert(spyGetContents.called);
    });
});
ava_1.default("Remote config handles the case where a directory is provided", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        const dummyResponse = []; // directories are returned as arrays
        mockGetContents(dummyResponse);
        const repoReference = 'octo-org/codeql-config/config.yaml@main';
        setInput('config-file', repoReference);
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
        }
    });
});
ava_1.default("Invalid format of remote config handled correctly", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        const dummyResponse = {
        // note no "content" property here
        };
        mockGetContents(dummyResponse);
        const repoReference = 'octo-org/codeql-config/config.yaml@main';
        setInput('config-file', repoReference);
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
        }
    });
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
    ava_1.default("load invalid input - " + testName, async (t) => {
        return await util.withTmpDir(async (tmpDir) => {
            process.env['RUNNER_TEMP'] = tmpDir;
            process.env['GITHUB_WORKSPACE'] = tmpDir;
            CodeQL.setCodeQL({
                resolveQueries: async function () {
                    return {
                        byLanguage: {},
                        noDeclaredLanguage: {},
                        multipleDeclaredLanguages: {},
                    };
                },
            });
            const inputFile = path.join(tmpDir, 'input');
            fs.writeFileSync(inputFile, inputFileContents, 'utf8');
            setInput('config-file', 'input');
            setInput('languages', 'javascript');
            try {
                await configUtils.initConfig();
                throw new Error('initConfig did not throw error');
            }
            catch (err) {
                t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
            }
        });
    });
}
doInvalidInputTest('name invalid type', `
    name:
      - foo: bar`, configUtils.getNameInvalid);
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest('queries uses invalid type', `
    queries:
      - uses:
          - hello: world`, configUtils.getQueryUsesInvalid);
function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
    // Invalid contents of a "queries.uses" field.
    // Should fail with the expected error message
    const inputFileContents = `
    name: my config
    queries:
      - name: foo
        uses: ` + input;
    doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
}
// Various "uses" fields, and the errors they should produce
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
const validPaths = [
    'foo',
    'foo/',
    'foo/**',
    'foo/**/',
    'foo/**/**',
    'foo/**/bar/**/baz',
    '**/',
    '**/foo',
    '/foo',
];
const invalidPaths = [
    'a/***/b',
    'a/**b',
    'a/b**',
    '**',
];
ava_1.default('path validations', t => {
    // Dummy values to pass to validateAndSanitisePath
    const propertyName = 'paths';
    const configFile = './.github/codeql/config.yml';
    for (const path of validPaths) {
        t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile));
    }
    for (const path of invalidPaths) {
        t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile));
    }
});
ava_1.default('path sanitisation', t => {
    // Dummy values to pass to validateAndSanitisePath
    const propertyName = 'paths';
    const configFile = './.github/codeql/config.yml';
    // Valid paths are not modified
    t.deepEqual(configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile), 'foo/bar');
    // Trailing stars are stripped
    t.deepEqual(configUtils.validateAndSanitisePath('foo/**', propertyName, configFile), 'foo/');
});
//# sourceMappingURL=config-utils.test.js.map
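Taken together, the path tests above pin down the glob rules: '**' must occupy a whole path segment, a bare '**' is rejected, and a trailing '/**' is normalised away ('foo/**' becomes 'foo/'). A tiny check distilled from those test tables (this is my restatement of the rule, not the action's real implementation):

// Accepts exactly the validPaths above and rejects the invalidPaths.
function looksLikeValidPathFilter(p) {
    if (p === '**') return false; // bare '**' is rejected
    // '**' may only appear as a segment of its own, never glued to other characters
    return p.split('/').every(seg => seg === '**' || !seg.includes('**'));
}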
1  lib/config-utils.test.js.map  Normal file
File diff suppressed because one or more lines are too long
35  lib/external-queries.js  generated
@@ -11,21 +11,24 @@ const core = __importStar(require("@actions/core"));
 const exec = __importStar(require("@actions/exec"));
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
-async function checkoutExternalQueries(config) {
-    const folder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
-    for (const externalQuery of config.externalQueries) {
-        core.info('Checking out ' + externalQuery.repository);
-        const checkoutLocation = path.join(folder, externalQuery.repository);
-        if (!fs.existsSync(checkoutLocation)) {
-            const repoURL = 'https://github.com/' + externalQuery.repository + '.git';
-            await exec.exec('git', ['clone', repoURL, checkoutLocation]);
-            await exec.exec('git', [
-                '--work-tree=' + checkoutLocation,
-                '--git-dir=' + checkoutLocation + '/.git',
-                'checkout', externalQuery.ref,
-            ]);
-        }
-        config.additionalQueries.push(path.join(checkoutLocation, externalQuery.path));
-    }
-}
-exports.checkoutExternalQueries = checkoutExternalQueries;
+const util = __importStar(require("./util"));
+/**
+ * Check out repository at the given ref, and return the directory of the checkout.
+ */
+async function checkoutExternalRepository(repository, ref) {
+    const folder = util.getRequiredEnvParam('RUNNER_TEMP');
+    core.info('Checking out ' + repository);
+    const checkoutLocation = path.join(folder, repository);
+    if (!fs.existsSync(checkoutLocation)) {
+        const repoURL = 'https://github.com/' + repository + '.git';
+        await exec.exec('git', ['clone', repoURL, checkoutLocation]);
+        await exec.exec('git', [
+            '--work-tree=' + checkoutLocation,
+            '--git-dir=' + checkoutLocation + '/.git',
+            'checkout', ref,
+        ]);
+    }
+    return checkoutLocation;
+}
+exports.checkoutExternalRepository = checkoutExternalRepository;
+//# sourceMappingURL=external-queries.js.map
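After this change a caller performs one checkout at a time and gets the location back, instead of the function mutating the config. Usage sketch (mirroring the new test below):

const location = await externalQueries.checkoutExternalRepository("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b");
// location === path.join(process.env['RUNNER_TEMP'], 'github', 'codeql-go')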
1  lib/external-queries.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA+B;AAE/B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAAC,UAAkB,EAAE,GAAW;IAC9E,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,UAAU,CAAC,CAAC;IAExC,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;IACvD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,UAAU,GAAG,MAAM,CAAC;QAC5D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;QAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACrB,cAAc,GAAG,gBAAgB;YACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;YACzC,UAAU,EAAE,GAAG;SAChB,CAAC,CAAC;KACJ;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAjBD,gEAiBC"}
28  lib/external-queries.test.js  generated  Normal file
@@ -0,0 +1,28 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const externalQueries = __importStar(require("./external-queries"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("checkoutExternalQueries", async (t) => {
    await util.withTmpDir(async (tmpDir) => {
        process.env["RUNNER_TEMP"] = tmpDir;
        await externalQueries.checkoutExternalRepository("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b");
        // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch
        t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
    });
});
//# sourceMappingURL=external-queries.test.js.map
1  lib/external-queries.test.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,0BAA0B,CAAC,kBAAkB,EAAE,0CAA0C,CAAC,CAAC;QAEjH,mGAAmG;QACnG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
130  lib/finalize-db.js  generated
@@ -12,97 +12,73 @@ const exec = __importStar(require("@actions/exec"));
 const io = __importStar(require("@actions/io"));
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
+const codeql_1 = require("./codeql");
 const configUtils = __importStar(require("./config-utils"));
-const externalQueries = __importStar(require("./external-queries"));
 const sharedEnv = __importStar(require("./shared-environment"));
 const upload_lib = __importStar(require("./upload-lib"));
 const util = __importStar(require("./util"));
-async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
+async function setupPythonExtractor() {
+    const codeqlPython = process.env["CODEQL_PYTHON"];
+    if (codeqlPython === undefined || codeqlPython.length === 0) {
+        // If CODEQL_PYTHON is not set, no dependencies were installed, so we don't need to do anything
+        return;
+    }
+    let output = '';
+    const options = {
+        listeners: {
+            stdout: (data) => {
+                output += data.toString();
+            }
+        }
+    };
+    await exec.exec(codeqlPython, ['-c', 'import os; import pip; print(os.path.dirname(os.path.dirname(pip.__file__)))'], options);
+    core.info('Setting LGTM_INDEX_IMPORT_PATH=' + output);
+    process.env['LGTM_INDEX_IMPORT_PATH'] = output;
+    output = '';
+    await exec.exec(codeqlPython, ['-c', 'import sys; print(sys.version_info[0])'], options);
+    core.info('Setting LGTM_PYTHON_SETUP_VERSION=' + output);
+    process.env['LGTM_PYTHON_SETUP_VERSION'] = output;
+}
+async function createdDBForScannedLanguages(databaseFolder) {
     const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
     if (scannedLanguages) {
+        const codeql = codeql_1.getCodeQL();
         for (const language of scannedLanguages.split(',')) {
             core.startGroup('Extracting ' + language);
-            // Get extractor location
-            let extractorPath = '';
-            await exec.exec(codeqlCmd, ['resolve', 'extractor', '--format=json', '--language=' + language], {
-                silent: true,
-                listeners: {
-                    stdout: (data) => { extractorPath += data.toString(); },
-                    stderr: (data) => { process.stderr.write(data); }
-                }
-            });
-            // Set trace command
-            const ext = process.platform === 'win32' ? '.cmd' : '.sh';
-            const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
-            // Run trace command
-            await exec.exec(codeqlCmd, ['database', 'trace-command', path.join(databaseFolder, language), '--', traceCommand]);
+            if (language === 'python') {
+                await setupPythonExtractor();
+            }
+            await codeql.extractScannedLanguage(path.join(databaseFolder, language), language);
             core.endGroup();
         }
     }
 }
-async function finalizeDatabaseCreation(codeqlCmd, databaseFolder) {
-    await createdDBForScannedLanguages(codeqlCmd, databaseFolder);
-    const languages = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES] || '';
-    for (const language of languages.split(',')) {
+async function finalizeDatabaseCreation(databaseFolder, config) {
+    await createdDBForScannedLanguages(databaseFolder);
+    const codeql = codeql_1.getCodeQL();
+    for (const language of config.languages) {
         core.startGroup('Finalizing ' + language);
-        await exec.exec(codeqlCmd, ['database', 'finalize', path.join(databaseFolder, language)]);
+        await codeql.finalizeDatabase(path.join(databaseFolder, language));
         core.endGroup();
     }
 }
-async function resolveQueryLanguages(codeqlCmd, config) {
-    let res = new Map();
-    if (config.additionalQueries.length !== 0) {
-        let resolveQueriesOutput = '';
-        const options = {
-            listeners: {
-                stdout: (data) => {
-                    resolveQueriesOutput += data.toString();
-                }
-            }
-        };
-        await exec.exec(codeqlCmd, [
-            'resolve',
-            'queries',
-            ...config.additionalQueries,
-            '--format=bylanguage'
-        ], options);
-        const resolveQueriesOutputObject = JSON.parse(resolveQueriesOutput);
-        for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
-            res[language] = Object.keys(queries);
-        }
-        const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
-        const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
-        if (noDeclaredLanguageQueries.length !== 0) {
-            throw new Error('Some queries do not declare a language, their qlpack.yml file is missing or is invalid');
-        }
-        const multipleDeclaredLanguages = resolveQueriesOutputObject.multipleDeclaredLanguages;
-        const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
-        if (multipleDeclaredLanguagesQueries.length !== 0) {
-            throw new Error('Some queries declare multiple languages, their qlpack.yml file is missing or is invalid');
-        }
-    }
-    return res;
-}
 // Runs queries and creates sarif files in the given folder
-async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
-    const queriesPerLanguage = await resolveQueryLanguages(codeqlCmd, config);
+async function runQueries(databaseFolder, sarifFolder, config) {
+    const codeql = codeql_1.getCodeQL();
     for (let database of fs.readdirSync(databaseFolder)) {
         core.startGroup('Analyzing ' + database);
-        const queries = [];
-        if (!config.disableDefaultQueries) {
-            queries.push(database + '-code-scanning.qls');
-        }
-        queries.push(...(queriesPerLanguage[database] || []));
+        const queries = config.queries[database] || [];
+        if (queries.length === 0) {
+            throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
+        }
+        // Pass the queries to codeql using a file instead of using the command
+        // line to avoid command line length restrictions, particularly on windows.
+        const querySuite = path.join(databaseFolder, database + '-queries.qls');
+        const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
+        fs.writeFileSync(querySuite, querySuiteContents);
+        core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
         const sarifFile = path.join(sarifFolder, database + '.sarif');
-        await exec.exec(codeqlCmd, [
-            'database',
-            'analyze',
-            path.join(databaseFolder, database),
-            '--format=sarif-latest',
-            '--output=' + sarifFile,
-            '--no-sarif-add-snippets',
-            ...queries
-        ]);
+        await codeql.databaseAnalyze(path.join(databaseFolder, database), sarifFile, querySuite);
         core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
         core.endGroup();
     }
 }
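The query-suite file written above is plain YAML, one entry per resolved query. For a javascript database with the two queries used in the test fixtures earlier, it would contain (paths illustrative):

- query: /foo/a.ql
- query: /bar/b.ql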
@@ -112,20 +88,21 @@ async function run() {
     if (util.should_abort('finish', true) || !await util.reportActionStarting('finish')) {
         return;
     }
-    const config = await configUtils.loadConfig();
+    const config = await configUtils.getConfig();
     core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
     delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
-    const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
     const databaseFolder = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_DATABASE_DIR);
     const sarifFolder = core.getInput('output');
     await io.mkdirP(sarifFolder);
     core.info('Finalizing database creation');
-    await finalizeDatabaseCreation(codeqlCmd, databaseFolder);
-    await externalQueries.checkoutExternalQueries(config);
+    await finalizeDatabaseCreation(databaseFolder, config);
     core.info('Analyzing database');
-    await runQueries(codeqlCmd, databaseFolder, sarifFolder, config);
+    await runQueries(databaseFolder, sarifFolder, config);
     if ('true' === core.getInput('upload')) {
-        await upload_lib.upload(sarifFolder);
+        if (!await upload_lib.upload(sarifFolder)) {
+            await util.reportActionFailed('finish', 'upload');
+            return;
+        }
     }
 }
 catch (error) {
@@ -139,3 +116,4 @@ run().catch(e => {
     core.setFailed("analyze action failed: " + e);
     console.log(e);
 });
+//# sourceMappingURL=finalize-db.js.map
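setupPythonExtractor above leans on a pattern that recurs throughout this codebase: @actions/exec collects a child process's stdout through a listener callback rather than returning it. A minimal standalone sketch (the helper name is mine, not from the diff):

const exec = require('@actions/exec');

// Run a command and return everything it printed to stdout.
async function captureStdout(cmd, args) {
    let output = '';
    await exec.exec(cmd, args, {
        listeners: {
            stdout: (data) => { output += data.toString(); }
        }
    });
    return output;
}

Note that the captured string keeps any trailing newline the child printed; the diffed code stores the value into the LGTM_* environment variables as-is.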
1  lib/finalize-db.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"finalize-db.js","sourceRoot":"","sources":["../src/finalize-db.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,gDAAkC;AAClC,uCAAyB;AACzB,2CAA6B;AAE7B,qCAAqC;AACrC,4DAA8C;AAC9C,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAG/B,KAAK,UAAU,oBAAoB;IACjC,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAClD,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3D,+FAA+F;QAC/F,OAAO;KACR;IAED,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,OAAO,GAAG;QACd,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC;IAEF,MAAM,IAAI,CAAC,IAAI,CACb,YAAY,EACZ,CAAC,IAAI,EAAE,8EAA8E,CAAC,EACtF,OAAO,CAAC,CAAC;IACX,IAAI,CAAC,IAAI,CAAC,iCAAiC,GAAG,MAAM,CAAC,CAAC;IACtD,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAE/C,MAAM,GAAG,EAAE,CAAC;IACZ,MAAM,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,IAAI,EAAE,wCAAwC,CAAC,EAAE,OAAO,CAAC,CAAC;IACzF,IAAI,CAAC,IAAI,CAAC,oCAAoC,GAAG,MAAM,CAAC,CAAC;IACzD,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,GAAG,MAAM,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,4BAA4B,CAAC,cAAsB;IAChE,MAAM,gBAAgB,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,+BAA+B,CAAC,CAAC;IAChF,IAAI,gBAAgB,EAAE;QACpB,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,KAAK,MAAM,QAAQ,IAAI,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE;YAClD,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;YAE1C,IAAI,QAAQ,KAAK,QAAQ,EAAE;gBACzB,MAAM,oBAAoB,EAAE,CAAC;aAC9B;YAED,MAAM,MAAM,CAAC,sBAAsB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;YACnF,IAAI,CAAC,QAAQ,EAAE,CAAC;SACjB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CAAC,cAAsB,EAAE,MAA0B;IACxF,MAAM,4BAA4B,CAAC,cAAc,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;QAC1C,MAAM,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC,CAAC;QACnE,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CAAC,cAAsB,EAAE,WAAmB,EAAE,MAA0B;IAC/F,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,IAAI,QAAQ,IAAI,EAAE,CAAC,WAAW,CAAC,cAAc,CAAC,EAAE;QACnD,IAAI,CAAC,UAAU,CAAC,YAAY,GAAG,QAAQ,CAAC,CAAC;QAEzC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,GAAG,QAAQ,GAAG,gDAAgD,CAAC,CAAC;SACrG;QAED,uEAAuE;QACvE,2EAA2E;QAC3E,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,GAAG,cAAc,CAAC,CAAC;QACxE,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACxE,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;QACjD,IAAI,CAAC,KAAK,CAAC,uBAAuB,GAAG,QAAQ,GAAG,OAAO,GAAG,kBAAkB,CAAC,CAAC;QAE9E,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;QAE9D,MAAM,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,CAAC;QAEzF,IAAI,CAAC,KAAK,CAAC,6BAA6B,GAAG,QAAQ,GAAG,eAAe,GAAG,SAAS,GAAG,GAAG,CAAC,CAAC;QACzF,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,EAAE;YACnF,OAAO;SACR;QACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,CAAC;QAE7C,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,0BAA0B,EAAE,EAAE,CAAC,CAAC;QAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEzD,MAAM,cAAc,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEtF,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAC5C,MAAM,EAAE,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;QAE7B,IAAI,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;QAC1C,MAAM,wBAAwB,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;QAEvD,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAChC,MAAM,UAAU,CAAC,cAAc,EAAE,WAAW,EAAE,MA
AM,CAAC,CAAC;QAEtD,IAAI,MAAM,KAAK,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACtC,IAAI,CAAC,MAAM,UAAU,CAAC,MAAM,CAAC,WAAW,CAAC,EAAE;gBACzC,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;gBAClD,OAAO;aACR;SACF;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACpE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,QAAQ,CAAC,CAAC;AAC7C,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,yBAAyB,GAAG,CAAC,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
2  lib/finalize-db.test.js  generated  Normal file
@@ -0,0 +1,2 @@
"use strict";
//# sourceMappingURL=finalize-db.test.js.map
1  lib/finalize-db.test.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"finalize-db.test.js","sourceRoot":"","sources":["../src/finalize-db.test.ts"],"names":[],"mappings":""}
1  lib/fingerprints.js  generated
@@ -245,3 +245,4 @@ function addFingerprints(sarifContents) {
     return JSON.stringify(sarif);
 }
 exports.addFingerprints = addFingerprints;
+//# sourceMappingURL=fingerprints.js.map
1  lib/fingerprints.js.map  Normal file
File diff suppressed because one or more lines are too long
159  lib/fingerprints.test.js  generated  Normal file
@@ -0,0 +1,159 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const fingerprints = __importStar(require("./fingerprints"));
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
function testHash(t, input, expectedHashes) {
    let index = 0;
    let callback = function (lineNumber, hash) {
        t.is(lineNumber, index + 1);
        t.is(hash, expectedHashes[index]);
        index++;
    };
    fingerprints.hash(callback, input);
    t.is(index, input.split(/\r\n|\r|\n/).length);
}
ava_1.default('hash', (t) => {
    // Try empty file
    testHash(t, "", ["c129715d7a2bc9a3:1"]);
    // Try various combinations of newline characters
    testHash(t, " a\nb\n \t\tc\n d", [
        "271789c17abda88f:1",
        "54703d4cd895b18:1",
        "180aee12dab6264:1",
        "a23a3dc5e078b07b:1"
    ]);
    testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
        "8b7cf3e952e7aeb2:1",
        "b1ae1287ec4718d9:1",
        "bff680108adb0fcc:1",
        "c6805c5e1288b612:1",
        "b86d3392aea1be30:1",
        "e6ceba753e1a442:1",
    ]);
    testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    testHash(t, " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    testHash(t, " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    testHash(t, " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    // Try repeating line that will generate identical hashes
    testHash(t, "Lorem ipsum dolor sit amet.\n".repeat(10), [
        "a7f2ff13bc495cf2:1",
        "a7f2ff13bc495cf2:2",
        "a7f2ff13bc495cf2:3",
        "a7f2ff13bc495cf2:4",
        "a7f2ff13bc495cf2:5",
        "a7f2ff13bc495cf2:6",
        "a7f2ff1481e87703:1",
        "a9cf91f7bbf1862b:1",
        "55ec222b86bcae53:1",
        "cc97dc7b1d7d8f7b:1",
        "c129715d7a2bc9a3:1"
    ]);
});
function testResolveUriToFile(uri, index, artifactsURIs) {
    const location = { "uri": uri, "index": index };
    const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
    return fingerprints.resolveUriToFile(location, artifacts);
}
ava_1.default('resolveUriToFile', t => {
    // The resolveUriToFile method checks that the file exists and is in the right directory
    // so we need to give it real files to look at. We will use this file as an example.
    // For this to work we require the current working directory to be a parent, but this
    // should generally always be the case so this is fine.
    const cwd = process.cwd();
    const filepath = __filename;
    t.true(filepath.startsWith(cwd + '/'));
    const relativeFilepaht = filepath.substring(cwd.length + 1);
    process.env['GITHUB_WORKSPACE'] = cwd;
    // Absolute paths are unmodified
    t.is(testResolveUriToFile(filepath, undefined, []), filepath);
    t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
    // Relative paths are made absolute
    t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath);
    t.is(testResolveUriToFile('file://' + relativeFilepaht, undefined, []), filepath);
    // Absolute paths outside the src root are discarded
    t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
    t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
    // Other schemes are discarded
    t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
    t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
    // Invalid URIs are discarded
    t.is(testResolveUriToFile(1, undefined, []), undefined);
    t.is(testResolveUriToFile(undefined, undefined, []), undefined);
    // Non-existent files are discarded
    t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
    // Index is resolved
    t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
    t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
    // Invalid indexes are discarded
    t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
    t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
});
ava_1.default('addFingerprints', t => {
    // Run an end-to-end test on a test file
    let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
    let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
    // The test files are stored prettified, but addFingerprints outputs condensed JSON
    input = JSON.stringify(JSON.parse(input));
    expected = JSON.stringify(JSON.parse(expected));
    // The URIs in the SARIF files resolve to files in the testdata directory
    process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
    t.deepEqual(fingerprints.addFingerprints(input), expected);
});
ava_1.default('missingRegions', t => {
    // Run an end-to-end test on a test file
    let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
    let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
    // The test files are stored prettified, but addFingerprints outputs condensed JSON
    input = JSON.stringify(JSON.parse(input));
    expected = JSON.stringify(JSON.parse(expected));
    // The URIs in the SARIF files resolve to files in the testdata directory
    process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
    t.deepEqual(fingerprints.addFingerprints(input), expected);
});
//# sourceMappingURL=fingerprints.test.js.map
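A note on the expected values in the hash tests above: each fingerprint has the shape "hash:occurrence", where the counter disambiguates lines whose context hashes collide. That is why the ten identical "Lorem ipsum" lines yield "a7f2ff13bc495cf2:1" through ":6" until the hashed context begins to include the end of the file, and why every input that ends in a newline (including the empty file) finishes with "c129715d7a2bc9a3:1", the hash of a trailing empty line.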
1  lib/fingerprints.test.js.map  Normal file
File diff suppressed because one or more lines are too long
47  lib/setup-tools.js  generated
@@ -10,6 +10,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
 const toolcache = __importStar(require("@actions/tool-cache"));
 const path = __importStar(require("path"));
+const semver = __importStar(require("semver"));
 class CodeQLSetup {
     constructor(codeqlDist) {
         this.dist = codeqlDist;
@@ -19,7 +20,7 @@ class CodeQLSetup {
         if (process.platform === 'win32') {
             this.platform = 'win64';
             if (this.cmd.endsWith('codeql')) {
-                this.cmd += ".cmd";
+                this.cmd += ".exe";
             }
         }
         else if (process.platform === 'linux') {
@@ -35,17 +36,41 @@ class CodeQLSetup {
 }
 exports.CodeQLSetup = CodeQLSetup;
 async function setupCodeQL() {
-    const version = '1.0.0';
-    const codeqlURL = core.getInput('tools', { required: true });
-    let codeqlFolder = toolcache.find('CodeQL', version);
-    if (codeqlFolder) {
-        core.debug(`CodeQL found in cache ${codeqlFolder}`);
-    }
-    else {
-        const codeqlPath = await toolcache.downloadTool(codeqlURL);
-        const codeqlExtracted = await toolcache.extractTar(codeqlPath);
-        codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
-    }
-    return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
+    try {
+        const codeqlURL = core.getInput('tools', { required: true });
+        const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
+        let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
+        if (codeqlFolder) {
+            core.debug(`CodeQL found in cache ${codeqlFolder}`);
+        }
+        else {
+            const codeqlPath = await toolcache.downloadTool(codeqlURL);
+            const codeqlExtracted = await toolcache.extractTar(codeqlPath);
+            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
+        }
+        return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
+    }
+    catch (e) {
+        core.error(e);
+        throw new Error("Unable to download and extract CodeQL CLI");
+    }
 }
 exports.setupCodeQL = setupCodeQL;
+function getCodeQLURLVersion(url) {
+    const match = url.match(/\/codeql-bundle-(.*)\//);
+    if (match === null || match.length < 2) {
+        throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
+    }
+    let version = match[1];
+    if (!semver.valid(version)) {
+        core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
+        version = '0.0.0-' + version;
+    }
+    const s = semver.clean(version);
+    if (!s) {
+        throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
+    }
+    return s;
+}
+exports.getCodeQLURLVersion = getCodeQLURLVersion;
+//# sourceMappingURL=setup-tools.js.map
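The point of getCodeQLURLVersion is that toolcache.find and toolcache.cacheDir key entries by SemVer, while bundle URLs usually embed a datestamp; non-SemVer versions are therefore mapped to 0.0.0 pre-releases. From the test table further down, the mapping works out to:

getCodeQLURLVersion('https://.../codeql-bundle-20200601/...')      // => '0.0.0-20200601'
getCodeQLURLVersion('https://.../codeql-bundle-20200601.0.0/...')  // => '20200601.0.0'
getCodeQLURLVersion('https://.../codeql-bundle-1.2.3-beta.1/...')  // => '1.2.3-beta.1'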
1  lib/setup-tools.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAC7B,+CAAiC;AAEjC,MAAa,WAAW;IAMtB,YAAY,UAAkB;QAC5B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC/B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACpB;SACF;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACvC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC3B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACxC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SACzB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC7D;IACH,CAAC;CACF;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC/B,IAAI;QACF,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAExD,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAC9D,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACrD;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,gBAAgB,CAAC,CAAC;SACtF;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE3D;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAC9D;AACH,CAAC;AAnBD,kCAmBC;AAED,SAAgB,mBAAmB,CAAC,GAAW;IAE7C,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;IAClD,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,wBAAwB,GAAG,iCAAiC,CAAC,CAAC;KAC/E;IAED,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAEvB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;QAC1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,OAAO,gEAAgE,OAAO,GAAG,CAAC,CAAC;QAChH,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC;KAC9B;IAED,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,iDAAiD,OAAO,UAAU,CAAC,CAAC;KAC/G;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AApBD,kDAoBC"}
60  lib/setup-tools.test.js  generated  Normal file
@@ -0,0 +1,60 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const setupTools = __importStar(require("./setup-tools"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
    await util.withTmpDir(async (tmpDir) => {
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
        process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
        const versions = ['20200601', '20200610'];
        for (let i = 0; i < versions.length; i++) {
            const version = versions[i];
            nock_1.default('https://example.com')
                .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
                .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
            process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
            await setupTools.setupCodeQL();
            t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
        }
        const cachedVersions = toolcache.findAllVersions('CodeQL');
        t.is(cachedVersions.length, 2);
    });
});
ava_1.default('parse codeql bundle url version', t => {
    const tests = {
        '20200601': '0.0.0-20200601',
        '20200601.0': '0.0.0-20200601.0',
        '20200601.0.0': '20200601.0.0',
        '1.2.3': '1.2.3',
        '1.2.3-alpha': '1.2.3-alpha',
        '1.2.3-beta.1': '1.2.3-beta.1',
    };
    for (const [version, expectedVersion] of Object.entries(tests)) {
        const url = `https://github.com/.../codeql-bundle-${version}/...`;
        try {
            const parsedVersion = setupTools.getCodeQLURLVersion(url);
            t.deepEqual(parsedVersion, expectedVersion);
        }
        catch (e) {
            t.fail(e.message);
        }
    }
});
//# sourceMappingURL=setup-tools.test.js.map
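For readers unfamiliar with nock: it intercepts Node's HTTP layer, so toolcache.downloadTool in the test above never leaves the process and instead receives the local tarball fixture. The interception in isolation (the host is the test's own fake, and the fixture path variable is illustrative):

nock('https://example.com')
    .get('/download/codeql-bundle-20200601/codeql-bundle.tar.gz')
    .replyWithFile(200, localTarballFixturePath); // any local .tar.gz will do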
1  lib/setup-tools.test.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.test.js","sourceRoot":"","sources":["../src/setup-tools.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,0DAA4C;AAC5C,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,UAAU,CAAC,WAAW,EAAE,CAAC;YAE/B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YAC1D,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}
|
||||||
lib/setup-tracer.js | 111 lines changed (generated)
@@ -10,11 +10,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
 const exec = __importStar(require("@actions/exec"));
 const io = __importStar(require("@actions/io"));
+const toolcache = __importStar(require("@actions/tool-cache"));
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const analysisPaths = __importStar(require("./analysis-paths"));
+const codeql_1 = require("./codeql");
 const configUtils = __importStar(require("./config-utils"));
-const setuptools = __importStar(require("./setup-tools"));
 const sharedEnv = __importStar(require("./shared-environment"));
 const util = __importStar(require("./util"));
 const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
@@ -28,12 +29,7 @@ const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
     'SEMMLE_JAVA_TOOL_OPTIONS'
 ]);
 async function tracerConfig(codeql, database, compilerSpec) {
-    const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
-    let envFile = path.resolve(database, 'working', 'env.tmp');
-    await exec.exec(codeql.cmd, ['database', 'trace-command', database,
-        ...compilerSpecArg,
-        process.execPath, path.resolve(__dirname, 'tracer-env.js'), envFile]);
-    const env = JSON.parse(fs.readFileSync(envFile, 'utf-8'));
+    const env = await codeql.getTracerEnv(database, compilerSpec);
     const config = env['ODASA_TRACER_CONFIGURATION'];
     const info = { spec: config, env: {} };
     // Extract critical tracer variables from the environment
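The removed lines show what the new `codeql.getTracerEnv` wrapper replaces: running `codeql database trace-command` under a helper script that dumps the traced environment to a JSON file. A hedged sketch of such a wrapper, built only from the removed lines (the function name and `tracer-env.js` path come from the diff; the parameter names are illustrative):

import * as fs from 'fs';
import * as path from 'path';
import { exec } from '@actions/exec';

async function getTracerEnvSketch(codeqlCmd: string, database: string, compilerSpec?: string): Promise<{ [key: string]: string }> {
    const compilerSpecArg = compilerSpec ? ['--compiler-spec=' + compilerSpec] : [];
    const envFile = path.resolve(database, 'working', 'env.tmp');
    // tracer-env.js writes the process environment seen under tracing to envFile as JSON
    await exec(codeqlCmd, ['database', 'trace-command', database,
        ...compilerSpecArg,
        process.execPath, path.resolve(__dirname, 'tracer-env.js'), envFile]);
    return JSON.parse(fs.readFileSync(envFile, 'utf-8'));
}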
@@ -100,12 +96,13 @@ function concatTracerConfigs(configs) {
         totalCount += count;
         totalLines.push(...lines.slice(2));
     }
-    const newLogFilePath = path.resolve(util.workspaceFolder(), 'compound-build-tracer.log');
-    const spec = path.resolve(util.workspaceFolder(), 'compound-spec');
-    const tempFolder = path.resolve(util.workspaceFolder(), 'compound-temp');
+    const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
+    const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
+    const spec = path.resolve(tempFolder, 'compound-spec');
+    const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
     const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
     if (copyExecutables) {
-        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = tempFolder;
+        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
         envSize += 1;
     }
     fs.writeFileSync(spec, newSpecContent.join('\n'));
@@ -125,49 +122,89 @@ function concatTracerConfigs(configs) {
     fs.writeFileSync(envPath, buffer);
     return { env, spec };
 }
+async function installPythonDeps(codeql) {
+    core.startGroup('Setup Python dependencies');
+    let scriptsFolder = '';
+    try {
+        const repoPath = await toolcache.downloadTool('https://github.com/Daverlo/codeql-python-autobuild/archive/master.zip');
+        const extracted = await toolcache.extractZip(repoPath);
+        scriptsFolder = path.join(extracted, 'codeql-python-autobuild-master');
+    }
+    catch (e) {
+        // The download should not fail, but in case it fails we just abort trying to setup the python deps
+        core.warning('Unable to download and extract the scripts needed for installing the python dependecies');
+        core.endGroup();
+        return;
+    }
+    // Setup tools
+    try {
+        await exec.exec(path.join(scriptsFolder, 'install_tools.sh'));
+    }
+    catch (e) {
+        // This script tries to install some needed tools in the runner. It should not fail, but if it does
+        // we just abort the process without failing the action
+        core.warning('Unable to download and extract the scripts needed for installing the python dependecies');
+        core.endGroup();
+        return;
+    }
+    // Install dependencies
+    try {
+        await exec.exec(path.join(scriptsFolder, 'auto_install_packages.py'), [codeql.getDir()]);
+    }
+    catch (e) {
+        core.endGroup();
+        throw new Error('We were unable to install your python dependencies. You can call this action with "setup-python-dependencies: false" to disable this process');
+    }
+    core.endGroup();
+}
 async function run() {
+    let config;
+    let codeql;
     try {
         if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
             return;
         }
-        // The config file MUST be parsed in the init action
-        const config = await configUtils.loadConfig();
-        core.startGroup('Load language configuration');
-        const languages = await util.getLanguages();
-        // If the languages parameter was not given and no languages were
-        // detected then fail here as this is a workflow configuration error.
-        if (languages.length === 0) {
-            core.setFailed("Did not detect any languages to analyze. Please update input in workflow.");
-            return;
-        }
-        core.endGroup();
-        analysisPaths.includeAndExcludeAnalysisPaths(config, languages);
-        const sourceRoot = path.resolve();
         core.startGroup('Setup CodeQL tools');
-        const codeqlSetup = await setuptools.setupCodeQL();
-        await exec.exec(codeqlSetup.cmd, ['version', '--format=json']);
+        codeql = await codeql_1.setupCodeQL();
+        await codeql.printVersion();
         core.endGroup();
+        core.startGroup('Load language configuration');
+        config = await configUtils.initConfig();
+        analysisPaths.includeAndExcludeAnalysisPaths(config);
+        core.endGroup();
+    }
+    catch (e) {
+        core.setFailed(e.message);
+        await util.reportActionAborted('init', e.message);
+        return;
+    }
+    try {
+        const sourceRoot = path.resolve();
         // Forward Go flags
         const goFlags = process.env['GOFLAGS'];
         if (goFlags) {
             core.exportVariable('GOFLAGS', goFlags);
             core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
         }
+        const setupPythonDependencies = core.getInput('setup-python-dependencies', { required: true });
+        if (config.languages.includes('python') && setupPythonDependencies === 'true') {
+            await installPythonDeps(codeql);
+        }
         // Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
         const codeqlRam = process.env['CODEQL_RAM'] || '6500';
         core.exportVariable('CODEQL_RAM', codeqlRam);
-        const databaseFolder = path.resolve(util.workspaceFolder(), 'codeql_databases');
+        const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
         await io.mkdirP(databaseFolder);
         let tracedLanguages = {};
         let scannedLanguages = [];
         // TODO: replace this code once CodeQL supports multi-language tracing
-        for (let language of languages) {
+        for (let language of config.languages) {
             const languageDatabase = path.join(databaseFolder, language);
             // Init language database
-            await exec.exec(codeqlSetup.cmd, ['database', 'init', languageDatabase, '--language=' + language, '--source-root=' + sourceRoot]);
+            await codeql.databaseInit(languageDatabase, language, sourceRoot);
             // TODO: add better detection of 'traced languages' instead of using a hard coded list
            if (['cpp', 'java', 'csharp'].includes(language)) {
-                const config = await tracerConfig(codeqlSetup, languageDatabase);
+                const config = await tracerConfig(codeql, languageDatabase);
                 tracedLanguages[language] = config;
             }
             else {
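The restructured run() separates configuration errors from runtime errors: everything up to loading the config reports an aborted init, while later failures report a failed one. A reduced sketch of that control flow (the callback names are illustrative stand-ins, not the action's API):

async function runSketch(
    setup: () => Promise<void>,                       // load config, set up CodeQL tools
    initDatabases: () => Promise<void>,               // create databases, start tracer
    reportAborted: (msg: string) => Promise<void>,
    reportFailed: (msg: string) => Promise<void>,
): Promise<void> {
    try {
        await setup();
    }
    catch (e) {
        await reportAborted((e as Error).message);        // configuration problems abort early
        return;
    }
    try {
        await initDatabases();
    }
    catch (e) {
        await reportFailed((e as Error).message);         // runtime problems mark the action failed
    }
}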
@@ -183,14 +220,16 @@ async function run() {
             }
             core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
             if (process.platform === 'darwin') {
-                core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeqlSetup.tools, 'osx64', 'libtrace.dylib'));
+                core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeql.getDir(), 'tools', 'osx64', 'libtrace.dylib'));
             }
             else if (process.platform === 'win32') {
-                await exec.exec('powershell', [path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
-                    path.resolve(codeqlSetup.tools, 'win64', 'tracer.exe')], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
+                await exec.exec('powershell', [
+                    path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
+                    path.resolve(codeql.getDir(), 'tools', 'win64', 'tracer.exe'),
+                ], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
             }
             else {
-                core.exportVariable('LD_PRELOAD', path.join(codeqlSetup.tools, 'linux64', '${LIB}trace.so'));
+                core.exportVariable('LD_PRELOAD', path.join(codeql.getDir(), 'tools', 'linux64', '${LIB}trace.so'));
             }
         }
     }
@@ -198,17 +237,17 @@ async function run() {
         core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));
         // TODO: make this a "private" environment variable of the action
         core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
-        core.exportVariable(sharedEnv.CODEQL_ACTION_CMD, codeqlSetup.cmd);
     }
     catch (error) {
         core.setFailed(error.message);
         await util.reportActionFailed('init', error.message, error.stack);
         return;
     }
-    core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
     await util.reportActionSucceeded('init');
+    core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
 }
 run().catch(e => {
     core.setFailed("init action failed: " + e);
     console.log(e);
 });
+//# sourceMappingURL=setup-tracer.js.map
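The per-platform injection rewritten above is easiest to read in isolation: macOS and Linux preload the tracer library through an environment variable, while Windows injects tracer.exe via the inject-tracer.ps1 PowerShell script. A sketch of the variable selection (the variable names and relative paths come straight from the hunk; the helper function itself is illustrative):

import * as path from 'path';

function tracerPreloadVariable(codeqlDir: string): [string, string] | undefined {
    switch (process.platform) {
        case 'darwin':
            return ['DYLD_INSERT_LIBRARIES', path.join(codeqlDir, 'tools', 'osx64', 'libtrace.dylib')];
        case 'win32':
            return undefined; // handled by running inject-tracer.ps1 instead
        default:
            return ['LD_PRELOAD', path.join(codeqlDir, 'tools', 'linux64', '${LIB}trace.so')];
    }
}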
lib/setup-tracer.js.map | 1 line (new file)
File diff suppressed because one or more lines are too long
lib/shared-environment.js | 4 lines changed (generated)
@@ -1,8 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.CODEQL_ACTION_CMD = 'CODEQL_ACTION_CMD';
 exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
-exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
+exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
 exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
 exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
 exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
@@ -14,3 +13,4 @@ exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
 exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
 // Populated when the init action completes successfully
 exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
+//# sourceMappingURL=shared-environment.js.map
lib/shared-environment.js.map | 1 line (new file)
@@ -0,0 +1 @@
+{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}
lib/test-utils.js | 22 lines (generated, new file)
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+function silenceDebugOutput(test) {
+    const typedTest = test;
+    typedTest.beforeEach(t => {
+        const processStdoutWrite = process.stdout.write.bind(process.stdout);
+        t.context.write = processStdoutWrite;
+        process.stdout.write = (str, encoding, cb) => {
+            // Core library will directly call process.stdout.write for commands
+            // We don't want :: commands to be executed by the runner during tests
+            if (!str.match(/^::/)) {
+                processStdoutWrite(str, encoding, cb);
+            }
+            return true;
+        };
+    });
+    typedTest.afterEach(t => {
+        process.stdout.write = t.context.write;
+    });
+}
+exports.silenceDebugOutput = silenceDebugOutput;
+//# sourceMappingURL=test-utils.js.map
lib/test-utils.js.map | 1 line (new file)
@@ -0,0 +1 @@
+{"version":3,"file":"test-utils.js","sourceRoot":"","sources":["../src/test-utils.ts"],"names":[],"mappings":";;AAEA,SAAgB,kBAAkB,CAAC,IAAwB;IACzD,MAAM,SAAS,GAAG,IAAmC,CAAC;IAEtD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACrB,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG,kBAAkB,CAAC;QACrC,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,GAAQ,EAAE,QAAc,EAAE,EAA0B,EAAE,EAAE;YAC5E,oEAAoE;YACpE,sEAAsE;YACtE,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACnB,kBAAkB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC;aACzC;YACD,OAAO,IAAI,CAAC;QAChB,CAAC,CAAC;IACN,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;QACpB,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3C,CAAC,CAAC,CAAC;AACL,CAAC;AAnBD,gDAmBC"}
lib/testing-utils.js | 75 lines (generated, new file)
@@ -0,0 +1,75 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const sinon_1 = __importDefault(require("sinon"));
+const CodeQL = __importStar(require("./codeql"));
+function wrapOutput(context) {
+    // Function signature taken from Socket.write.
+    // Note there are two overloads:
+    // write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean;
+    // write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean;
+    return (chunk, encoding, cb) => {
+        // Work out which method overload we are in
+        if (cb === undefined && typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        // Record the output
+        if (typeof chunk === 'string') {
+            context.testOutput += chunk;
+        }
+        else {
+            context.testOutput += new TextDecoder(encoding || 'utf-8').decode(chunk);
+        }
+        // Satisfy contract by calling callback when done
+        if (cb !== undefined && typeof cb === 'function') {
+            cb();
+        }
+        return true;
+    };
+}
+function setupTests(test) {
+    const typedTest = test;
+    typedTest.beforeEach(t => {
+        // Set an empty CodeQL object so that all method calls will fail
+        // unless the test explicitly sets one up.
+        CodeQL.setCodeQL({});
+        // Replace stdout and stderr so we can record output during tests
+        t.context.testOutput = "";
+        const processStdoutWrite = process.stdout.write.bind(process.stdout);
+        t.context.stdoutWrite = processStdoutWrite;
+        process.stdout.write = wrapOutput(t.context);
+        const processStderrWrite = process.stderr.write.bind(process.stderr);
+        t.context.stderrWrite = processStderrWrite;
+        process.stderr.write = wrapOutput(t.context);
+        // Many tests modify environment variables. Take a copy now so that
+        // we reset them after the test to keep tests independent of each other.
+        // process.env only has strings fields, so a shallow copy is fine.
+        t.context.env = {};
+        Object.assign(t.context.env, process.env);
+    });
+    typedTest.afterEach.always(t => {
+        // Restore stdout and stderr
+        // The captured output is only replayed if the test failed
+        process.stdout.write = t.context.stdoutWrite;
+        process.stderr.write = t.context.stderrWrite;
+        if (!t.passed) {
+            process.stdout.write(t.context.testOutput);
+        }
+        // Undo any modifications made by sinon
+        sinon_1.default.restore();
+        // Undo any modifications to the env
+        process.env = t.context.env;
+    });
+}
+exports.setupTests = setupTests;
+//# sourceMappingURL=testing-utils.js.map
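Illustrative use of the new setupTests helper from an ava test file (the import path matches the diff; the test body itself is a placeholder):

import test from 'ava';
import { setupTests } from './testing-utils';

setupTests(test); // capture stdout/stderr, stub CodeQL, snapshot process.env

test('example', t => {
    process.env['SOME_VAR'] = 'changed'; // restored automatically in afterEach
    t.pass();
});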
lib/testing-utils.js.map | 1 line (new file)
@@ -0,0 +1 @@
+{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AAInC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"}
lib/tracer-env.js | 1 line changed (generated)
@@ -18,3 +18,4 @@ for (let entry of Object.entries(process.env)) {
 }
 process.stdout.write(process.argv[2]);
 fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
+//# sourceMappingURL=tracer-env.js.map
lib/tracer-env.js.map | 1 line (new file)
@@ -0,0 +1 @@
+{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC7C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACtF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KAClB;CACF;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}
lib/upload-lib.js | 231 lines changed (generated)
@@ -11,28 +11,15 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
-const http = __importStar(require("@actions/http-client"));
-const auth = __importStar(require("@actions/http-client/auth"));
-const io = __importStar(require("@actions/io"));
 const file_url_1 = __importDefault(require("file-url"));
 const fs = __importStar(require("fs"));
+const jsonschema = __importStar(require("jsonschema"));
 const path = __importStar(require("path"));
 const zlib_1 = __importDefault(require("zlib"));
+const api = __importStar(require("./api-client"));
 const fingerprints = __importStar(require("./fingerprints"));
 const sharedEnv = __importStar(require("./shared-environment"));
 const util = __importStar(require("./util"));
-// Construct the location of the sentinel file for detecting multiple uploads.
-// The returned location should be writable.
-async function getSentinelFilePath() {
-    // Use the temp dir instead of placing next to the sarif file because of
-    // issues with docker actions. The directory containing the sarif file
-    // may not be writable by us.
-    const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
-    await io.mkdirP(uploadsTmpDir);
-    // Hash the absolute path so we'll behave correctly in the unlikely
-    // scenario a file is referenced twice with different paths.
-    return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
-}
 // Takes a list of paths to sarif files and combines them together,
 // returning the contents of the combined sarif file.
 function combineSarifFiles(sarifFiles) {
@@ -54,89 +41,169 @@ function combineSarifFiles(sarifFiles) {
     return JSON.stringify(combinedSarif);
 }
 exports.combineSarifFiles = combineSarifFiles;
+// Upload the given payload.
+// If the request fails then this will retry a small number of times.
+async function uploadPayload(payload) {
+    core.info('Uploading results');
+    // If in test mode we don't want to upload the results
+    const testMode = process.env['TEST_MODE'] === 'true' || false;
+    if (testMode) {
+        return true;
+    }
+    const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
+    // Make up to 4 attempts to upload, and sleep for these
+    // number of seconds between each attempt.
+    // We don't want to backoff too much to avoid wasting action
+    // minutes, but just waiting a little bit could maybe help.
+    const backoffPeriods = [1, 5, 15];
+    for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
+        const response = await api.getApiClient().request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
+            owner: owner,
+            repo: repo,
+            data: payload,
+        }));
+        core.debug('response status: ' + response.status);
+        const statusCode = response.status;
+        if (statusCode === 202) {
+            core.info("Successfully uploaded results");
+            return true;
+        }
+        const requestID = response.headers["x-github-request-id"];
+        // On any other status code that's not 5xx mark the upload as failed
+        if (!statusCode || statusCode < 500 || statusCode >= 600) {
+            core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
+            return false;
+        }
+        // On a 5xx status code we may retry the request
+        if (attempt < backoffPeriods.length) {
+            // Log the failure as a warning but don't mark the action as failed yet
+            core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
+                ') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
+                ' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
+            // Sleep for the backoff period
+            await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
+            continue;
+        }
+        else {
+            // If the upload fails with 5xx then we assume it is a temporary problem
+            // and not an error that the user has caused or can fix.
+            // We avoid marking the job as failed to avoid breaking CI workflows.
+            core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
+            return false;
+        }
+    }
+    return false;
+}
 // Uploads a single sarif file or a directory of sarif files
 // depending on what the path happens to refer to.
+// Returns true iff the upload occurred and succeeded
 async function upload(input) {
     if (fs.lstatSync(input).isDirectory()) {
         const sarifFiles = fs.readdirSync(input)
             .filter(f => f.endsWith(".sarif"))
             .map(f => path.resolve(input, f));
-        await uploadFiles(sarifFiles);
+        if (sarifFiles.length === 0) {
+            core.setFailed("No SARIF files found to upload in \"" + input + "\".");
+            return false;
+        }
+        return await uploadFiles(sarifFiles);
     }
     else {
-        await uploadFiles([input]);
+        return await uploadFiles([input]);
     }
 }
 exports.upload = upload;
+// Counts the number of results in the given SARIF file
+function countResultsInSarif(sarif) {
+    let numResults = 0;
+    for (const run of JSON.parse(sarif).runs) {
+        numResults += run.results.length;
+    }
+    return numResults;
+}
+exports.countResultsInSarif = countResultsInSarif;
+// Validates that the given file path refers to a valid SARIF file.
+// Returns a non-empty list of error message if the file is invalid,
+// otherwise returns the empty list if the file is valid.
+function validateSarifFileSchema(sarifFilePath) {
+    const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
+    const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
+    const result = new jsonschema.Validator().validate(sarif, schema);
+    if (result.valid) {
+        return true;
+    }
+    else {
+        // Set the failure message to the stacks of all the errors.
+        // This should be of a manageable size and may even give enough to fix the error.
+        const errorMessages = result.errors.map(e => "- " + e.stack);
+        core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));
+        // Also output the more verbose error messages in groups as these may be very large.
+        for (const error of result.errors) {
+            core.startGroup("Error details: " + error.stack);
+            core.info(JSON.stringify(error, null, 2));
+            core.endGroup();
+        }
+        return false;
+    }
+}
+exports.validateSarifFileSchema = validateSarifFileSchema;
 // Uploads the given set of sarif files.
+// Returns true iff the upload occurred and succeeded
 async function uploadFiles(sarifFiles) {
     core.startGroup("Uploading results");
-    try {
-        // Check if an upload has happened before. If so then abort.
-        // This is intended to catch when the finish and upload-sarif actions
-        // are used together, and then the upload-sarif action is invoked twice.
-        const sentinelFile = await getSentinelFilePath();
-        if (fs.existsSync(sentinelFile)) {
-            core.info("Aborting as an upload has already happened from this job");
-            return;
-        }
-        const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
-        const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
-        const ref = util.getRequiredEnvParam('GITHUB_REF'); // it's in the form "refs/heads/master"
-        const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
-        const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
-        core.debug("Uploading sarif files: " + JSON.stringify(sarifFiles));
-        let sarifPayload = combineSarifFiles(sarifFiles);
-        sarifPayload = fingerprints.addFingerprints(sarifPayload);
-        const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
-        let checkoutPath = core.getInput('checkout_path');
-        let checkoutURI = file_url_1.default(checkoutPath);
-        const workflowRunID = parseInt(workflowRunIDStr, 10);
-        if (Number.isNaN(workflowRunID)) {
-            core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
-            return;
-        }
-        let matrix = core.getInput('matrix');
-        if (matrix === "null" || matrix === "") {
-            matrix = undefined;
-        }
-        const toolNames = util.getToolNames(sarifPayload);
-        const payload = JSON.stringify({
-            "commit_oid": commitOid,
-            "ref": ref,
-            "analysis_name": analysisName,
-            "sarif": zipped_sarif,
-            "workflow_run_id": workflowRunID,
-            "checkout_uri": checkoutURI,
-            "environment": matrix,
-            "started_at": startedAt,
-            "tool_names": toolNames,
-        });
-        core.info('Uploading results');
-        const githubToken = core.getInput('token');
-        const ph = new auth.BearerCredentialHandler(githubToken);
-        const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
-        const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';
-        const res = await client.put(url, payload);
-        const requestID = res.message.headers["x-github-request-id"];
-        core.debug('response status: ' + res.message.statusCode);
-        if (res.message.statusCode === 500) {
-            // If the upload fails with 500 then we assume it is a temporary problem
-            // with turbo-scan and not an error that the user has caused or can fix.
-            // We avoid marking the job as failed to avoid breaking CI workflows.
-            core.error('Upload failed (' + requestID + '): ' + await res.readBody());
-        }
-        else if (res.message.statusCode !== 202) {
-            core.setFailed('Upload failed (' + requestID + '): ' + await res.readBody());
-        }
-        else {
-            core.info("Successfully uploaded results");
-        }
-        // Mark that we have made an upload
-        fs.writeFileSync(sentinelFile, '');
+    core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
+    const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
+    if (process.env[sentinelEnvVar]) {
+        core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
+        return false;
     }
-    catch (error) {
-        core.setFailed(error.message);
+    core.exportVariable(sentinelEnvVar, sentinelEnvVar);
+    // Validate that the files we were asked to upload are all valid SARIF files
+    for (const file of sarifFiles) {
+        if (!validateSarifFileSchema(file)) {
+            return false;
+        }
     }
+    const commitOid = await util.getCommitOid();
+    const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
+    const ref = util.getRef();
+    const analysisKey = await util.getAnalysisKey();
+    const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
+    const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
+    let sarifPayload = combineSarifFiles(sarifFiles);
+    sarifPayload = fingerprints.addFingerprints(sarifPayload);
+    const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
+    let checkoutPath = core.getInput('checkout_path');
+    let checkoutURI = file_url_1.default(checkoutPath);
+    const workflowRunID = parseInt(workflowRunIDStr, 10);
+    if (Number.isNaN(workflowRunID)) {
+        core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
+        return false;
+    }
+    let matrix = core.getInput('matrix');
+    if (matrix === "null" || matrix === "") {
+        matrix = undefined;
+    }
+    const toolNames = util.getToolNames(sarifPayload);
+    const payload = JSON.stringify({
+        "commit_oid": commitOid,
+        "ref": ref,
+        "analysis_key": analysisKey,
+        "analysis_name": analysisName,
+        "sarif": zipped_sarif,
+        "workflow_run_id": workflowRunID,
+        "checkout_uri": checkoutURI,
+        "environment": matrix,
+        "started_at": startedAt,
+        "tool_names": toolNames,
+    });
+    // Log some useful debug info about the info
+    core.debug("Raw upload size: " + sarifPayload.length + " bytes");
+    core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
+    core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
+    // Make the upload
+    const succeeded = await uploadPayload(payload);
     core.endGroup();
+    return succeeded;
 }
+//# sourceMappingURL=upload-lib.js.map
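The retry policy added in uploadPayload, reduced to a standalone sketch: up to four attempts, sleeping 1, 5 and 15 seconds between them, retrying only on 5xx responses (the `put` callback is a stand-in for the code scanning API request):

async function uploadWithRetries(put: () => Promise<number>): Promise<boolean> {
    const backoffPeriods = [1, 5, 15];
    for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
        const statusCode = await put();
        if (statusCode === 202) {
            return true;                  // accepted by the code scanning API
        }
        if (statusCode < 500 || statusCode >= 600) {
            return false;                 // non-5xx errors are not retried
        }
        if (attempt < backoffPeriods.length) {
            // transient server error: wait and try again
            await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
        }
    }
    return false;                         // all attempts ended in 5xx
}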
lib/upload-lib.js.map | 1 line (new file)
File diff suppressed because one or more lines are too long
lib/upload-lib.test.js | 27 lines (generated, new file)
@@ -0,0 +1,27 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ava_1 = __importDefault(require("ava"));
+const testing_utils_1 = require("./testing-utils");
+const uploadLib = __importStar(require("./upload-lib"));
+testing_utils_1.setupTests(ava_1.default);
+ava_1.default('validateSarifFileSchema - valid', t => {
+    const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
+    t.true(uploadLib.validateSarifFileSchema(inputFile));
+});
+ava_1.default('validateSarifFileSchema - invalid', t => {
+    const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
+    t.false(uploadLib.validateSarifFileSchema(inputFile));
+    // validateSarifFileSchema calls core.setFailed which sets the exit code on error
+    process.exitCode = 0;
+});
+//# sourceMappingURL=upload-lib.test.js.map
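The validation these tests exercise boils down to running the combined SARIF through the jsonschema validator, as in this standalone sketch (the function name and file paths are illustrative; the real validateSarifFileSchema also reports errors through @actions/core and returns a boolean):

import * as fs from 'fs';
import * as jsonschema from 'jsonschema';

function sarifSchemaErrors(sarifPath: string, schemaPath: string): string[] {
    const sarif = JSON.parse(fs.readFileSync(sarifPath, 'utf8'));
    const schema = JSON.parse(fs.readFileSync(schemaPath, 'utf8'));
    const result = new jsonschema.Validator().validate(sarif, schema);
    // One message per validation error; an empty list means the file is valid.
    return result.errors.map(e => '- ' + e.stack);
}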
lib/upload-lib.test.js.map | 1 line (new file)
@@ -0,0 +1 @@
+{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;IACtD,iFAAiF;IACjF,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}
lib/upload-sarif.js | 11 lines changed (generated)
@@ -15,16 +15,21 @@ async function run() {
         return;
     }
     try {
-        await upload_lib.upload(core.getInput('sarif_file'));
+        if (await upload_lib.upload(core.getInput('sarif_file'))) {
+            await util.reportActionSucceeded('upload-sarif');
+        }
+        else {
+            await util.reportActionFailed('upload-sarif', 'upload');
+        }
     }
     catch (error) {
         core.setFailed(error.message);
         await util.reportActionFailed('upload-sarif', error.message, error.stack);
         return;
     }
-    await util.reportActionSucceeded('upload-sarif');
 }
 run().catch(e => {
-    core.setFailed("upload-sarif action failed: " + e);
+    core.setFailed("codeql/upload-sarif action failed: " + e);
     console.log(e);
 });
+//# sourceMappingURL=upload-sarif.js.map
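The change above makes upload-sarif report success or failure based on upload()'s new boolean result rather than assuming success whenever no exception is thrown. A sketch of the contract (the declarations are stand-ins for the real imports):

declare function upload(input: string): Promise<boolean>;
declare function reportActionSucceeded(action: string): Promise<void>;
declare function reportActionFailed(action: string, cause: string): Promise<void>;

async function runSketch(): Promise<void> {
    if (await upload('results.sarif')) {
        await reportActionSucceeded('upload-sarif');
    }
    else {
        // a false return means the upload was rejected or skipped, not thrown
        await reportActionFailed('upload-sarif', 'upload');
    }
}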
lib/upload-sarif.js.map | 1 line (new file)
@@ -0,0 +1 @@
+{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,GAAG;IAChB,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,EAAE;QAChG,OAAO;KACR;IAED,IAAI;QACF,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE;YACxD,MAAM,IAAI,CAAC,qBAAqB,CAAC,cAAc,CAAC,CAAC;SAClD;aAAM;YACL,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;SACzD;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1E,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
lib/util.js | 289 lines changed (generated)
@@ -6,16 +6,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
     result["default"] = mod;
     return result;
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
-const http = __importStar(require("@actions/http-client"));
-const auth = __importStar(require("@actions/http-client/auth"));
-const octokit = __importStar(require("@octokit/rest"));
-const console_log_level_1 = __importDefault(require("console-log-level"));
+const exec = __importStar(require("@actions/exec"));
+const fs = __importStar(require("fs"));
+const os = __importStar(require("os"));
 const path = __importStar(require("path"));
+const api = __importStar(require("./api-client"));
+const configUtils = __importStar(require("./config-utils"));
 const sharedEnv = __importStar(require("./shared-environment"));
 /**
  * Should the current action be aborted?
@@ -31,12 +29,6 @@ function should_abort(actionName, requireInitActionHasRun) {
         core.setFailed('GITHUB_REF must be set.');
         return true;
     }
-    // Should abort if called on a merge commit for a pull request.
-    if (ref.startsWith('refs/pull/')) {
-        core.warning('The CodeQL ' + actionName + ' action is intended for workflows triggered on `push` events, '
-            + 'but the current workflow is running on a pull request. Aborting.');
-        return true;
-    }
     // If the init action is required, then check the it completed successfully.
     if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
         core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
@@ -45,22 +37,12 @@ function should_abort(actionName, requireInitActionHasRun) {
     return false;
 }
 exports.should_abort = should_abort;
-/**
- * Resolve the path to the workspace folder.
- */
-function workspaceFolder() {
-    let workspaceFolder = process.env['RUNNER_WORKSPACE'];
-    if (!workspaceFolder)
-        workspaceFolder = path.resolve('..');
-    return workspaceFolder;
-}
-exports.workspaceFolder = workspaceFolder;
 /**
  * Get an environment parameter, but throw an error if it is not set.
  */
 function getRequiredEnvParam(paramName) {
     const value = process.env[paramName];
-    if (value === undefined) {
+    if (value === undefined || value.length === 0) {
         throw new Error(paramName + ' environment variable must be set');
     }
     core.debug(paramName + '=' + value);
@@ -68,87 +50,90 @@ function getRequiredEnvParam(paramName) {
 }
 exports.getRequiredEnvParam = getRequiredEnvParam;
 /**
- * Gets the set of languages in the current repository
+ * Gets the SHA of the commit that is currently checked out.
  */
-async function getLanguagesInRepo() {
-    var _a;
-    // Translate between GitHub's API names for languages and ours
-    const codeqlLanguages = {
-        'C': 'cpp',
-        'C++': 'cpp',
-        'C#': 'csharp',
-        'Go': 'go',
-        'Java': 'java',
-        'JavaScript': 'javascript',
-        'TypeScript': 'javascript',
-        'Python': 'python',
-    };
-    let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
-    if (repo_nwo) {
-        let owner = repo_nwo[0];
-        let repo = repo_nwo[1];
-        core.debug(`GitHub repo ${owner} ${repo}`);
-        let ok = new octokit.Octokit({
-            auth: core.getInput('token'),
-            userAgent: "CodeQL Action",
-            log: console_log_level_1.default({ level: "debug" })
-        });
-        const response = await ok.request("GET /repos/:owner/:repo/languages", ({
-            owner,
-            repo
-        }));
-        core.debug("Languages API response: " + JSON.stringify(response));
-        // The GitHub API is going to return languages in order of popularity,
-        // When we pick a language to autobuild we want to pick the most popular traced language
-        // Since sets in javascript maintain insertion order, using a set here and then splatting it
-        // into an array gives us an array of languages ordered by popularity
-        let languages = new Set();
-        for (let lang in response.data) {
-            if (lang in codeqlLanguages) {
-                languages.add(codeqlLanguages[lang]);
+async function getCommitOid() {
+    // Try to use git to get the current commit SHA. If that fails then
+    // log but otherwise silently fall back to using the SHA from the environment.
+    // The only time these two values will differ is during analysis of a PR when
+    // the workflow has changed the current commit to the head commit instead of
+    // the merge commit, which must mean that git is available.
+    // Even if this does go wrong, it's not a huge problem for the alerts to
+    // reported on the merge commit.
+    try {
+        let commitOid = '';
+        await exec.exec('git', ['rev-parse', 'HEAD'], {
+            silent: true,
+            listeners: {
+                stdout: (data) => { commitOid += data.toString(); },
+                stderr: (data) => { process.stderr.write(data); }
             }
-        }
-        return [...languages];
+        });
+        return commitOid.trim();
     }
-    else {
-        return [];
+    catch (e) {
+        core.info("Failed to call git to get current commit. Continuing with data from environment: " + e);
+        return getRequiredEnvParam('GITHUB_SHA');
     }
 }
+exports.getCommitOid = getCommitOid;
+/**
+ * Get the path of the currently executing workflow.
+ */
+async function getWorkflowPath() {
+    const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
+    const owner = repo_nwo[0];
+    const repo = repo_nwo[1];
+    const run_id = Number(getRequiredEnvParam('GITHUB_RUN_ID'));
+    const apiClient = api.getApiClient();
+    const runsResponse = await apiClient.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
+        owner,
+        repo,
+        run_id
+    });
+    const workflowUrl = runsResponse.data.workflow_url;
+    const workflowResponse = await apiClient.request('GET ' + workflowUrl);
+    return workflowResponse.data.path;
+}
 /**
- * Get the languages to analyse.
+ * Get the analysis key paramter for the current job.
  *
- * The result is obtained from the environment parameter CODEQL_ACTION_LANGUAGES
- * if that has been set, otherwise it is obtained from the action input parameter
- * 'languages' if that has been set, otherwise it is deduced as all languages in the
- * repo that can be analysed.
- *
- * If the languages are obtained from either of the second choices, the
- * CODEQL_ACTION_LANGUAGES environment variable will be exported with the
- * deduced list.
+ * This will combine the workflow path and current job name.
+ * Computing this the first time requires making requests to
+ * the github API, but after that the result will be cached.
  */
-async function getLanguages() {
-    // Obtain from CODEQL_ACTION_LANGUAGES if set
-    const langsVar = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES];
-    if (langsVar) {
-        return langsVar.split(',')
-            .map(x => x.trim())
-            .filter(x => x.length > 0);
+async function getAnalysisKey() {
+    let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
+    if (analysisKey !== undefined) {
+        return analysisKey;
     }
-    // Obtain from action input 'languages' if set
-    let languages = core.getInput('languages', { required: false })
-        .split(',')
-        .map(x => x.trim())
-        .filter(x => x.length > 0);
-    core.info("Languages from configuration: " + JSON.stringify(languages));
-    if (languages.length === 0) {
-        // Obtain languages as all languages in the repo that can be analysed
-        languages = await getLanguagesInRepo();
-        core.info("Automatically detected languages: " + JSON.stringify(languages));
-    }
-    core.exportVariable(sharedEnv.CODEQL_ACTION_LANGUAGES, languages.join(','));
-    return languages;
+    const workflowPath = await getWorkflowPath();
+    const jobName = getRequiredEnvParam('GITHUB_JOB');
+    analysisKey = workflowPath + ':' + jobName;
+    core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
+    return analysisKey;
 }
-exports.getLanguages = getLanguages;
+exports.getAnalysisKey = getAnalysisKey;
+/**
+ * Get the ref currently being analyzed.
+ */
+function getRef() {
+    // Will be in the form "refs/heads/master" on a push event
+    // or in the form "refs/pull/N/merge" on a pull_request event
+    const ref = getRequiredEnvParam('GITHUB_REF');
+    // For pull request refs we want to convert from the 'merge' ref
+    // to the 'head' ref, as that is what we want to analyse.
+    // There should have been some code earlier in the workflow to do
+    // the checkout, but we have no way of verifying that here.
+    const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
+    if (pull_ref_regex.test(ref)) {
+        return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
+    }
+    else {
+        return ref;
+    }
+}
+exports.getRef = getRef;
 /**
  * Compose a StatusReport.
  *
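Two helpers added in this hunk are easiest to see in isolation: getRef rewrites pull request merge refs to the head ref that was actually checked out, and getAnalysisKey combines the workflow path with the job name (e.g. a hypothetical ".github/workflows/codeql.yml:analyze"). A sketch of the ref conversion, using the regex from the diff (`toAnalyzedRef` is an illustrative name):

const pullRefRegex = /refs\/pull\/(\d+)\/merge/;

function toAnalyzedRef(ref: string): string {
    return pullRefRegex.test(ref)
        ? ref.replace(pullRefRegex, 'refs/pull/$1/head')
        : ref;
}

console.log(toAnalyzedRef('refs/pull/123/merge')); // "refs/pull/123/head"
console.log(toAnalyzedRef('refs/heads/master'));   // unchanged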
@@ -158,7 +143,20 @@ exports.getLanguages = getLanguages;
  * @param exception Exception (only supply if status is 'failure')
  */
 async function createStatusReport(actionName, status, cause, exception) {
+    var _a, _b;
+    // If this is not the init action starting up or aborting then try to load the config.
+    // If it fails then carry because it's important to still send the status report.
+    let config = undefined;
+    if (actionName !== 'init' || (status !== 'starting' && status !== 'aborted')) {
+        try {
+            config = await configUtils.getConfig();
+        }
+        catch (e) {
+            core.error('Unable to load config: ' + e);
+        }
+    }
     const commitOid = process.env['GITHUB_SHA'] || '';
+    const ref = getRef();
     const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
     let workflowRunID = -1;
     if (workflowRunIDStr) {
@@ -166,15 +164,18 @@ async function createStatusReport(actionName, status, cause, exception) {
     }
     const workflowName = process.env['GITHUB_WORKFLOW'] || '';
     const jobName = process.env['GITHUB_JOB'] || '';
-    const languages = (await getLanguages()).sort().join(',');
+    const analysis_key = await getAnalysisKey();
+    const languages = ((_b = (_a = config) === null || _a === void 0 ? void 0 : _a.languages) === null || _b === void 0 ? void 0 : _b.join(',')) || "";
     const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT] || new Date().toISOString();
     core.exportVariable(sharedEnv.CODEQL_ACTION_STARTED_AT, startedAt);
     let statusReport = {
         workflow_run_id: workflowRunID,
         workflow_name: workflowName,
         job_name: jobName,
+        analysis_key: analysis_key,
         languages: languages,
         commit_oid: commitOid,
+        ref: ref,
         action_name: actionName,
         action_oid: "unknown",
         started_at: startedAt,
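The dense `languages` expression added above is TypeScript's downlevel emit for optional chaining. Written in source form it is equivalent to the following sketch, assuming `config` is either undefined or an object whose `languages` property, if present, is an array of strings:

```js
// Hand-written equivalent of the compiled expression above.
const languages = config?.languages?.join(',') || "";
```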
@@ -187,7 +188,7 @@ async function createStatusReport(actionName, status, cause, exception) {
     if (exception) {
         statusReport.exception = exception;
     }
-    if (status === 'success' || status === 'failure') {
+    if (status === 'success' || status === 'failure' || status === 'aborted') {
         statusReport.completed_at = new Date().toISOString();
     }
     let matrix = core.getInput('matrix');
@@ -199,21 +200,19 @@ async function createStatusReport(actionName, status, cause, exception) {
 /**
  * Send a status report to the code_scanning/analysis/status endpoint.
  *
- * Returns the status code of the response to the status request, or
- * undefined if the given statusReport is undefined or no response was
- * received.
+ * Returns the status code of the response to the status request.
  */
 async function sendStatusReport(statusReport) {
-    var _a;
     const statusReportJSON = JSON.stringify(statusReport);
     core.debug('Sending status report: ' + statusReportJSON);
-    const githubToken = core.getInput('token');
-    const ph = new auth.BearerCredentialHandler(githubToken);
-    const client = new http.HttpClient('Code Scanning : Status Report', [ph]);
-    const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY']
-        + '/code-scanning/analysis/status';
-    const res = await client.put(url, statusReportJSON);
-    return (_a = res.message) === null || _a === void 0 ? void 0 : _a.statusCode;
+    const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
+    const [owner, repo] = nwo.split("/");
+    const statusResponse = await api.getApiClient().request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
+        owner: owner,
+        repo: repo,
+        data: statusReportJSON,
+    });
+    return statusResponse.status;
 }
 /**
  * Send a status report that an action is starting.
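The rewritten `sendStatusReport` routes the PUT through the action's shared API client (`api.getApiClient()` is this repository's own wrapper). For illustration only, a hedged sketch of the same request shape issued with a plain `@octokit/rest` client; the token and repository below are placeholders, and the endpoint itself is GitHub's internal code scanning status API, so it may not be callable outside Actions:

```js
// Sketch only: the same PUT issued with a plain @octokit/rest client.
// "ghp_example" and "octo-org/octo-repo" are placeholder values.
const { Octokit } = require("@octokit/rest");

async function putStatusReport(statusReportJSON) {
    const octokit = new Octokit({ auth: "ghp_example" });
    const [owner, repo] = "octo-org/octo-repo".split("/");
    const response = await octokit.request(
        "PUT /repos/:owner/:repo/code-scanning/analysis/status",
        { owner, repo, data: statusReportJSON });
    return response.status; // e.g. 200 on success
}
```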
@@ -262,6 +261,16 @@ async function reportActionSucceeded(action) {
     await sendStatusReport(await createStatusReport(action, 'success'));
 }
 exports.reportActionSucceeded = reportActionSucceeded;
+/**
+ * Report that an action has been aborted.
+ *
+ * Note that the started_at date is always that of the `init` action, since
+ * this is likely to give a more useful duration when inspecting events.
+ */
+async function reportActionAborted(action, cause) {
+    await sendStatusReport(await createStatusReport(action, 'aborted', cause));
+}
+exports.reportActionAborted = reportActionAborted;
 /**
  * Get the array of all the tool names contained in the given sarif contents.
  *
@@ -280,3 +289,67 @@ function getToolNames(sarifContents) {
     return Object.keys(toolNames);
 }
 exports.getToolNames = getToolNames;
+// Creates a random temporary directory, runs the given body, and then deletes the directory.
+// Mostly intended for use within tests.
+async function withTmpDir(body) {
+    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
+    const realSubdir = path.join(tmpDir, 'real');
+    fs.mkdirSync(realSubdir);
+    const symlinkSubdir = path.join(tmpDir, 'symlink');
+    fs.symlinkSync(realSubdir, symlinkSubdir, 'dir');
+    const result = await body(symlinkSubdir);
+    fs.rmdirSync(tmpDir, { recursive: true });
+    return result;
+}
+exports.withTmpDir = withTmpDir;
+/**
+ * Get the codeql `--ram` flag as configured by the `ram` input. If no value was
+ * specified, the total available memory will be used minus 256 MB.
+ *
+ * @returns string
+ */
+function getMemoryFlag() {
+    let memoryToUseMegaBytes;
+    const memoryToUseString = core.getInput("ram");
+    if (memoryToUseString) {
+        memoryToUseMegaBytes = Number(memoryToUseString);
+        if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
+            throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
+        }
+    }
+    else {
+        const totalMemoryBytes = os.totalmem();
+        const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
+        const systemReservedMemoryMegaBytes = 256;
+        memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
+    }
+    return "--ram=" + Math.floor(memoryToUseMegaBytes);
+}
+exports.getMemoryFlag = getMemoryFlag;
+/**
+ * Get the codeql `--threads` value specified for the `threads` input. The value
+ * defaults to 1. The value will be capped to the number of available CPUs.
+ *
+ * @returns string
+ */
+function getThreadsFlag() {
+    let numThreads = 1;
+    const numThreadsString = core.getInput("threads");
+    if (numThreadsString) {
+        numThreads = Number(numThreadsString);
+        if (Number.isNaN(numThreads)) {
+            throw new Error(`Invalid threads setting "${numThreadsString}", specified.`);
+        }
+        const maxThreads = os.cpus().length;
+        if (numThreads > maxThreads) {
+            numThreads = maxThreads;
+        }
+        const minThreads = -maxThreads;
+        if (numThreads < minThreads) {
+            numThreads = minThreads;
+        }
+    }
+    return `--threads=${numThreads}`;
+}
+exports.getThreadsFlag = getThreadsFlag;
+//# sourceMappingURL=util.js.map
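To make the two new flag helpers concrete: on a hypothetical runner with 8 GB of RAM and 2 CPUs (illustrative numbers, not a real runner spec), the defaults work out as below. This standalone sketch re-derives the same arithmetic rather than calling the action's code:

```js
// Standalone sketch of the default --ram / --threads arithmetic above.
const totalMemoryMegaBytes = 8 * 1024;         // stand-in for os.totalmem() / (1024 * 1024)
const systemReservedMemoryMegaBytes = 256;
console.log("--ram=" + Math.floor(totalMemoryMegaBytes - systemReservedMemoryMegaBytes)); // --ram=7936

const maxThreads = 2;                          // stand-in for os.cpus().length
let numThreads = 5;                            // requested via the `threads` input
numThreads = Math.min(numThreads, maxThreads); // capped to the available CPUs
console.log(`--threads=${numThreads}`);        // --threads=2
```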
1 lib/util.js.map Normal file
File diff suppressed because one or more lines are too long
64 lib/util.test.js generated Normal file
@@ -0,0 +1,64 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ava_1 = __importDefault(require("ava"));
+const fs = __importStar(require("fs"));
+const os = __importStar(require("os"));
+const testing_utils_1 = require("./testing-utils");
+const util = __importStar(require("./util"));
+testing_utils_1.setupTests(ava_1.default);
+ava_1.default('getToolNames', t => {
+    const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
+    const toolNames = util.getToolNames(input);
+    t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
+});
+ava_1.default('getMemoryFlag() should return the correct --ram flag', t => {
+    const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
+    const tests = {
+        "": `--ram=${totalMem - 256}`,
+        "512": "--ram=512",
+    };
+    for (const [input, expectedFlag] of Object.entries(tests)) {
+        process.env['INPUT_RAM'] = input;
+        const flag = util.getMemoryFlag();
+        t.deepEqual(flag, expectedFlag);
+    }
+});
+ava_1.default('getMemoryFlag() throws if the ram input is < 0 or NaN', t => {
+    for (const input of ["-1", "hello!"]) {
+        process.env['INPUT_RAM'] = input;
+        t.throws(util.getMemoryFlag);
+    }
+});
+ava_1.default('getThreadsFlag() should return the correct --threads flag', t => {
+    const numCpus = os.cpus().length;
+    const tests = {
+        "0": "--threads=0",
+        "1": "--threads=1",
+        [`${numCpus + 1}`]: `--threads=${numCpus}`,
+        [`${-numCpus - 1}`]: `--threads=${-numCpus}`
+    };
+    for (const [input, expectedFlag] of Object.entries(tests)) {
+        process.env['INPUT_THREADS'] = input;
+        const flag = util.getThreadsFlag();
+        t.deepEqual(flag, expectedFlag);
+    }
+});
+ava_1.default('getThreadsFlag() throws if the threads input is not an integer', t => {
+    process.env['INPUT_THREADS'] = "hello!";
+    t.throws(util.getThreadsFlag);
+});
+ava_1.default('getRef() throws on the empty string', t => {
+    process.env["GITHUB_REF"] = "";
+    t.throws(util.getRef);
+});
+//# sourceMappingURL=util.test.js.map
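The tests above can set `INPUT_RAM` and `INPUT_THREADS` directly because `@actions/core` resolves an input named `foo` from the environment variable `INPUT_FOO` (uppercased, with spaces replaced by underscores), which is exactly what the Actions runner sets for `with:` values. A minimal sketch of that convention:

```js
// Setting INPUT_RAM here mimics what the runner does for `with: ram: 512`.
const core = require('@actions/core');

process.env['INPUT_RAM'] = '512';
console.log(core.getInput('ram')); // "512"
```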
1 lib/util.test.js.map Normal file
File diff suppressed because one or more lines are too long (generated source map for lib/util.test.js)
1 node_modules/.bin/atob generated vendored
@@ -1 +0,0 @@
-../atob/bin/atob.js

1 node_modules/.bin/ava generated vendored Symbolic link
@@ -0,0 +1 @@
+../ava/cli.js

1 node_modules/.bin/escodegen generated vendored
@@ -1 +0,0 @@
-../escodegen/bin/escodegen.js

1 node_modules/.bin/esgenerate generated vendored
@@ -1 +0,0 @@
-../escodegen/bin/esgenerate.js

1 node_modules/.bin/esparse generated vendored
@@ -1 +0,0 @@
-../esprima/bin/esparse.js

1 node_modules/.bin/esvalidate generated vendored
@@ -1 +0,0 @@
-../esprima/bin/esvalidate.js

1 node_modules/.bin/jest generated vendored
@@ -1 +0,0 @@
-../jest/bin/jest.js

1 node_modules/.bin/jest-runtime generated vendored
@@ -1 +0,0 @@
-../jest-runtime/bin/jest-runtime.js

1 node_modules/.bin/jsesc generated vendored
@@ -1 +0,0 @@
-../jsesc/bin/jsesc

1 node_modules/.bin/json5 generated vendored
@@ -1 +0,0 @@
-../json5/lib/cli.js

1 node_modules/.bin/parser generated vendored
@@ -1 +0,0 @@
-../@babel/parser/bin/babel-parser.js

1 node_modules/.bin/rc generated vendored Symbolic link
@@ -0,0 +1 @@
+../rc/cli.js

1 node_modules/.bin/removeNPMAbsolutePaths generated vendored Symbolic link
@@ -0,0 +1 @@
+../removeNPMAbsolutePaths/bin/removeNPMAbsolutePaths

1 node_modules/.bin/sane generated vendored
@@ -1 +0,0 @@
-../sane/src/cli.js

2 node_modules/.bin/semver generated vendored
@@ -1 +1 @@
-../semver/bin/semver
+../semver/bin/semver.js

1 node_modules/.bin/sshpk-conv generated vendored
@@ -1 +0,0 @@
-../sshpk/bin/sshpk-conv

1 node_modules/.bin/sshpk-sign generated vendored
@@ -1 +0,0 @@
-../sshpk/bin/sshpk-sign

1 node_modules/.bin/sshpk-verify generated vendored
@@ -1 +0,0 @@
-../sshpk/bin/sshpk-verify

1 node_modules/.bin/ts-jest generated vendored
@@ -1 +0,0 @@
-../ts-jest/cli.js

1 node_modules/.bin/watch generated vendored
@@ -1 +0,0 @@
-../@cnakazawa/watch/cli.js
74 node_modules/@actions/github/README.md generated vendored Normal file
@@ -0,0 +1,74 @@
+# `@actions/github`
+
+> A hydrated Octokit client.
+
+## Usage
+
+Returns an authenticated Octokit client that follows the machine [proxy settings](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners). See https://octokit.github.io/rest.js for the API.
+
+```js
+const github = require('@actions/github');
+const core = require('@actions/core');
+
+async function run() {
+  // This should be a token with access to your repository scoped in as a secret.
+  // The YML workflow will need to set myToken with the GitHub Secret Token
+  // myToken: ${{ secrets.GITHUB_TOKEN }}
+  // https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token#about-the-github_token-secret
+  const myToken = core.getInput('myToken');
+
+  const octokit = new github.GitHub(myToken);
+
+  const { data: pullRequest } = await octokit.pulls.get({
+    owner: 'octokit',
+    repo: 'rest.js',
+    pull_number: 123,
+    mediaType: {
+      format: 'diff'
+    }
+  });
+
+  console.log(pullRequest);
+}
+
+run();
+```
+
+You can pass client options, as specified by [Octokit](https://octokit.github.io/rest.js/), as a second argument to the `GitHub` constructor.
+
+You can also make GraphQL requests. See https://github.com/octokit/graphql.js for the API.
+
+```js
+const result = await octokit.graphql(query, variables);
+```
+
+Finally, you can get the context of the current action:
+
+```js
+const github = require('@actions/github');
+
+const context = github.context;
+
+const newIssue = await octokit.issues.create({
+  ...context.repo,
+  title: 'New issue!',
+  body: 'Hello Universe!'
+});
+```
+
+## Webhook payload typescript definitions
+
+The npm module `@octokit/webhooks` provides type definitions for the response payloads. You can cast the payload to these types for better type information.
+
+First, install the npm module `npm install @octokit/webhooks`
+
+Then, assert the type based on the eventName
+```ts
+import * as core from '@actions/core'
+import * as github from '@actions/github'
+import * as Webhooks from '@octokit/webhooks'
+if (github.context.eventName === 'push') {
+  const pushPayload = github.context.payload as Webhooks.WebhookPayloadPush
+  core.info(`The head commit is: ${pushPayload.head}`)
+}
+```
26 node_modules/@actions/github/lib/context.d.ts generated vendored Normal file
@@ -0,0 +1,26 @@
+import { WebhookPayload } from './interfaces';
+export declare class Context {
+    /**
+     * Webhook payload object that triggered the workflow
+     */
+    payload: WebhookPayload;
+    eventName: string;
+    sha: string;
+    ref: string;
+    workflow: string;
+    action: string;
+    actor: string;
+    /**
+     * Hydrate the context from the environment
+     */
+    constructor();
+    get issue(): {
+        owner: string;
+        repo: string;
+        number: number;
+    };
+    get repo(): {
+        owner: string;
+        repo: string;
+    };
+}
46 node_modules/@actions/github/lib/context.js generated vendored Normal file
@@ -0,0 +1,46 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const fs_1 = require("fs");
+const os_1 = require("os");
+class Context {
+    /**
+     * Hydrate the context from the environment
+     */
+    constructor() {
+        this.payload = {};
+        if (process.env.GITHUB_EVENT_PATH) {
+            if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
+                this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
+            }
+            else {
+                const path = process.env.GITHUB_EVENT_PATH;
+                process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
+            }
+        }
+        this.eventName = process.env.GITHUB_EVENT_NAME;
+        this.sha = process.env.GITHUB_SHA;
+        this.ref = process.env.GITHUB_REF;
+        this.workflow = process.env.GITHUB_WORKFLOW;
+        this.action = process.env.GITHUB_ACTION;
+        this.actor = process.env.GITHUB_ACTOR;
+    }
+    get issue() {
+        const payload = this.payload;
+        return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
+    }
+    get repo() {
+        if (process.env.GITHUB_REPOSITORY) {
+            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
+            return { owner, repo };
+        }
+        if (this.payload.repository) {
+            return {
+                owner: this.payload.repository.owner.login,
+                repo: this.payload.repository.name
+            };
+        }
+        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
+    }
+}
+exports.Context = Context;
+//# sourceMappingURL=context.js.map
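As the `repo` getter in the vendored file above shows, `context.repo` prefers `GITHUB_REPOSITORY` and only falls back to the event payload. A small usage sketch (the repository name is a placeholder):

```js
// Sketch: context.repo splits "owner/repo" from GITHUB_REPOSITORY.
// "octo-org/octo-repo" is a placeholder value.
const { Context } = require('@actions/github/lib/context');

process.env.GITHUB_REPOSITORY = 'octo-org/octo-repo';
console.log(new Context().repo); // { owner: 'octo-org', repo: 'octo-repo' }
```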
1 node_modules/@actions/github/lib/context.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long (generated source map for context.js)
27 node_modules/@actions/github/lib/github.d.ts generated vendored Normal file
@@ -0,0 +1,27 @@
+import { graphql as GraphQL } from '@octokit/graphql/dist-types/types';
+import { Octokit } from '@octokit/rest';
+import * as Context from './context';
+export declare const context: Context.Context;
+export declare class GitHub extends Octokit {
+    graphql: GraphQL;
+    /**
+     * Sets up the REST client and GraphQL client with auth and proxy support.
+     * The parameter `token` or `opts.auth` must be supplied. The GraphQL client
+     * authorization is not setup when `opts.auth` is a function or object.
+     *
+     * @param token Auth token
+     * @param opts Octokit options
+     */
+    constructor(token: string, opts?: Omit<Octokit.Options, 'auth'>);
+    constructor(opts: Octokit.Options);
+    /**
+     * Disambiguates the constructor overload parameters
+     */
+    private static disambiguate;
+    private static getOctokitOptions;
+    private static getGraphQL;
+    private static getAuthString;
+    private static getProxyAgent;
+    private static getApiBaseUrl;
+    private static getGraphQLBaseUrl;
+}
108 node_modules/@actions/github/lib/github.js generated vendored Normal file
@@ -0,0 +1,108 @@
+"use strict";
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+// Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts
+const graphql_1 = require("@octokit/graphql");
+const rest_1 = require("@octokit/rest");
+const Context = __importStar(require("./context"));
+const httpClient = __importStar(require("@actions/http-client"));
+// We need this in order to extend Octokit
+rest_1.Octokit.prototype = new rest_1.Octokit();
+exports.context = new Context.Context();
+class GitHub extends rest_1.Octokit {
+    constructor(token, opts) {
+        super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts)));
+        this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts));
+    }
+    /**
+     * Disambiguates the constructor overload parameters
+     */
+    static disambiguate(token, opts) {
+        return [
+            typeof token === 'string' ? token : '',
+            typeof token === 'object' ? token : opts || {}
+        ];
+    }
+    static getOctokitOptions(args) {
+        const token = args[0];
+        const options = Object.assign({}, args[1]); // Shallow clone - don't mutate the object provided by the caller
+        // Base URL - GHES or Dotcom
+        options.baseUrl = options.baseUrl || this.getApiBaseUrl();
+        // Auth
+        const auth = GitHub.getAuthString(token, options);
+        if (auth) {
+            options.auth = auth;
+        }
+        // Proxy
+        const agent = GitHub.getProxyAgent(options.baseUrl, options);
+        if (agent) {
+            // Shallow clone - don't mutate the object provided by the caller
+            options.request = options.request ? Object.assign({}, options.request) : {};
+            // Set the agent
+            options.request.agent = agent;
+        }
+        return options;
+    }
+    static getGraphQL(args) {
+        const defaults = {};
+        defaults.baseUrl = this.getGraphQLBaseUrl();
+        const token = args[0];
+        const options = args[1];
+        // Authorization
+        const auth = this.getAuthString(token, options);
+        if (auth) {
+            defaults.headers = {
+                authorization: auth
+            };
+        }
+        // Proxy
+        const agent = GitHub.getProxyAgent(defaults.baseUrl, options);
+        if (agent) {
+            defaults.request = { agent };
+        }
+        return graphql_1.graphql.defaults(defaults);
+    }
+    static getAuthString(token, options) {
+        // Validate args
+        if (!token && !options.auth) {
+            throw new Error('Parameter token or opts.auth is required');
+        }
+        else if (token && options.auth) {
+            throw new Error('Parameters token and opts.auth may not both be specified');
+        }
+        return typeof options.auth === 'string' ? options.auth : `token ${token}`;
+    }
+    static getProxyAgent(destinationUrl, options) {
+        var _a;
+        if (!((_a = options.request) === null || _a === void 0 ? void 0 : _a.agent)) {
+            if (httpClient.getProxyUrl(destinationUrl)) {
+                const hc = new httpClient.HttpClient();
+                return hc.getAgent(destinationUrl);
+            }
+        }
+        return undefined;
+    }
+    static getApiBaseUrl() {
+        return process.env['GITHUB_API_URL'] || 'https://api.github.com';
+    }
+    static getGraphQLBaseUrl() {
+        let url = process.env['GITHUB_GRAPHQL_URL'] || 'https://api.github.com/graphql';
+        // Shouldn't be a trailing slash, but remove if so
+        if (url.endsWith('/')) {
+            url = url.substr(0, url.length - 1);
+        }
+        // Remove trailing "/graphql"
+        if (url.toUpperCase().endsWith('/GRAPHQL')) {
+            url = url.substr(0, url.length - '/graphql'.length);
+        }
+        return url;
+    }
+}
+exports.GitHub = GitHub;
+//# sourceMappingURL=github.js.map
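`getGraphQLBaseUrl` in the vendored file above normalizes `GITHUB_GRAPHQL_URL` by stripping a trailing slash and then a trailing `/graphql` segment. A standalone restatement with illustrative GHES-style URLs:

```js
// Restatement of the normalization in getGraphQLBaseUrl(); example URLs are illustrative.
function normalizeGraphQLBaseUrl(url) {
    if (url.endsWith('/')) {
        url = url.substr(0, url.length - 1);
    }
    if (url.toUpperCase().endsWith('/GRAPHQL')) {
        url = url.substr(0, url.length - '/graphql'.length);
    }
    return url;
}

console.log(normalizeGraphQLBaseUrl('https://ghe.example.com/api/graphql/')); // https://ghe.example.com/api
console.log(normalizeGraphQLBaseUrl('https://api.github.com/graphql'));       // https://api.github.com
```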
1 node_modules/@actions/github/lib/github.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long (generated source map for github.js)
36 node_modules/@actions/github/lib/interfaces.d.ts generated vendored Normal file
@@ -0,0 +1,36 @@
+export interface PayloadRepository {
+    [key: string]: any;
+    full_name?: string;
+    name: string;
+    owner: {
+        [key: string]: any;
+        login: string;
+        name?: string;
+    };
+    html_url?: string;
+}
+export interface WebhookPayload {
+    [key: string]: any;
+    repository?: PayloadRepository;
+    issue?: {
+        [key: string]: any;
+        number: number;
+        html_url?: string;
+        body?: string;
+    };
+    pull_request?: {
+        [key: string]: any;
+        number: number;
+        html_url?: string;
+        body?: string;
+    };
+    sender?: {
+        [key: string]: any;
+        type: string;
+    };
+    action?: string;
+    installation?: {
+        id: number;
+        [key: string]: any;
+    };
+}
Some files were not shown because too many files have changed in this diff.