Mirror of https://github.com/github/codeql-action.git, synced 2025-12-08 00:38:30 +08:00

Compare commits: branch `nickfyson/...disable-de`

5 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 43a40e2811 |  |
|  | dde2f47731 |  |
|  | ef31b1a4fd |  |
|  | 0182aa3a4d |  |
|  | 6252e1a2e9 |  |
@@ -1,10 +0,0 @@

```ini
root = true

[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[*.ts]
indent_style = space
indent_size = 2
```
5 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -1,5 +0,0 @@

```yaml
blank_issues_enabled: true
contact_links:
  - name: Contact GitHub Support
    url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
    about: Contact Support about code scanning
```
15 .github/codeql/codeql-config.yml vendored
@@ -1,14 +1 @@

```yaml
name: "CodeQL config"
queries:
  - name: Run custom queries
    uses: ./queries
  # Run all extra query suites, both because we want to
  # and because it'll act as extra testing. This is why
  # we include both even though one is a superset of the
  # other, because we're testing the parsing logic and
  # that the suites exist in the codeql bundle.
  - uses: security-extended
  - uses: security-and-quality
paths-ignore:
  - tests
  - lib
name: "CodeQL config"
```
5 .github/pull_request_template.md vendored
@@ -1,4 +1,7 @@

### Merge / deployment checklist

- Run test builds as necessary. Can be on this repository or elsewhere as needed in order to test the change - please include links to tests in other repos!
  - [ ] CodeQL using init/finish actions
  - [ ] 3rd party tool using upload action
- [ ] Confirm this change is backwards compatible with existing workflows.
- [ ] Confirm the [readme](https://github.com/github/codeql-action/blob/master/README.md) has been updated if necessary.
178 .github/update-release-branch.py vendored
@@ -1,178 +0,0 @@

```python
import datetime
from github import Github
import random
import requests
import subprocess
import sys

# The branch being merged from.
# This is the one that contains day-to-day development work.
MAIN_BRANCH = 'main'
# The branch being merged into.
# This is the release branch that users reference.
LATEST_RELEASE_BRANCH = 'v1'
# Name of the remote
ORIGIN = 'origin'

# Runs git with the given args and returns the stdout.
# Raises an error if git does not exit successfully.
def run_git(*args):
    cmd = ['git', *args]
    p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if (p.returncode != 0):
        raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
    return p.stdout.decode('ascii')

# Returns true if the given branch exists on the origin remote
def branch_exists_on_remote(branch_name):
    return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''

# Opens a PR from the given branch to the release branch
def open_pr(repo, all_commits, short_main_sha, branch_name):
    # Sort the commits into the pull requests that introduced them,
    # and any commits that don't have a pull request
    pull_requests = []
    commits_without_pull_requests = []
    for commit in all_commits:
        pr = get_pr_for_commit(repo, commit)

        if pr is None:
            commits_without_pull_requests.append(commit)
        elif not any(p for p in pull_requests if p.number == pr.number):
            pull_requests.append(pr)

    print('Found ' + str(len(pull_requests)) + ' pull requests')
    print('Found ' + str(len(commits_without_pull_requests)) + ' commits not in a pull request')

    # Sort PRs and commits by age
    pull_requests = sorted(pull_requests, key=lambda pr: pr.number)
    commits_without_pull_requests = sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)

    # Start constructing the body text
    body = 'Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH

    conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
    body += '\n\nConductor for this PR is @' + conductor

    # List all PRs merged
    if len(pull_requests) > 0:
        body += '\n\nContains the following pull requests:'
        for pr in pull_requests:
            merger = get_merger_of_pr(repo, pr)
            body += '\n- #' + str(pr.number)
            body += ' - ' + pr.title
            body += ' (@' + merger + ')'

    # List all commits not part of a PR
    if len(commits_without_pull_requests) > 0:
        body += '\n\nContains the following commits not from a pull request:'
        for commit in commits_without_pull_requests:
            body += '\n- ' + commit.sha
            body += ' - ' + get_truncated_commit_message(commit)
            body += ' (@' + commit.author.login + ')'

    title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH

    # Create the pull request
    pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
    print('Created PR #' + str(pr.number))

    # Assign the conductor
    pr.add_to_assignees(conductor)
    print('Assigned PR to ' + conductor)

# Gets the person who should be in charge of the mergeback PR
def get_conductor(repo, pull_requests, other_commits):
    # If there are any PRs then use whoever merged the last one
    if len(pull_requests) > 0:
        return get_merger_of_pr(repo, pull_requests[-1])

    # Otherwise take the author of the latest commit
    return other_commits[-1].author.login

# Gets a list of the SHAs of all commits that have happened on main
# since the release branched off.
# This will not include any commits that exist on the release branch
# that aren't on main.
def get_commit_difference(repo):
    commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MAIN_BRANCH).strip().split('\n')

    # Convert to full-fledged commit objects
    commits = [repo.get_commit(c) for c in commits]

    # Filter out merge commits for PRs
    return list(filter(lambda c: not is_pr_merge_commit(c), commits))

# Is the given commit the automatic merge commit from when merging a PR
def is_pr_merge_commit(commit):
    return commit.committer.login == 'web-flow' and len(commit.parents) > 1

# Gets a copy of the commit message that should display nicely
def get_truncated_commit_message(commit):
    message = commit.commit.message.split('\n')[0]
    if len(message) > 60:
        return message[:57] + '...'
    else:
        return message

# Converts a commit into the PR that introduced it to the main branch.
# Returns the PR object, or None if no PR could be found.
def get_pr_for_commit(repo, commit):
    prs = commit.get_pulls()

    if prs.totalCount > 0:
        # In the case that there are multiple PRs, return the earliest one
        prs = sorted(prs, key=lambda pr: int(pr.number))
        return prs[0]
    else:
        return None

# Get the person who merged the pull request.
# For most cases this will be the same as the author, but for PRs opened
# by external contributors getting the merger will get us the GitHub
# employee who reviewed and merged the PR.
def get_merger_of_pr(repo, pr):
    return repo.get_commit(pr.merge_commit_sha).author.login

def main():
    if len(sys.argv) != 3:
        raise Exception('Usage: update-release-branch.py <github token> <repository nwo>')
    github_token = sys.argv[1]
    repository_nwo = sys.argv[2]

    repo = Github(github_token).get_repo(repository_nwo)

    # Print what we intend to do
    print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
    short_main_sha = run_git('rev-parse', '--short', MAIN_BRANCH).strip()
    print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)

    # See if there are any commits to merge in
    commits = get_commit_difference(repo)
    if len(commits) == 0:
        print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
        return

    # The branch name is based off of the name of branch being merged into
    # and the SHA of the branch being merged from. Thus if the branch already
    # exists we can assume we don't need to recreate it.
    new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_main_sha
    print('Branch name is ' + new_branch_name)

    # Check if the branch already exists. If so we can abort as this script
    # has already run on this combination of branches.
    if branch_exists_on_remote(new_branch_name):
        print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
        return

    # Create the new branch and push it to the remote
    print('Creating branch ' + new_branch_name)
    run_git('checkout', '-b', new_branch_name, MAIN_BRANCH)
    run_git('push', ORIGIN, new_branch_name)

    # Open a PR to update the branch
    open_pr(repo, commits, short_main_sha, new_branch_name)

if __name__ == '__main__':
    main()
```
40 .github/workflows/codeql.yml vendored
@@ -1,6 +1,6 @@

```yaml
name: "CodeQL action"

on: [push, pull_request]
on: [push]

jobs:
  build:
```

@@ -10,42 +10,8 @@ jobs:

```yaml
    runs-on: ${{ matrix.os }}

    steps:
    - uses: actions/checkout@v2
      with:
        # Must fetch at least the immediate parents so that if this is
        # a pull request then we can checkout the head of the pull request.
        fetch-depth: 2

    # If this run was triggered by a pull request event then checkout
    # the head of the pull request instead of the merge commit.
    - run: git checkout HEAD^2
      if: ${{ github.event_name == 'pull_request' }}

    - uses: actions/checkout@v1
    - uses: ./init
      with:
        languages: javascript
        config-file: ./.github/codeql/codeql-config.yml
    - uses: ./analyze

  test-root-action:
    strategy:
      matrix:
        os: [ubuntu-latest,windows-latest,macos-latest]
    runs-on: ${{ matrix.os }}

    steps:
    - uses: actions/checkout@v2
      with:
        # Must fetch at least the immediate parents so that if this is
        # a pull request then we can checkout the head of the pull request.
        fetch-depth: 2

    # If this run was triggered by a pull request event then checkout
    # the head of the pull request instead of the merge commit.
    - run: git checkout HEAD^2
      if: ${{ github.event_name == 'pull_request' }}

    - uses: github/codeql-action@${{ 'testing_testing' }}
      with:
        languages: javascript
        config-file: ./.github/codeql/codeql-config.yml
```
154 .github/workflows/integration-testing.yml vendored
@@ -1,148 +1,22 @@

```yaml
name: "Integration Testing"

on: [push, pull_request]
on: [push]

jobs:
  multi-language-repo_test-autodetect-languages:
  dispatch-events:
    if: github.event.repository.full_name == 'github/codeql-action'
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v2
    - name: Move codeql-action
      shell: bash
    - name: Send repository dispatch events
      run: |
        mkdir ../action
        mv * .github ../action/
        mv ../action/tests/multi-language-repo/{*,.github} .
    - name: Build code
      shell: bash
      run: ./build.sh
    - uses: ./../action
      env:
        TEST_MODE: true
    - run: |
        cd "$RUNNER_TEMP/codeql_databases"
        # List all directories as there will be precisely one directory per database
        # but there may be other files in this directory such as query suites.
        if [ "$(ls -d */ | wc -l)" != 6 ] || \
           [[ ! -d cpp ]] || \
           [[ ! -d csharp ]] || \
           [[ ! -d go ]] || \
           [[ ! -d java ]] || \
           [[ ! -d javascript ]] || \
           [[ ! -d python ]]; then
          echo "Did not find expected number of databases. Database dir contains: $(ls)"
          exit 1
        fi
        curl -X POST \
          -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
          -H "Accept: application/vnd.github.everest-preview+json" \
          https://api.github.com/repos/Anthophila/amazon-cognito-js-copy/dispatches \
          -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'

  multi-language-repo_test-custom-queries-and-remote-config:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
    runs-on: ${{ matrix.os }}

    steps:
    - uses: actions/checkout@v2
    - name: Move codeql-action
      shell: bash
      run: |
        mkdir ../action
        mv * .github ../action/
        mv ../action/tests/multi-language-repo/{*,.github} .
    - name: Build code
      shell: bash
      run: ./build.sh
    - uses: ./../action
      with:
        languages: cpp,csharp,java,javascript,python
        config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
      env:
        TEST_MODE: true

  # Currently is not possible to analyze Go in conjunction with other languages in macos
  multi-language-repo_test-go-custom-queries:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
    runs-on: ${{ matrix.os }}

    steps:
    - uses: actions/setup-go@v2
      if: ${{ matrix.os == 'macos-latest' }}
      with:
        go-version: '^1.13.1'
    - uses: actions/checkout@v2
    - name: Move codeql-action
      shell: bash
      run: |
        mkdir ../action
        mv * .github ../action/
        mv ../action/tests/multi-language-repo/{*,.github} .
    - name: Build code
      shell: bash
      run: ./build.sh
    - uses: ./../action
      with:
        languages: go
        config-file: ./tests/multi-language-repo/.github/codeql/custom-queries.yml
      env:
        TEST_MODE: true

  multi-language-repo_rubocop:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v2
    - name: Move codeql-action
      shell: bash
      run: |
        mkdir ../action
        mv * .github ../action/
        mv ../action/tests/multi-language-repo/{*,.github} .
    - name: Set up Ruby
      uses: ruby/setup-ruby@v1
      with:
        ruby-version: 2.6
    - name: Install Code Scanning integration
      run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
    - name: Install dependencies
      run: bundle install
    - name: Rubocop run
      run: |
        bash -c "
          bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
          [[ $? -ne 2 ]]
        "
    - uses: ./../action/upload-sarif
      with:
        sarif_file: rubocop.sarif
      env:
        TEST_MODE: true

  test-proxy:
    runs-on: ubuntu-latest
    container:
      image: ubuntu:18.04
      options: --dns 127.0.0.1
    services:
      squid-proxy:
        image: datadog/squid:latest
        ports:
        - 3128:3128
    env:
      https_proxy: http://squid-proxy:3128
    steps:
    - uses: actions/checkout@v2
    - name: Move codeql-action
      shell: bash
      run: |
        mkdir ../action
        mv * .github ../action/
        mv ../action/tests/multi-language-repo/{*,.github} .
    - uses: ./../action
      with:
        languages: javascript
      env:
        TEST_MODE: true
        curl -X POST \
          -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
          -H "Accept: application/vnd.github.everest-preview+json" \
          https://api.github.com/repos/Anthophila/electron-test-action/dispatches \
          -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
```
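The new `dispatch-events` job above triggers downstream integration tests by POSTing `repository_dispatch` events to other repositories. As a rough sketch of the other side of this protocol, a receiving workflow in one of those target repositories might look like the following (workflow, job, and step names are hypothetical; only the event type and `client_payload.sha` field are taken from the sending job above):

```yaml
name: "CodeQL integration (receiver)"

on:
  repository_dispatch:
    types: [codeql-integration]

jobs:
  run-integration-test:
    runs-on: ubuntu-latest
    steps:
      # The sending job puts the codeql-action commit under test in client_payload.sha
      - run: echo "Testing codeql-action at ${{ github.event.client_payload.sha }}"
```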
27 .github/workflows/js-uptodate-check.yml vendored Normal file
@@ -0,0 +1,27 @@

```yaml
name: "Check generated JavaScript"

on: [pull_request]

jobs:
  check-js:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: Check generated JavaScript
      run: |
        # Sanity check that repo is clean to start with
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then this workflow needs attention...
          >&2 echo "Failed: Repo should be clean before testing!"
          exit 1
        fi
        # Generate the JavaScript files
        npm run-script build
        # Check that repo is still clean
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then the PR needs attention
          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
          exit 1
        fi
        echo "Success: JavaScript files are up to date"
```
12 .github/workflows/npm-test.yml vendored Normal file
@@ -0,0 +1,12 @@

```yaml
name: "npm run-script test"

on: [push]

jobs:
  npm-test:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: npm run-script test
      run: npm run-script test
```
71 .github/workflows/pr-checks.yml vendored
@@ -1,71 +0,0 @@

```yaml
name: "PR checks"

on: [push, pull_request]

jobs:
  tslint:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: tslint
      run: npm run-script lint

  check-js:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: Check generated JavaScript
      run: |
        # Sanity check that repo is clean to start with
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then this workflow needs attention...
          >&2 echo "Failed: Repo should be clean before testing!"
          exit 1
        fi
        # Generate the JavaScript files
        npm run-script build
        # Check that repo is still clean
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then the PR needs attention
          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
          git status
          exit 1
        fi
        echo "Success: JavaScript files are up to date"

  check-node-modules:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: Check node modules up to date
      run: |
        # Sanity check that repo is clean to start with
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then this workflow needs attention...
          >&2 echo "Failed: Repo should be clean before testing!"
          exit 1
        fi

        # Reinstall modules and then clean to remove absolute paths
        # Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
        npm ci
        npm run removeNPMAbsolutePaths
        # Check that repo is still clean
        if [ ! -z "$(git status --porcelain)" ]; then
          # If we get a fail here then the PR needs attention
          >&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
          git status
          exit 1
        fi
        echo "Success: node_modules are up to date"

  npm-test:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: npm run-script test
      run: npm run-script test
```
12 .github/workflows/ts-lint.yml vendored Normal file
@@ -0,0 +1,12 @@

```yaml
name: "TSLint"

on: [push]

jobs:
  tslint:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v1
    - name: tslint
      run: npm run-script lint
```
32 .github/workflows/update-release-branch.yml vendored
@@ -1,32 +0,0 @@

```yaml
name: Update release branch
on:
  schedule:
    - cron: 0 9 * * 1
  repository_dispatch:
    # Example of how to trigger this:
    # curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
    # Replace <token> with a personal access token from this page: https://github.com/settings/tokens
    types: [update-release-branch]
  workflow_dispatch:

jobs:
  update:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          # Need full history so we can calculate diffs
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.5

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install PyGithub==1.51 requests

      - name: Update release branch
        run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
```
25 .vscode/launch.json vendored
@@ -1,25 +0,0 @@

```jsonc
{
  // Use IntelliSense to learn about possible attributes.
  // Hover to view descriptions of existing attributes.
  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
  "version": "0.2.0",
  "configurations": [
    {
      "type": "node",
      "request": "launch",
      "name": "Debug AVA test file",
      "runtimeExecutable": "${workspaceFolder}/node_modules/.bin/ava",
      "runtimeArgs": [
        "${file}",
        "--break",
        "--serial",
        "--timeout=20m"
      ],
      "port": 9229,
      "outputCapture": "std",
      "skipFiles": [
        "<node_internals>/**/*.js"
      ]
    }
  ]
}
```
10 .vscode/settings.json vendored
@@ -1,10 +0,0 @@

```jsonc
{
  "files.exclude": {
    // include the defaults from VS Code
    "**/.git": true,
    "**/.DS_Store": true,

    // transpiled JavaScript
    "lib": true,
  }
}
```
@@ -1,4 +1,4 @@

# Contributing
## Contributing

[fork]: https://github.com/github/codeql-action/fork
[pr]: https://github.com/github/codeql-action/compare

@@ -10,54 +10,13 @@ Contributions to this project are [released](https://help.github.com/articles/gi

Please note that this project is released with a [Contributor Code of Conduct][code-of-conduct]. By participating in this project you agree to abide by its terms.

## Development and Testing

Before you start, ensure that you have a recent version of node installed. You can see which version of node is used by the action in `init/action.yml`.

### Common tasks

* Transpile the TypeScript to JavaScript: `npm run build`. Note that the JavaScript files are committed to git.
* Run tests: `npm run test`. You’ll need to ensure that the JavaScript files are up-to-date first by running the command above.
* Run the linter: `npm run lint`.

This project also includes configuration to run tests from VSCode (with support for breakpoints): open the test file you wish to run and choose "Debug AVA test file" from the Run menu in the Run panel.

### Running the action

To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests.

### Running the action locally

It is possible to run this action locally via [act](https://github.com/nektos/act) using the following steps:

1. Create a GitHub [Personal Access Token](https://github.com/settings/tokens) (PAT).
1. Install [act](https://github.com/nektos/act) v0.2.10 or greater.
1. Add a `.env` file in the root of the project you are running:

   ```bash
   CODEQL_LOCAL_RUN=true

   # Optional, for better logging
   GITHUB_JOB=<ANY_JOB_NAME>
   ```

1. Run `act -j codeql -s GITHUB_TOKEN=<PAT>`

Running locally will generate the CodeQL database and run all the queries, but it will avoid uploading and reporting results to GitHub. Note that this must be done on a repository that _consumes_ this action, not this repository. The use case is to debug failures of this action on specific repositories.

### Integration tests

As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you’re making, you may want to add a test to this file or extend an existing one.

## Submitting a pull request

1. [Fork][fork] and clone the repository
2. Create a new branch: `git checkout -b my-branch-name`
3. Make your change, add tests, and make sure the tests still pass
4. Push to your fork and [submit a pull request][pr]
5. Pat yourself on the back and wait for your pull request to be reviewed and merged.

If you're a GitHub staff member, you can merge your own PR once it's approved; for external contributors, GitHub staff will merge your PR once it's approved.
5. Pat your self on the back and wait for your pull request to be reviewed and merged.

Here are a few things you can do that will increase the likelihood of your pull request being accepted:
159 README.md

@@ -1,16 +1,9 @@

# CodeQL Action

This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.

## License

This project is released under the [MIT License](LICENSE).

The underlying CodeQL CLI, used in this action, is licensed under the [GitHub CodeQL Terms and Conditions](https://securitylab.github.com/tools/codeql/license). As such, this action may be used on open source projects hosted on GitHub, and on private repositories that are owned by an organisation with GitHub Advanced Security enabled.

## Usage

This is a short walkthrough, but for more information read [configuring code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning).
# CodeQL Action
This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/semmle/ql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.

[Sign up for the Advanced Security beta](https://github.com/features/security/advanced-security/signup)

## Usage

To get code scanning results from CodeQL analysis on your repo you can use the following workflow as a template:

@@ -20,55 +13,48 @@ name: "Code Scanning - Action"

```yaml
on:
  push:
  pull_request:
  schedule:
    - cron: '0 0 * * 0'

jobs:
  CodeQL-Build:

    strategy:
      fail-fast: false

    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
    runs-on: ubuntu-latest

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2
      with:
        # Must fetch at least the immediate parents so that if this is
        # a pull request then we can checkout the head of the pull request.
        # Only include this option if you are running this workflow on pull requests.
        fetch-depth: 2
    - name: Checkout repository
      uses: actions/checkout@v2

    # If this run was triggered by a pull request event then checkout
    # the head of the pull request instead of the merge commit.
    # Only include this step if you are running this workflow on pull requests.
    - run: git checkout HEAD^2
      if: ${{ github.event_name == 'pull_request' }}

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      # Override language selection by uncommenting this and choosing your languages
      # with:
      #   languages: go, javascript, csharp, python, cpp, java

    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below).
    - name: Autobuild
      uses: github/codeql-action/autobuild@v1

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

    # ✏️ If the Autobuild fails above, remove it and uncomment the following
    #    three lines and modify them (or add more) to build your code if your
    #    project uses a compiled language
    #- run: |
    #   make bootstrap
    #   make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1
```

If you prefer to integrate this within an existing CI workflow, it should end up looking something like this:

@@ -87,10 +73,21 @@ If you prefer to integrate this within an existing CI workflow, it should end up

```yaml
    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1
```
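The hunk above shows only the tail of that integration example; the middle of it is unchanged and not displayed by the diff. As a minimal sketch of the full step sequence, assuming the same init/autobuild/analyze steps as the template earlier in this README, the integration would look something like:

```yaml
steps:
  # ... your existing checkout and setup steps ...
  - name: Initialize CodeQL
    uses: github/codeql-action/init@v1
  # For compiled languages a build must happen between init and analyze;
  # autobuild can be replaced with your own build commands.
  - name: Autobuild
    uses: github/codeql-action/autobuild@v1
  - name: Perform CodeQL Analysis
    uses: github/codeql-action/analyze@v1
```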
### Actions triggers

The CodeQL action should be run on `push` events, and on a `schedule`. `Push` events allow us to do detailed analysis of the delta in a pull request, while the `schedule` event ensures that GitHub regularly scans the repository for the latest vulnerabilities, even if the repository becomes inactive. This action does not support the `pull_request` event.
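A minimal trigger block following this guidance would look like the sketch below (the weekly cron expression is illustrative, taken from the template above):

```yaml
on:
  push:
  schedule:
    # Illustrative: scan every Sunday at midnight
    - cron: '0 0 * * 0'
```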
### Configuration file
### Configuration

You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses).

Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
You can disable the default queries using `disable-default-queries: true`.

You can choose to ignore some files or folders from the analysis, or include additional files/folders for analysis. This *only* works for Javascript and Python analysis.
Identifying potential files for extraction:

- Scans each folder that's defined as `paths` in turn, traversing subfolders and looking for relevant files.
- If it finds a subfolder that's defined as `paths-ignore`, stop traversing.
- If a file or folder is both in `paths` and `paths-ignore`, the `paths-ignore` is ignored.

Use the config-file parameter of the init action to enable the configuration file. For example:

```yaml
- uses: github/codeql-action/init@v1
@@ -98,8 +95,68 @@ Use the `config-file` parameter of the `init` action to enable the configuration
  with:
    config-file: ./.github/codeql/codeql-config.yml
```

The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
A config file looks like this:

```yaml
name: "My CodeQL config"

disable-default-queries: true

queries:
  - name: In-repo queries (Runs the queries located in the my-queries folder of the repo)
    uses: ./my-queries
  - name: External Javascript QL pack (Runs a QL pack located in an external repo)
    uses: /Semmle/ql/javascript/ql/src/Electron@master
  - name: External query (Runs a single query located in an external QL pack)
    uses: Semmle/ql/javascript/ql/src/AngularJS/DeadAngularJSEventListener.ql@master
  - name: Select query suite (Runs a query suite)
    uses: ./codeql-querypacks/complex-python-querypack/rootAndBar.qls

paths:
  - src/util.ts

paths-ignore:
  - src
  - lib
```

## Troubleshooting

Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).

### Trouble with Go dependencies

#### If you use a vendor directory

Try passing

```yaml
env:
  GOFLAGS: "-mod=vendor"
```

to `github/codeql-action/analyze`.

### If you do not use a vendor directory

Dependencies on public repositories should just work. If you have dependencies on private repositories, one option is to use `git config` and a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) to authenticate when downloading dependencies. Add a section like

```yaml
steps:
  - name: Configure git private repo access
    env:
      TOKEN: ${{ secrets.GITHUB_PAT }}
    run: |
      git config --global url."https://${TOKEN}@github.com/foo/bar".insteadOf "https://github.com/foo/bar"
      git config --global url."https://${TOKEN}@github.com/foo/baz".insteadOf "https://github.com/foo/baz"
```

before any codeql actions. A similar thing can also be done with an SSH key or deploy key.

### C# using dotnet version 2 on linux

This currently requires invoking `dotnet` with the `/p:UseSharedCompilation=false` flag. For example:

```
dotnet build /p:UseSharedCompilation=false
```

Version 3 does not require the additional flag.

## License

This project is released under the [MIT License](LICENSE).
47 action.yml
@@ -1,47 +0,0 @@

```yaml
name: 'CodeQL'
description: 'TODO'
author: 'GitHub'
inputs:
  token:
    default: ${{ github.token }}
  matrix:
    default: ${{ toJson(matrix) }}
  # inputs for pre-hook
  tools:
    description: URL of CodeQL tools
    required: false
    # If not specified the Action will check in several places until it finds the CodeQL tools.
  languages:
    description: The languages to be analysed
    required: false
  config-file:
    description: Path of the config file to use
    required: false
  # inputs for main
  check_name:
    description: The name of the check run to add text to.
    required: false
  output:
    description: The path of the directory in which to save the SARIF results
    required: false
    default: '../results'
  upload:
    description: Upload the SARIF file
    required: false
    default: "true"
  ram:
    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
    required: false
  threads:
    description: The number of threads to be used by CodeQL.
    required: false
    default: "1"
  checkout_path:
    description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
    required: false
    default: ${{ github.workspace }}
runs:
  using: 'node12'
  pre: './lib/setup-tracer.js'
  main: './lib/finalize-db.js'
```

@@ -4,7 +4,6 @@ author: 'GitHub'

```yaml
inputs:
  check_name:
    description: The name of the check run to add text to.
    required: false
  output:
    description: The path of the directory in which to save the SARIF results
    required: false
```

@@ -12,18 +11,7 @@ inputs:

```yaml
  upload:
    description: Upload the SARIF file
    required: false
    default: "true"
  ram:
    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
    required: false
  threads:
    description: The number of threads to be used by CodeQL.
    required: false
    default: "1"
  checkout_path:
    description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
    required: false
    default: ${{ github.workspace }}
    default: true
  token:
    default: ${{ github.token }}
  matrix:
```

@@ -5,14 +5,12 @@ inputs:

```yaml
  tools:
    description: URL of CodeQL tools
    required: false
    # If not specified the Action will check in several places until it finds the CodeQL tools.
    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
  languages:
    description: The languages to be analysed
    required: false
  token:
    default: ${{ github.token }}
  matrix:
    default: ${{ toJson(matrix) }}
  config-file:
    description: Path of the config file to use
    required: false
```
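For illustration, a workflow step could wire up several of the inputs declared above as follows. This is a sketch only: the values are placeholders, and it assumes the `analyze` action variant whose `output`, `upload`, `ram`, and `threads` inputs appear in the hunks above:

```yaml
steps:
  - uses: github/codeql-action/analyze@v1
    with:
      output: '../results'   # directory in which to save the SARIF results (the declared default)
      upload: "true"         # upload the SARIF file to GitHub
      threads: "2"           # placeholder; the declared default is "1"
      ram: "4096"            # placeholder; by default almost all machine memory is used
```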
11 jest.config.js Normal file
@@ -0,0 +1,11 @@

```js
module.exports = {
  clearMocks: true,
  moduleFileExtensions: ['js', 'ts'],
  testEnvironment: 'node',
  testMatch: ['**/*.test.ts'],
  testRunner: 'jest-circus/runner',
  transform: {
    '^.+\\.ts$': 'ts-jest'
  },
  verbose: true
}
```
48 lib/analysis-paths.js generated
@@ -8,52 +8,20 @@ var __importStar = (this && this.__importStar) || function (mod) {

```js
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
function isInterpretedLanguage(language) {
    return language === 'javascript' || language === 'python';
}
// Matches a string containing only characters that are legal to include in paths on windows.
exports.legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
function buildIncludeExcludeEnvVar(paths) {
    // Ignore anything containing a *
    paths = paths.filter(p => p.indexOf('*') === -1);
    // Some characters are illegal in path names in windows
    if (process.platform === 'win32') {
        paths = paths.filter(p => p.match(exports.legalWindowsPathCharactersRegex));
    }
    return paths.join('\n');
}
function includeAndExcludeAnalysisPaths(config) {
    // The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
    // control which files/directories are traversed when scanning.
    // This allows including files that otherwise would not be scanned, or
    // excluding and not traversing entire file subtrees.
    // It does not understand globs or double-globs because that would require it to
    // traverse the entire file tree to determine which files are matched.
    // Any paths containing "*" are not included in these.
function includeAndExcludeAnalysisPaths(config, languages) {
    if (config.paths.length !== 0) {
        core.exportVariable('LGTM_INDEX_INCLUDE', buildIncludeExcludeEnvVar(config.paths));
        core.exportVariable('LGTM_INDEX_INCLUDE', config.paths.join('\n'));
    }
    if (config.pathsIgnore.length !== 0) {
        core.exportVariable('LGTM_INDEX_EXCLUDE', buildIncludeExcludeEnvVar(config.pathsIgnore));
        core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
    }
    // The 'LGTM_INDEX_FILTERS' environment variable controls which files are
    // extracted or ignored. It does not control which directories are traversed.
    // This does understand the glob and double-glob syntax.
    const filters = [];
    filters.push(...config.paths.map(p => 'include:' + p));
    filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
    if (filters.length !== 0) {
        core.exportVariable('LGTM_INDEX_FILTERS', filters.join('\n'));
    function isInterpretedLanguage(language) {
        return language === 'javascript' && language === 'python';
    }
    // Index include/exclude/filters only work in javascript and python.
    // If any other languages are detected/configured then show a warning.
    if ((config.paths.length !== 0 ||
        config.pathsIgnore.length !== 0 ||
        filters.length !== 0) &&
        !config.languages.every(isInterpretedLanguage)) {
    // Index include/exclude only work in javascript and python
    // If some other language is detected/configured show a warning
    if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) && !languages.every(isInterpretedLanguage)) {
        core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
    }
}
exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
//# sourceMappingURL=analysis-paths.js.map
```
@@ -1 +0,0 @@

```json
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAClD,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}
```
43 lib/analysis-paths.test.js generated
@@ -1,43 +0,0 @@

```js
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const analysisPaths = __importStar(require("./analysis-paths"));
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("emptyPaths", async (t) => {
    const config = {
        languages: [],
        queries: {},
        pathsIgnore: [],
        paths: [],
        originalUserInput: {},
    };
    analysisPaths.includeAndExcludeAnalysisPaths(config);
    t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
    t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
    t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
});
ava_1.default("nonEmptyPaths", async (t) => {
    const config = {
        languages: [],
        queries: {},
        paths: ['path1', 'path2', '**/path3'],
        pathsIgnore: ['path4', 'path5', 'path6/**'],
        originalUserInput: {},
    };
    analysisPaths.includeAndExcludeAnalysisPaths(config);
    t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
    t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
    t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
});
//# sourceMappingURL=analysis-paths.test.js.map
```
@@ -1 +0,0 @@

```json
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QAC3C,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}
```
26 lib/api-client.js generated
@@ -1,26 +0,0 @@

```js
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const util_1 = require("./util");
exports.getApiClient = function (allowLocalRun = false) {
    if (util_1.isLocalRun() && !allowLocalRun) {
        throw new Error('Invalid API call in local run');
    }
    return new github.GitHub(core.getInput('token'), {
        userAgent: "CodeQL Action",
        log: console_log_level_1.default({ level: "debug" })
    });
};
//# sourceMappingURL=api-client.js.map
```
@@ -1 +0,0 @@

```json
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEhD,iCAAoC;AAEvB,QAAA,YAAY,GAAG,UAAS,aAAa,GAAG,KAAK;IACxD,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB;QACE,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC"}
```
53 lib/autobuild.js generated
@@ -8,58 +8,49 @@ var __importStar = (this && this.__importStar) || function (mod) {

```js
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const codeql_1 = require("./codeql");
const config_utils = __importStar(require("./config-utils"));
const exec = __importStar(require("@actions/exec"));
const path = __importStar(require("path"));
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
    var _a, _b;
    const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
    const statusReportBase = await util.createStatusReportBase('autobuild', status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
    const statusReport = {
        ...statusReportBase,
        autobuild_languages: allLanguages.join(','),
        autobuild_failure: failingLanguage,
    };
    await util.sendStatusReport(statusReport);
}
async function run() {
    const startedAt = new Date();
    let language;
    var _a;
    try {
        util.prepareLocalRunEnvironment();
        if (!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
        if (util.should_abort('autobuild', true) || !await util.reportActionStarting('autobuild')) {
            return;
        }
        const config = await config_utils.getConfig();
        // Attempt to find a language to autobuild
        // We want to pick the dominant language in the repo from the ones we're able to build
        // The languages are sorted in order specified by user or by lines of code if we got
        // them from the GitHub API, so try to build the first language on the list.
        const autobuildLanguages = config.languages.filter(codeql_1.isTracedLanguage);
        language = autobuildLanguages[0];
        const language = (_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')[0];
        if (!language) {
            core.info("None of the languages in this project require extra build steps");
            return;
        }
        core.debug(`Detected dominant traced language: ${language}`);
        if (autobuildLanguages.length > 1) {
            core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
        }
        core.startGroup(`Attempting to automatically build ${language} code`);
        const codeQL = codeql_1.getCodeQL();
        await codeQL.runAutobuild(language);
        // TODO: share config across actions better via env variables
        const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
        const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
        const autobuildCmd = path.join(path.dirname(codeqlCmd), language, 'tools', cmdName);
        // Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
        // This is because of an issue with Azure pipelines timing out connections after 4 minutes
        // and Maven not properly handling closed connections
        // Otherwise long build processes will timeout when pulling down Java packages
        // https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
        let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
        process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
        await exec.exec(autobuildCmd);
        core.endGroup();
    }
    catch (error) {
        core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
        console.log(error);
        await sendCompletedStatusReport(startedAt, [language], language, error);
        core.setFailed(error.message);
        await util.reportActionFailed('autobuild', error.message, error.stack);
        return;
    }
    await sendCompletedStatusReport(startedAt, [language]);
    await util.reportActionSucceeded('autobuild');
}
run().catch(e => {
    core.setFailed("autobuild action failed. " + e);
    core.setFailed("autobuild action failed: " + e);
    console.log(e);
});
//# sourceMappingURL=autobuild.js.map
```
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAuD;AACvD,6DAA+C;AAC/C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GAAG,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CAAC,CAAC;IAChB,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,CAAC;IACb,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC7G,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,EAAE,CAAC;QAE9C,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,yBAAgB,CAAC,CAAC;QACrE,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEjC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
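The run() function above only ever builds the first traced language. A minimal standalone sketch of that selection rule follows (a hypothetical helper for illustration; the ordering of the input list is assumed to come from the user's configuration or from the GitHub languages API, as the comments in run() note, and isTracedLanguage mirrors the definition in lib/codeql.js below):

// pick-autobuild-language.js -- sketch of the selection rule used by run().
const isTracedLanguage = (lang) => ['cpp', 'java', 'csharp'].includes(lang);

function pickAutobuildLanguage(languages) {
    // Languages arrive ordered by user preference or by lines of code,
    // so the first traced entry is the "dominant" one.
    const traced = languages.filter(isTracedLanguage);
    return traced.length > 0 ? traced[0] : undefined;
}

console.log(pickAutobuildLanguage(['javascript', 'java', 'csharp'])); // 'java'
console.log(pickAutobuildLanguage(['javascript', 'python']));         // undefined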
331
lib/codeql.js
generated
@@ -1,331 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const http = __importStar(require("@actions/http-client"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
const stream = __importStar(require("stream"));
const globalutil = __importStar(require("util"));
const v4_1 = __importDefault(require("uuid/v4"));
const api = __importStar(require("./api-client"));
const util = __importStar(require("./util"));
/**
 * Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
 * Can be overridden in tests using `setCodeQL`.
 */
let cachedCodeQL = undefined;
/**
 * Environment variable used to store the location of the CodeQL CLI executable.
 * Value is set by setupCodeQL and read by getCodeQL.
 */
const CODEQL_ACTION_CMD = "CODEQL_ACTION_CMD";
const CODEQL_BUNDLE_VERSION = "codeql-bundle-20200630";
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLActionRepository() {
    // Actions do not know their own repository name,
    // so we currently use this hack to find the name based on where our files are.
    // This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
    const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
    const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
    const relativeScriptPath = path.relative(actionsDirectory, __filename);
    // This handles the case where the Action does not come from an Action repository,
    // e.g. our integration tests which use the Action code from the current checkout.
    if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
        return CODEQL_DEFAULT_ACTION_REPOSITORY;
    }
    const relativeScriptPathParts = relativeScriptPath.split(path.sep);
    return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
}
async function getCodeQLBundleDownloadURL() {
    const codeQLActionRepository = getCodeQLActionRepository();
    const potentialDownloadSources = [
        // This GitHub instance, and this Action.
        [util.getInstanceAPIURL(), codeQLActionRepository],
        // This GitHub instance, and the canonical Action.
        [util.getInstanceAPIURL(), CODEQL_DEFAULT_ACTION_REPOSITORY],
        // GitHub.com, and the canonical Action.
        [util.GITHUB_DOTCOM_API_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
    ];
    // We now filter out any duplicates.
    // Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
    const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
    for (let downloadSource of uniqueDownloadSources) {
        let [apiURL, repository] = downloadSource;
        // If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
        if (apiURL === util.GITHUB_DOTCOM_API_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
            break;
        }
        let [repositoryOwner, repositoryName] = repository.split("/");
        try {
            const release = await api.getApiClient().repos.getReleaseByTag({
                owner: repositoryOwner,
                repo: repositoryName,
                tag: CODEQL_BUNDLE_VERSION
            });
            for (let asset of release.data.assets) {
                if (asset.name === CODEQL_BUNDLE_NAME) {
                    core.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
                    return asset.url;
                }
            }
        }
        catch (e) {
            core.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
        }
    }
    return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
}
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url, headers) {
    const client = new http.HttpClient('CodeQL Action');
    const dest = path.join(util.getRequiredEnvParam('RUNNER_TEMP'), v4_1.default());
    const response = await client.get(url, headers);
    if (response.message.statusCode !== 200) {
        const err = new toolcache.HTTPError(response.message.statusCode);
        core.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
        throw err;
    }
    const pipeline = globalutil.promisify(stream.pipeline);
    fs.mkdirSync(path.dirname(dest), { recursive: true });
    await pipeline(response.message, fs.createWriteStream(dest));
    return dest;
}
async function setupCodeQL() {
    try {
        let codeqlURL = core.getInput('tools');
        const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
        let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
        if (codeqlFolder) {
            core.debug(`CodeQL found in cache ${codeqlFolder}`);
        }
        else {
            if (!codeqlURL) {
                codeqlURL = await getCodeQLBundleDownloadURL();
            }
            const headers = { accept: 'application/octet-stream' };
            // We only want to provide an authorization header if we are downloading
            // from the same GitHub instance the Action is running on.
            // This avoids leaking Enterprise tokens to dotcom.
            if (codeqlURL.startsWith(util.getInstanceAPIURL() + "/")) {
                core.debug('Downloading CodeQL bundle with token.');
                let token = core.getInput('token', { required: true });
                headers.authorization = `token ${token}`;
            }
            else {
                core.debug('Downloading CodeQL bundle without token.');
            }
            let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers);
            core.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
            const codeqlExtracted = await toolcache.extractTar(codeqlPath);
            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
        }
        let codeqlCmd = path.join(codeqlFolder, 'codeql', 'codeql');
        if (process.platform === 'win32') {
            codeqlCmd += ".exe";
        }
        else if (process.platform !== 'linux' && process.platform !== 'darwin') {
            throw new Error("Unsupported platform: " + process.platform);
        }
        cachedCodeQL = getCodeQLForCmd(codeqlCmd);
        core.exportVariable(CODEQL_ACTION_CMD, codeqlCmd);
        return cachedCodeQL;
    }
    catch (e) {
        core.error(e);
        throw new Error("Unable to download and extract CodeQL CLI");
    }
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url) {
    const match = url.match(/\/codeql-bundle-(.*)\//);
    if (match === null || match.length < 2) {
        throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
    }
    let version = match[1];
    if (!semver.valid(version)) {
        core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
        version = '0.0.0-' + version;
    }
    const s = semver.clean(version);
    if (!s) {
        throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but got ${version} instead`);
    }
    return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
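// Illustrative input/output pairs for the version inference above (a sketch
// derived from the mappings exercised in lib/codeql.test.js below; not part
// of the original file):
//   getCodeQLURLVersion('https://.../codeql-bundle-20200601/...')     === '0.0.0-20200601'
//   getCodeQLURLVersion('https://.../codeql-bundle-1.2.3/...')        === '1.2.3'
//   getCodeQLURLVersion('https://.../codeql-bundle-1.2.3-beta.1/...') === '1.2.3-beta.1'
// Non-SemVer bundle tags are mapped onto the 0.0.0 pre-release range so that
// the toolcache can still index them by a valid SemVer string.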
function getCodeQL() {
    if (cachedCodeQL === undefined) {
        const codeqlCmd = util.getRequiredEnvParam(CODEQL_ACTION_CMD);
        cachedCodeQL = getCodeQLForCmd(codeqlCmd);
    }
    return cachedCodeQL;
}
exports.getCodeQL = getCodeQL;
function resolveFunction(partialCodeql, methodName) {
    if (typeof partialCodeql[methodName] !== 'function') {
        const dummyMethod = () => {
            throw new Error('CodeQL ' + methodName + ' method not correctly defined');
        };
        return dummyMethod;
    }
    return partialCodeql[methodName];
}
/**
 * Set the functionality for CodeQL methods. Only for use in tests.
 *
 * Accepts a partial object and any undefined methods will be implemented
 * to immediately throw an exception indicating which method is missing.
 */
function setCodeQL(partialCodeql) {
    cachedCodeQL = {
        getDir: resolveFunction(partialCodeql, 'getDir'),
        printVersion: resolveFunction(partialCodeql, 'printVersion'),
        getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
        databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
        runAutobuild: resolveFunction(partialCodeql, 'runAutobuild'),
        extractScannedLanguage: resolveFunction(partialCodeql, 'extractScannedLanguage'),
        finalizeDatabase: resolveFunction(partialCodeql, 'finalizeDatabase'),
        resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
        databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
    };
}
exports.setCodeQL = setCodeQL;
function getCodeQLForCmd(cmd) {
    return {
        getDir: function () {
            return path.dirname(cmd);
        },
        printVersion: async function () {
            await exec.exec(cmd, [
                'version',
                '--format=json'
            ]);
        },
        getTracerEnv: async function (databasePath, compilerSpec) {
            let envFile = path.resolve(databasePath, 'working', 'env.tmp');
            const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
            await exec.exec(cmd, [
                'database',
                'trace-command',
                databasePath,
                ...compilerSpecArg,
                process.execPath,
                path.resolve(__dirname, 'tracer-env.js'),
                envFile
            ]);
            return JSON.parse(fs.readFileSync(envFile, 'utf-8'));
        },
        databaseInit: async function (databasePath, language, sourceRoot) {
            await exec.exec(cmd, [
                'database',
                'init',
                databasePath,
                '--language=' + language,
                '--source-root=' + sourceRoot,
            ]);
        },
        runAutobuild: async function (language) {
            const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
            const autobuildCmd = path.join(path.dirname(cmd), language, 'tools', cmdName);
            // Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
            // This is because of an issue with Azure pipelines timing out connections after 4 minutes
            // and Maven not properly handling closed connections.
            // Otherwise long build processes will time out when pulling down Java packages.
            // https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
            let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
            process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
            await exec.exec(autobuildCmd);
        },
        extractScannedLanguage: async function (databasePath, language) {
            // Get extractor location
            let extractorPath = '';
            await exec.exec(cmd, [
                'resolve',
                'extractor',
                '--format=json',
                '--language=' + language
            ], {
                silent: true,
                listeners: {
                    stdout: (data) => { extractorPath += data.toString(); },
                    stderr: (data) => { process.stderr.write(data); }
                }
            });
            // Set trace command
            const ext = process.platform === 'win32' ? '.cmd' : '.sh';
            const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
            // Run trace command
            await exec.exec(cmd, [
                'database',
                'trace-command',
                databasePath,
                '--',
                traceCommand
            ]);
        },
        finalizeDatabase: async function (databasePath) {
            await exec.exec(cmd, [
                'database',
                'finalize',
                databasePath
            ]);
        },
        resolveQueries: async function (queries, extraSearchPath) {
            const codeqlArgs = [
                'resolve',
                'queries',
                ...queries,
                '--format=bylanguage'
            ];
            if (extraSearchPath !== undefined) {
                codeqlArgs.push('--search-path', extraSearchPath);
            }
            let output = '';
            await exec.exec(cmd, codeqlArgs, {
                listeners: {
                    stdout: (data) => {
                        output += data.toString();
                    }
                }
            });
            return JSON.parse(output);
        },
        databaseAnalyze: async function (databasePath, sarifFile, querySuite) {
            await exec.exec(cmd, [
                'database',
                'analyze',
                util.getMemoryFlag(),
                util.getThreadsFlag(),
                databasePath,
                '--format=sarif-latest',
                '--output=' + sarifFile,
                '--no-sarif-add-snippets',
                querySuite
            ]);
        }
    };
}
function isTracedLanguage(language) {
    return ['cpp', 'java', 'csharp'].includes(language);
}
exports.isTracedLanguage = isTracedLanguage;
function isScannedLanguage(language) {
    return !isTracedLanguage(language);
}
exports.isScannedLanguage = isScannedLanguage;
//# sourceMappingURL=codeql.js.map
File diff suppressed because one or more lines are too long
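The setCodeQL/resolveFunction pair above is the module's test seam: any method the test does not stub is replaced by a stand-in that throws, so an unexpected CLI invocation fails loudly instead of shelling out. A minimal usage sketch, assuming the lib/ layout of this repository (the test files below use the same pattern):

// stub-codeql-example.js -- a minimal sketch, not part of the original repo.
const CodeQL = require('./lib/codeql');

// Stub only the method under test.
CodeQL.setCodeQL({
    resolveQueries: async () => ({
        byLanguage: { javascript: { 'foo.ql': {} } },
        noDeclaredLanguage: {},
        multipleDeclaredLanguages: {},
    }),
});

// Subsequent getCodeQL() calls return the stubbed object; calling any
// unstubbed method (e.g. runAutobuild) would throw immediately.
const codeql = CodeQL.getCodeQL();
codeql.resolveQueries(['javascript-code-scanning.qls'], undefined)
    .then((r) => console.log(Object.keys(r.byLanguage))); // ['javascript']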
60
lib/codeql.test.js
generated
@@ -1,60 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const codeql = __importStar(require("./codeql"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
    await util.withTmpDir(async (tmpDir) => {
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
        process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
        const versions = ['20200601', '20200610'];
        for (let i = 0; i < versions.length; i++) {
            const version = versions[i];
            nock_1.default('https://example.com')
                .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
                .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
            process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
            await codeql.setupCodeQL();
            t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
        }
        const cachedVersions = toolcache.findAllVersions('CodeQL');
        t.is(cachedVersions.length, 2);
    });
});
ava_1.default('parse codeql bundle url version', t => {
    const tests = {
        '20200601': '0.0.0-20200601',
        '20200601.0': '0.0.0-20200601.0',
        '20200601.0.0': '20200601.0.0',
        '1.2.3': '1.2.3',
        '1.2.3-alpha': '1.2.3-alpha',
        '1.2.3-beta.1': '1.2.3-beta.1',
    };
    for (const [version, expectedVersion] of Object.entries(tests)) {
        const url = `https://github.com/.../codeql-bundle-${version}/...`;
        try {
            const parsedVersion = codeql.getCodeQLURLVersion(url);
            t.deepEqual(parsedVersion, expectedVersion);
        }
        catch (e) {
            t.fail(e.message);
        }
    }
});
//# sourceMappingURL=codeql.test.js.map
File diff suppressed because one or more lines are too long
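The 'download codeql bundle cache' test drives setupCodeQL purely through the environment: it works because @actions/core resolves an action input named 'tools' from the variable INPUT_TOOLS. A small sketch of that convention (mirroring the transformation used by the setInput helper in lib/config-utils.test.js further down; the URL is a placeholder):

// input-convention-sketch.js -- why setting INPUT_TOOLS above is enough.
const core = require('@actions/core');

// core.getInput(name) reads process.env['INPUT_' + name.toUpperCase()],
// with spaces in the name replaced by underscores.
process.env['INPUT_TOOLS'] =
    'https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz';

console.log(core.getInput('tools')); // prints the URL set above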
627
lib/config-utils.js
generated
@@ -8,553 +8,128 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const yaml = __importStar(require("js-yaml"));
const path = __importStar(require("path"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
const util = __importStar(require("./util"));
// Property names from the user-supplied config file.
const NAME_PROPERTY = 'name';
const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
const QUERIES_PROPERTY = 'queries';
const QUERIES_USES_PROPERTY = 'uses';
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
const PATHS_PROPERTY = 'paths';
// All the languages supported by CodeQL
const ALL_LANGUAGES = ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'];
// Some alternate names for languages
const LANGUAGE_ALIASES = {
    'c': 'cpp',
    'typescript': 'javascript',
};
/**
 * A list of queries from https://github.com/github/codeql that
 * we don't want to run. Disabling them here is a quicker alternative to
 * disabling them in the code scanning query suites. Queries should also
 * be disabled in the suites, and removed from this list here once the
 * bundle is updated to make those suite changes live.
 *
 * Format is a map from language to an array of path suffixes of .ql files.
 */
const DISABLED_BUILTIN_QUERIES = {
    'csharp': [
        'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
        'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
    ]
};
function queryIsDisabled(language, query) {
    return (DISABLED_BUILTIN_QUERIES[language] || [])
        .some(disabledQuery => query.endsWith(disabledQuery));
}
/**
 * Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are
 * both empty and errors if they are not.
 */
function validateQueries(resolvedQueries) {
    const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
    const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
    if (noDeclaredLanguageQueries.length !== 0) {
        throw new Error('The following queries do not declare a language. ' +
            'Their qlpack.yml files are either missing or invalid.\n' +
            noDeclaredLanguageQueries.join('\n'));
    }
    const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
    const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
    if (multipleDeclaredLanguagesQueries.length !== 0) {
        throw new Error('The following queries declare multiple languages. ' +
            'Their qlpack.yml files are either missing or invalid.\n' +
            multipleDeclaredLanguagesQueries.join('\n'));
class ExternalQuery {
    constructor(repository, ref) {
        this.path = '';
        this.repository = repository;
        this.ref = ref;
    }
}
/**
 * Run 'codeql resolve queries' and add the results to resultMap
 */
async function runResolveQueries(resultMap, toResolve, extraSearchPath, errorOnInvalidQueries) {
    const codeQl = codeql_1.getCodeQL();
    const resolvedQueries = await codeQl.resolveQueries(toResolve, extraSearchPath);
    for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
        if (resultMap[language] === undefined) {
            resultMap[language] = [];
exports.ExternalQuery = ExternalQuery;
class Config {
    constructor() {
        this.name = "";
        this.disableDefaultQueries = false;
        this.additionalQueries = [];
        this.externalQueries = [];
        this.pathsIgnore = [];
        this.paths = [];
    }
    addQuery(queryUses) {
        // The logic for parsing the string is based on what actions does for
        // parsing the 'uses' actions in the workflow file
        if (queryUses === "") {
            throw '"uses" value for queries cannot be blank';
        }
        resultMap[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
    }
    if (errorOnInvalidQueries) {
        validateQueries(resolvedQueries);
    }
}
/**
 * Get the set of queries included by default.
 */
async function addDefaultQueries(languages, resultMap) {
    const suites = languages.map(l => l + '-code-scanning.qls');
    await runResolveQueries(resultMap, suites, undefined, false);
}
// The set of acceptable values for built-in suites from the codeql bundle
const builtinSuites = ['security-extended', 'security-and-quality'];
/**
 * Determine the set of queries associated with suiteName's suites and add them to resultMap.
 * Throws an error if suiteName is not a valid builtin suite.
 */
async function addBuiltinSuiteQueries(configFile, languages, resultMap, suiteName) {
    const suite = builtinSuites.find((suite) => suite === suiteName);
    if (!suite) {
        throw new Error(getQueryUsesInvalid(configFile, suiteName));
    }
    const suites = languages.map(l => l + '-' + suiteName + '.qls');
    await runResolveQueries(resultMap, suites, undefined, false);
}
/**
 * Retrieve the set of queries at the referenced remote repo and add them to resultMap.
 */
async function addRemoteQueries(configFile, resultMap, queryUses) {
    let tok = queryUses.split('@');
    if (tok.length !== 2) {
        throw new Error(getQueryUsesInvalid(configFile, queryUses));
    }
    const ref = tok[1];
    tok = tok[0].split('/');
    // The first token is the owner
    // The second token is the repo
    // The rest is a path, if there is more than one token combine them to form the full path
    if (tok.length < 2) {
        throw new Error(getQueryUsesInvalid(configFile, queryUses));
    }
    // Check none of the parts of the repository name are empty
    if (tok[0].trim() === '' || tok[1].trim() === '') {
        throw new Error(getQueryUsesInvalid(configFile, queryUses));
    }
    const nwo = tok[0] + '/' + tok[1];
    // Checkout the external repository
    const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref);
    const queryPath = tok.length > 2
        ? path.join(rootOfRepo, tok.slice(2).join('/'))
        : rootOfRepo;
    await runResolveQueries(resultMap, [queryPath], rootOfRepo, true);
}
/**
 * Parse a query 'uses' field to a discrete set of query files and update resultMap.
 *
 * The logic for parsing the string is based on what actions does for
 * parsing the 'uses' actions in the workflow file. So it can handle
 * local paths starting with './', or references to remote repos, or
 * a finite set of hardcoded terms for builtin suites.
 */
async function parseQueryUses(configFile, languages, resultMap, queryUses) {
    queryUses = queryUses.trim();
    if (queryUses === "") {
        throw new Error(getQueryUsesInvalid(configFile));
    }
    // Check for the local path case before we start trying to parse the repository name
    if (queryUses.startsWith("./")) {
        // now that we're using the pre-hook, we have to retrieve even 'local' queries using the API
        const remoteQueryUses = util.getRequiredEnvParam("GITHUB_REPOSITORY") + "/"
            + queryUses.substr(2) + "@" + util.getRef();
        await addRemoteQueries(configFile, resultMap, remoteQueryUses);
        return;
    }
    // Check for one of the builtin suites
    if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
        await addBuiltinSuiteQueries(configFile, languages, resultMap, queryUses);
        return;
    }
    // Otherwise, must be a reference to another repo
    await addRemoteQueries(configFile, resultMap, queryUses);
}
// Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately
// preceded and followed by a slash.
const pathStarsRegex = /.*(?:\*\*[^/].*|\*\*$|[^/]\*\*.*)/;
// Characters that are supported by filters in workflows, but not by us.
// See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/;
// Checks that a paths or paths-ignore entry is valid, possibly modifying it
// to make it valid, or if that is not possible then throws an error.
function validateAndSanitisePath(originalPath, propertyName, configFile) {
    // Take a copy so we don't modify the original path, so we can still construct error messages
    let path = originalPath;
    // All paths are relative to the src root, so strip off leading slashes.
    while (path.charAt(0) === '/') {
        path = path.substring(1);
    }
    // A trailing '**' is redundant, so strip it off
    if (path.endsWith('/**')) {
        path = path.substring(0, path.length - 2);
    }
    // An empty path is not allowed as it's meaningless
    if (path === '') {
        throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" is not a valid path. ' +
            'It is not necessary to include it, and it is not allowed to exclude it.'));
    }
    // Check for illegal uses of **
    if (path.match(pathStarsRegex)) {
        throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an invalid "**" wildcard. ' +
            'They must be immediately preceded and followed by a slash as in "/**/", or come at the start or end.'));
    }
    // Check for other regex characters that we don't support.
    // Output a warning so the user knows, but otherwise continue normally.
    if (path.match(filterPatternCharactersRegex)) {
        core.warning(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an unsupported character. ' +
            'The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.'));
    }
    // Ban any uses of backslash for now.
    // This may not play nicely with project layouts.
    // This restriction can be lifted later if we determine they are ok.
    if (path.indexOf('\\') !== -1) {
        throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an "\\" character. These are not allowed in filters. ' +
            'If running on windows we recommend using "/" instead for path filters.'));
    }
    return path;
}
exports.validateAndSanitisePath = validateAndSanitisePath;
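// Illustrative behaviour of validateAndSanitisePath (a sketch against the
// rules above; the 'paths' / 'cfg' arguments are placeholders used only for
// error messages and are not part of the original file):
//   validateAndSanitisePath('/src/**', 'paths', 'cfg')  => 'src/'
//       (leading slash stripped, trailing '/**' collapses to a trailing '/')
//   validateAndSanitisePath('a/**/b', 'paths', 'cfg')   => 'a/**/b'
//       (the '**' is slash-delimited, so it is accepted unchanged)
//   validateAndSanitisePath('a**b', 'paths', 'cfg')     => throws
//       (pathStarsRegex rejects '**' that is not bounded by slashes)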
function getNameInvalid(configFile) {
    return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
}
exports.getNameInvalid = getNameInvalid;
function getDisableDefaultQueriesInvalid(configFile) {
    return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
}
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
function getQueriesInvalid(configFile) {
    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
}
exports.getQueriesInvalid = getQueriesInvalid;
function getQueryUsesInvalid(configFile, queryUses) {
    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
        '), a relative path, or be of the form "owner/repo[/path]@ref"' +
        (queryUses !== undefined ? '\n Found: ' + queryUses : ''));
}
exports.getQueryUsesInvalid = getQueryUsesInvalid;
function getPathsIgnoreInvalid(configFile) {
    return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
}
exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
function getPathsInvalid(configFile) {
    return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
}
exports.getPathsInvalid = getPathsInvalid;
function getLocalPathOutsideOfRepository(configFile, localPath) {
    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
}
exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
function getLocalPathDoesNotExist(configFile, localPath) {
    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
}
exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
function getConfigFileRepoFormatInvalidMessage(configFile) {
    let error = 'The configuration file "' + configFile + '" is not a supported remote file reference.';
    error += ' Expected format <owner>/<repository>/<file-path>@<ref>';
    return error;
}
exports.getConfigFileRepoFormatInvalidMessage = getConfigFileRepoFormatInvalidMessage;
function getConfigFileFormatInvalidMessage(configFile) {
    return 'The configuration file "' + configFile + '" could not be read';
}
exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage;
function getConfigFileDirectoryGivenMessage(configFile) {
    return 'The configuration file "' + configFile + '" looks like a directory, not a file';
}
exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
function getConfigFilePropertyError(configFile, property, error) {
    return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
}
function getNoLanguagesError() {
    return "Did not detect any languages to analyze. " +
        "Please update input in workflow or check that GitHub detects the correct languages in your repository.";
}
exports.getNoLanguagesError = getNoLanguagesError;
function getUnknownLanguagesError(languages) {
    return "Did not recognise the following languages: " + languages.join(', ');
}
exports.getUnknownLanguagesError = getUnknownLanguagesError;
/**
 * Gets the set of languages in the current repository
 */
async function getLanguagesInRepo() {
    var _a;
    // Translate between GitHub's API names for languages and ours
    const codeqlLanguages = {
        'C': 'cpp',
        'C++': 'cpp',
        'C#': 'csharp',
        'Go': 'go',
        'Java': 'java',
        'JavaScript': 'javascript',
        'TypeScript': 'javascript',
        'Python': 'python',
    };
    let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
    if (repo_nwo) {
        let owner = repo_nwo[0];
        let repo = repo_nwo[1];
        core.debug(`GitHub repo ${owner} ${repo}`);
        const response = await api.getApiClient(true).repos.listLanguages({
            owner,
            repo
        });
        core.debug("Languages API response: " + JSON.stringify(response));
        // The GitHub API is going to return languages in order of popularity.
        // When we pick a language to autobuild we want to pick the most popular traced language.
        // Since sets in JavaScript maintain insertion order, using a set here and then spreading it
        // into an array gives us an array of languages ordered by popularity.
        let languages = new Set();
        for (let lang in response.data) {
            if (lang in codeqlLanguages) {
                languages.add(codeqlLanguages[lang]);
            }
        if (queryUses.startsWith("./")) {
            this.additionalQueries.push(queryUses.slice(2));
            return;
        }
        return [...languages];
    }
    else {
        return [];
        let tok = queryUses.split('@');
        if (tok.length !== 2) {
            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
        }
        const ref = tok[1];
        tok = tok[0].split('/');
        // The first token is the owner
        // The second token is the repo
        // The rest is a path, if there is more than one token combine them to form the full path
        if (tok.length > 3) {
            tok = [tok[0], tok[1], tok.slice(2).join('/')];
        }
        if (tok.length < 2) {
            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
        }
        let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
        if (tok.length === 3) {
            external.path = tok[2];
        }
        this.externalQueries.push(external);
    }
}
/**
 * Get the languages to analyse.
 *
 * The result is obtained from the action input parameter 'languages' if that
 * has been set, otherwise it is deduced as all languages in the repo that
 * can be analysed.
 *
 * If no languages could be detected from either the workflow or the repository
 * then throw an error.
 */
async function getLanguages() {
    // Obtain from action input 'languages' if set
    let languages = core.getInput('languages', { required: false })
        .split(',')
        .map(x => x.trim())
        .filter(x => x.length > 0);
    core.info("Languages from configuration: " + JSON.stringify(languages));
    if (languages.length === 0) {
        // Obtain languages as all languages in the repo that can be analysed
        languages = await getLanguagesInRepo();
        core.info("Automatically detected languages: " + JSON.stringify(languages));
    }
    // If the languages parameter was not given and no languages were
    // detected then fail here as this is a workflow configuration error.
    if (languages.length === 0) {
        throw new Error(getNoLanguagesError());
    }
    // Make sure they are supported
    const checkedLanguages = [];
    const unknownLanguages = [];
    for (let language of languages) {
        // Normalise to lower case
        language = language.toLowerCase();
        // Resolve any known aliases
        if (language in LANGUAGE_ALIASES) {
            language = LANGUAGE_ALIASES[language];
        }
        const checkedLanguage = ALL_LANGUAGES.find(l => l === language);
        if (checkedLanguage === undefined) {
            unknownLanguages.push(language);
        }
        else if (checkedLanguages.indexOf(checkedLanguage) === -1) {
            checkedLanguages.push(checkedLanguage);
        }
    }
    if (unknownLanguages.length > 0) {
        throw new Error(getUnknownLanguagesError(unknownLanguages));
    }
    return checkedLanguages;
}
/**
 * Get the default config for when the user has not supplied one.
 */
async function getDefaultConfig() {
    const languages = await getLanguages();
    const queries = {};
    await addDefaultQueries(languages, queries);
    return {
        languages: languages,
        queries: queries,
        pathsIgnore: [],
        paths: [],
        originalUserInput: {},
    };
}
exports.getDefaultConfig = getDefaultConfig;
/**
 * Load the config from the given file.
 */
async function loadConfig(configFile) {
    let parsedYAML;
    if (isLocal(configFile)) {
        // Even if it's local we want to retrieve the config using the API.
        // For using the API we have to remove the starting "./"
        const configFilePath = configFile.substr(2);
        const remote = util.getRequiredEnvParam("GITHUB_REPOSITORY") + "/" + configFilePath
            + "@" + util.getRef();
        parsedYAML = await getRemoteConfig(remote);
    }
    else {
        parsedYAML = await getRemoteConfig(configFile);
    }
    // Validate that the 'name' property is syntactically correct,
    // even though we don't use the value yet.
    if (NAME_PROPERTY in parsedYAML) {
        if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
            throw new Error(getNameInvalid(configFile));
        }
        if (parsedYAML[NAME_PROPERTY].length === 0) {
            throw new Error(getNameInvalid(configFile));
        }
    }
    const languages = await getLanguages();
    const queries = {};
    const pathsIgnore = [];
    const paths = [];
    let disableDefaultQueries = false;
    if (DISABLE_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
        if (typeof parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
            throw new Error(getDisableDefaultQueriesInvalid(configFile));
        }
        disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
    }
    if (!disableDefaultQueries) {
        await addDefaultQueries(languages, queries);
    }
    if (QUERIES_PROPERTY in parsedYAML) {
        if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
            throw new Error(getQueriesInvalid(configFile));
        }
        for (const query of parsedYAML[QUERIES_PROPERTY]) {
            if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
                throw new Error(getQueryUsesInvalid(configFile));
            }
            await parseQueryUses(configFile, languages, queries, query[QUERIES_USES_PROPERTY]);
        }
    }
    if (PATHS_IGNORE_PROPERTY in parsedYAML) {
        if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
            throw new Error(getPathsIgnoreInvalid(configFile));
        }
        parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
            if (typeof path !== "string" || path === '') {
                throw new Error(getPathsIgnoreInvalid(configFile));
            }
            pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
        });
    }
    if (PATHS_PROPERTY in parsedYAML) {
        if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
            throw new Error(getPathsInvalid(configFile));
        }
        parsedYAML[PATHS_PROPERTY].forEach(path => {
            if (typeof path !== "string" || path === '') {
                throw new Error(getPathsInvalid(configFile));
            }
            paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
        });
    }
    // The list of queries should not be empty for any language. If it is then
    // it is a user configuration error.
    for (const language of languages) {
        if (queries[language] === undefined || queries[language].length === 0) {
            throw new Error(`Did not detect any queries to run for ${language}. ` +
                "Please make sure that the default queries are enabled, or you are specifying queries to run.");
        }
    }
    return {
        languages,
        queries,
        pathsIgnore,
        paths,
        originalUserInput: parsedYAML
    };
}
/**
 * Load and return the config.
 *
 * This will parse the config from the user input if present, or generate
 * a default config. The parsed config is then stored to a known location.
 */
async function initConfig() {
exports.Config = Config;
const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
function initConfig() {
    const configFile = core.getInput('config-file');
    let config;
    const config = new Config();
    // If no config file was provided create an empty one
    if (configFile === '') {
        core.debug('No configuration file was provided');
        config = await getDefaultConfig();
        return config;
    }
    else {
        config = await loadConfig(configFile);
    try {
        const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
        if (parsedYAML.name && typeof parsedYAML.name === "string") {
            config.name = parsedYAML.name;
        }
        if (parsedYAML['disable-default-queries'] && typeof parsedYAML['disable-default-queries'] === "boolean") {
            config.disableDefaultQueries = parsedYAML['disable-default-queries'];
        }
        const queries = parsedYAML.queries;
        if (queries && queries instanceof Array) {
            queries.forEach(query => {
                if (query.uses && typeof query.uses === "string") {
                    config.addQuery(query.uses);
                }
            });
        }
        const pathsIgnore = parsedYAML['paths-ignore'];
        if (pathsIgnore && pathsIgnore instanceof Array) {
            pathsIgnore.forEach(path => {
                if (typeof path === "string") {
                    config.pathsIgnore.push(path);
                }
            });
        }
        const paths = parsedYAML.paths;
        if (paths && paths instanceof Array) {
            paths.forEach(path => {
                if (typeof path === "string") {
                    config.paths.push(path);
                }
            });
        }
    }
    catch (err) {
        core.setFailed(err);
    }
    // Save the config so we can easily access it again in the future
    await saveConfig(config);
    return config;
}
exports.initConfig = initConfig;
function isLocal(configPath) {
    // If the path starts with ./, look locally
    if (configPath.indexOf("./") === 0) {
        return true;
    }
    return (configPath.indexOf("@") === -1);
}
async function getRemoteConfig(configFile) {
    // retrieve the various parts of the config location, and ensure they're present
    const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
    const pieces = format.exec(configFile);
    // 5 = 4 groups + the whole expression
    if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
        throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
    }
    const response = await api.getApiClient(true).repos.getContents({
        owner: pieces.groups.owner,
        repo: pieces.groups.repo,
        path: pieces.groups.path,
        ref: pieces.groups.ref,
    });
    let fileContents;
    if ("content" in response.data && response.data.content !== undefined) {
        fileContents = response.data.content;
    }
    else if (Array.isArray(response.data)) {
        throw new Error(getConfigFileDirectoryGivenMessage(configFile));
    }
    else {
        throw new Error(getConfigFileFormatInvalidMessage(configFile));
    }
    return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
}
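// Illustrative parse of the remote reference format accepted above (a sketch
// against the named-group regex; the repository names are placeholders):
//   'octo-org/security/configs/codeql.yml@main'
//     => owner: 'octo-org', repo: 'security',
//        path: 'configs/codeql.yml', ref: 'main'
// A bare 'octo-org/codeql-config@main' has no <file-path> component, so the
// regex fails to match and getConfigFileRepoFormatInvalidMessage is thrown,
// exactly as the "load non-local input with invalid repo syntax" test below
// expects.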
/**
 * Get the file path where the parsed config will be stored.
 */
function getPathToParsedConfigFile() {
    return path.join(util.getRequiredEnvParam('RUNNER_TEMP'), 'config');
}
exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
/**
 * Store the given config to the path returned from getPathToParsedConfigFile.
 */
async function saveConfig(config) {
    const configString = JSON.stringify(config);
    const configFile = getPathToParsedConfigFile();
    fs.mkdirSync(path.dirname(configFile), { recursive: true });
    fs.writeFileSync(configFile, configString, 'utf8');
    await io.mkdirP(configFolder);
    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
    core.debug('Saved config:');
    core.debug(configString);
}
/**
 * Get the config.
 *
 * If this is the first time in a workflow that this is being called then
 * this will parse the config from the user input. The parsed config is then
 * stored to a known location. On the second and further calls, this will
 * return the contents of the parsed config from the known location.
 */
async function getConfig() {
    const configFile = getPathToParsedConfigFile();
    if (!fs.existsSync(configFile)) {
        throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
async function loadConfig() {
    const configFile = path.join(configFolder, 'config');
    if (fs.existsSync(configFile)) {
        const configString = fs.readFileSync(configFile, 'utf8');
        core.debug('Loaded config:');
        core.debug(configString);
        return JSON.parse(configString);
    }
    else {
        const config = initConfig();
        core.debug('Initialized config:');
        core.debug(JSON.stringify(config));
        await saveConfig(config);
        return config;
    }
    const configString = fs.readFileSync(configFile, 'utf8');
    core.debug('Loaded config:');
    core.debug(configString);
    return JSON.parse(configString);
}
exports.getConfig = getConfig;
//# sourceMappingURL=config-utils.js.map
exports.loadConfig = loadConfig;
File diff suppressed because one or more lines are too long
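The two-phase flow above (initConfig writes the parsed config once, getConfig rereads it in later actions of the same job) is exactly what the "loading config saves config" test below exercises. A condensed sketch of that round trip, using the same stubbing pattern as the tests (RUNNER_TEMP, the INPUT_ variables, and the ./lib layout are assumptions for illustration):

// config-roundtrip-sketch.js -- not part of the original repo.
const fs = require('fs');
const os = require('os');
const path = require('path');
process.env['RUNNER_TEMP'] = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-'));
process.env['INPUT_LANGUAGES'] = 'javascript'; // read by core.getInput('languages')

const CodeQL = require('./lib/codeql');
const configUtils = require('./lib/config-utils');

// Stub out the CLI call that addDefaultQueries would otherwise make.
CodeQL.setCodeQL({
    resolveQueries: async () => ({
        byLanguage: { javascript: { 'foo.ql': {} } },
        noDeclaredLanguage: {},
        multipleDeclaredLanguages: {},
    }),
});

(async () => {
    const written = await configUtils.initConfig(); // parses inputs, saves under RUNNER_TEMP
    const reread = await configUtils.getConfig();   // later actions read the saved file
    console.log(JSON.stringify(written) === JSON.stringify(reread)); // true
})().catch(console.error);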
429
lib/config-utils.test.js
generated
@@ -1,429 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const sinon_1 = __importDefault(require("sinon"));
const api = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
function setInput(name, value) {
    // Transformation copied from
    // https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
    const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    if (value !== undefined) {
        process.env[envVar] = value;
    }
    else {
        delete process.env[envVar];
    }
}
function mockGetContents(content) {
    // Passing an auth token is required, so we just use a dummy value
    let client = new github.GitHub('123');
    const response = {
        data: content
    };
    const spyGetContents = sinon_1.default.stub(client.repos, "getContents").resolves(response);
    sinon_1.default.stub(api, "getApiClient").value(() => client);
    return spyGetContents;
}
function mockListLanguages(languages) {
    // Passing an auth token is required, so we just use a dummy value
    let client = new github.GitHub('123');
    const response = {
        data: {},
    };
    for (const language of languages) {
        response.data[language] = 123;
    }
    sinon_1.default.stub(client.repos, "listLanguages").resolves(response);
    sinon_1.default.stub(api, "getApiClient").value(() => client);
}
ava_1.default("load empty config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        setInput('config-file', undefined);
        setInput('languages', 'javascript,python');
        CodeQL.setCodeQL({
            resolveQueries: async function () {
                return {
                    byLanguage: {},
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        const config = await configUtils.initConfig();
        t.deepEqual(config, await configUtils.getDefaultConfig());
    });
});
ava_1.default("loading config saves config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        setInput('config-file', undefined);
        setInput('languages', 'javascript,python');
        CodeQL.setCodeQL({
            resolveQueries: async function () {
                return {
                    byLanguage: {},
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        // Sanity check the saved config file does not already exist
        t.false(fs.existsSync(configUtils.getPathToParsedConfigFile()));
        // Sanity check that getConfig throws before we have called initConfig
        await t.throwsAsync(configUtils.getConfig);
        const config1 = await configUtils.initConfig();
        // The saved config file should now exist
        t.true(fs.existsSync(configUtils.getPathToParsedConfigFile()));
        // And that same newly-initialised config should now be returned by getConfig
        const config2 = await configUtils.getConfig();
        t.deepEqual(config1, config2);
    });
});
ava_1.default("load non-local input with invalid repo syntax", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        // no filename given, just a repo
        setInput('config-file', 'octo-org/codeql-config@main');
        try {
            await configUtils.initConfig();
            throw new Error('initConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
        }
    });
});
ava_1.default("load non-empty input", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        process.env['GITHUB_REPOSITORY'] = "octo-org/codeql-config";
        process.env["GITHUB_REF"] = "refs/heads/main";
        CodeQL.setCodeQL({
            resolveQueries: async function () {
                return {
                    byLanguage: {
                        'javascript': {
                            '/foo/a.ql': {},
                            '/bar/b.ql': {},
                        },
                    },
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        // Just create a generic config object with non-default values for all fields
        const inputFileContents = `
name: my config
disable-default-queries: true
queries:
- uses: ./foo
paths-ignore:
- a
- b
paths:
- c/d`;
        const dummyResponse = {
            content: Buffer.from(inputFileContents).toString("base64"),
        };
        const spyGetContents = mockGetContents(dummyResponse);
        fs.mkdirSync(path.join(tmpDir, 'foo'));
        // And the config we expect it to parse to
        const expectedConfig = {
            languages: ['javascript'],
            queries: { 'javascript': ['/foo/a.ql', '/bar/b.ql'] },
            pathsIgnore: ['a', 'b'],
            paths: ['c/d'],
            originalUserInput: {
                name: 'my config',
                'disable-default-queries': true,
                queries: [{ uses: './foo' }],
                'paths-ignore': ['a', 'b'],
                paths: ['c/d'],
            },
        };
        setInput('config-file', 'input');
        setInput('languages', 'javascript');
        const actualConfig = await configUtils.initConfig();
        // Should exactly equal the object we constructed earlier
        t.deepEqual(actualConfig, expectedConfig);
        t.assert(spyGetContents.called);
    });
});
ava_1.default("default queries are used", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        process.env['GITHUB_REPOSITORY'] = "octo-org/codeql-config";
        process.env["GITHUB_REF"] = "refs/heads/main";
        // Check that the default behaviour is to add the default queries.
        // In this case a config file is specified, but it does not include
        // the disable-default-queries field.
        // We determine this by whether CodeQL.resolveQueries is called
        // with the correct arguments.
        const resolveQueriesArgs = [];
        CodeQL.setCodeQL({
            resolveQueries: async function (queries, extraSearchPath) {
                resolveQueriesArgs.push({ queries, extraSearchPath });
                return {
                    byLanguage: {
                        'javascript': {
                            'foo.ql': {},
                        },
                    },
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        // The important point of this config is that it doesn't specify
        // the disable-default-queries field.
        // Any other details are hopefully irrelevant for this test.
        const inputFileContents = `
paths:
- foo`;
        const dummyResponse = {
            content: Buffer.from(inputFileContents).toString("base64"),
        };
        const spyGetContents = mockGetContents(dummyResponse);
        fs.mkdirSync(path.join(tmpDir, 'foo'));
        setInput('config-file', 'input');
        setInput('languages', 'javascript');
        await configUtils.initConfig();
        // Check resolve queries was called correctly
        t.deepEqual(resolveQueriesArgs.length, 1);
        t.deepEqual(resolveQueriesArgs[0].queries, ['javascript-code-scanning.qls']);
        t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined);
        t.assert(spyGetContents.called);
    });
});
ava_1.default("API client used when reading remote config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        CodeQL.setCodeQL({
            resolveQueries: async function () {
                return {
                    byLanguage: {
                        'javascript': {
                            'foo.ql': {},
                        },
                    },
                    noDeclaredLanguage: {},
                    multipleDeclaredLanguages: {},
                };
            },
        });
        const inputFileContents = `
name: my config
disable-default-queries: true
|
||||
queries:
|
||||
- uses: ./
|
||||
- uses: ./foo
|
||||
- uses: foo/bar@dev
|
||||
paths-ignore:
|
||||
- a
|
||||
- b
|
||||
paths:
|
||||
- c/d`;
|
||||
const dummyResponse = {
|
||||
content: Buffer.from(inputFileContents).toString("base64"),
|
||||
};
|
||||
const spyGetContents = mockGetContents(dummyResponse);
|
||||
// Create checkout directory for remote queries repository
|
||||
fs.mkdirSync(path.join(tmpDir, 'foo/bar'), { recursive: true });
|
||||
setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
|
||||
setInput('languages', 'javascript');
|
||||
await configUtils.initConfig();
|
||||
t.assert(spyGetContents.called);
|
||||
});
|
||||
});
|
||||
ava_1.default("Remote config handles the case where a directory is provided", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
const dummyResponse = []; // directories are returned as arrays
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = 'octo-org/codeql-config/config.yaml@main';
|
||||
setInput('config-file', repoReference);
|
||||
try {
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
|
||||
}
|
||||
});
|
||||
});
|
||||
ava_1.default("Invalid format of remote config handled correctly", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
const dummyResponse = {
|
||||
// note no "content" property here
|
||||
};
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = 'octo-org/codeql-config/config.yaml@main';
|
||||
setInput('config-file', repoReference);
|
||||
try {
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
|
||||
}
|
||||
});
|
||||
});
|
||||
ava_1.default("No detected languages", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
mockListLanguages([]);
|
||||
try {
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
|
||||
}
|
||||
});
|
||||
});
|
||||
ava_1.default("Unknown languages", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
setInput('languages', 'ruby,english');
|
||||
try {
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(['ruby', 'english'])));
|
||||
}
|
||||
});
|
||||
});
|
||||
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
|
||||
ava_1.default("load invalid input - " + testName, async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
process.env['GITHUB_REPOSITORY'] = "octo-org/codeql-config";
|
||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||
CodeQL.setCodeQL({
|
||||
resolveQueries: async function () {
|
||||
return {
|
||||
byLanguage: {},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
const dummyResponse = {
|
||||
content: Buffer.from(inputFileContents).toString("base64"),
|
||||
};
|
||||
const spyGetContents = mockGetContents(dummyResponse);
|
||||
setInput('config-file', 'input');
|
||||
setInput('languages', 'javascript');
|
||||
try {
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(expectedErrorMessageGenerator("input")));
|
||||
t.assert(spyGetContents.called);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
doInvalidInputTest('name invalid type', `
|
||||
name:
|
||||
- foo: bar`, configUtils.getNameInvalid);
|
||||
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
|
||||
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
|
||||
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
|
||||
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
|
||||
doInvalidInputTest('queries uses invalid type', `
|
||||
queries:
|
||||
- uses:
|
||||
- hello: world`, configUtils.getQueryUsesInvalid);
|
||||
function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
|
||||
// Invalid contents of a "queries.uses" field.
|
||||
// Should fail with the expected error message
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
queries:
|
||||
- name: foo
|
||||
uses: ` + input;
|
||||
doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
|
||||
}
|
||||
// Various "uses" fields, and the errors they should produce
|
||||
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
|
||||
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
|
||||
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
|
||||
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
|
||||
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
|
||||
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
|
||||
// doInvalidQueryUsesTest(
|
||||
// "./..",
|
||||
// c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
|
||||
const validPaths = [
|
||||
'foo',
|
||||
'foo/',
|
||||
'foo/**',
|
||||
'foo/**/',
|
||||
'foo/**/**',
|
||||
'foo/**/bar/**/baz',
|
||||
'**/',
|
||||
'**/foo',
|
||||
'/foo',
|
||||
];
|
||||
const invalidPaths = [
|
||||
'a/***/b',
|
||||
'a/**b',
|
||||
'a/b**',
|
||||
'**',
|
||||
];
|
||||
ava_1.default('path validations', t => {
|
||||
// Dummy values to pass to validateAndSanitisePath
|
||||
const propertyName = 'paths';
|
||||
const configFile = './.github/codeql/config.yml';
|
||||
for (const path of validPaths) {
|
||||
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile));
|
||||
}
|
||||
for (const path of invalidPaths) {
|
||||
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile));
|
||||
}
|
||||
});
|
||||
ava_1.default('path sanitisation', t => {
|
||||
// Dummy values to pass to validateAndSanitisePath
|
||||
const propertyName = 'paths';
|
||||
const configFile = './.github/codeql/config.yml';
|
||||
// Valid paths are not modified
|
||||
t.deepEqual(configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile), 'foo/bar');
|
||||
// Trailing stars are stripped
|
||||
t.deepEqual(configUtils.validateAndSanitisePath('foo/**', propertyName, configFile), 'foo/');
|
||||
});
|
||||
//# sourceMappingURL=config-utils.test.js.map
File diff suppressed because one or more lines are too long
35 lib/external-queries.js generated
@@ -11,24 +11,21 @@ const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const util = __importStar(require("./util"));
/**
 * Check out repository at the given ref, and return the directory of the checkout.
 */
async function checkoutExternalRepository(repository, ref) {
const folder = util.getRequiredEnvParam('RUNNER_TEMP');
core.info('Checking out ' + repository);
const checkoutLocation = path.join(folder, repository);
if (!fs.existsSync(checkoutLocation)) {
const repoURL = 'https://github.com/' + repository + '.git';
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
await exec.exec('git', [
'--work-tree=' + checkoutLocation,
'--git-dir=' + checkoutLocation + '/.git',
'checkout', ref,
]);
async function checkoutExternalQueries(config) {
const folder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
for (const externalQuery of config.externalQueries) {
core.info('Checking out ' + externalQuery.repository);
const checkoutLocation = path.join(folder, externalQuery.repository);
if (!fs.existsSync(checkoutLocation)) {
const repoURL = 'https://github.com/' + externalQuery.repository + '.git';
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
await exec.exec('git', [
'--work-tree=' + checkoutLocation,
'--git-dir=' + checkoutLocation + '/.git',
'checkout', externalQuery.ref,
]);
}
config.additionalQueries.push(path.join(checkoutLocation, externalQuery.path));
}
return checkoutLocation;
}
exports.checkoutExternalRepository = checkoutExternalRepository;
//# sourceMappingURL=external-queries.js.map
exports.checkoutExternalQueries = checkoutExternalQueries;
@@ -1 -0,0 @@
File diff suppressed because one or more lines are too long
28 lib/external-queries.test.js generated
@@ -1,28 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const externalQueries = __importStar(require("./external-queries"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("checkoutExternalQueries", async (t) => {
    await util.withTmpDir(async (tmpDir) => {
        process.env["RUNNER_TEMP"] = tmpDir;
        await externalQueries.checkoutExternalRepository("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b");
        // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch
        t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
    });
});
//# sourceMappingURL=external-queries.test.js.map
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,0BAA0B,CAAC,kBAAkB,EAAE,0CAA0C,CAAC,CAAC;QAEjH,mGAAmG;QACnG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
152 lib/finalize-db.js generated
@@ -8,106 +8,134 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const externalQueries = __importStar(require("./external-queries"));
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, queriesStats, uploadStats, error) {
var _a, _b, _c;
const status = ((_a = queriesStats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined ? 'failure' : 'success';
const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const statusReport = {
...statusReportBase,
...(queriesStats || {}),
...(uploadStats || {}),
};
await util.sendStatusReport(statusReport);
}
async function createdDBForScannedLanguages(databaseFolder, config) {
const codeql = codeql_1.getCodeQL();
for (const language of config.languages) {
if (codeql_1.isScannedLanguage(language)) {
async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
if (scannedLanguages) {
for (const language of scannedLanguages.split(',')) {
core.startGroup('Extracting ' + language);
await codeql.extractScannedLanguage(path.join(databaseFolder, language), language);
// Get extractor location
let extractorPath = '';
await exec.exec(codeqlCmd, ['resolve', 'extractor', '--format=json', '--language=' + language], {
silent: true,
listeners: {
stdout: (data) => { extractorPath += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
});
// Set trace command
const ext = process.platform === 'win32' ? '.cmd' : '.sh';
const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
// Run trace command
await exec.exec(codeqlCmd, ['database', 'trace-command', path.join(databaseFolder, language), '--', traceCommand]);
core.endGroup();
}
}
}
async function finalizeDatabaseCreation(databaseFolder, config) {
await createdDBForScannedLanguages(databaseFolder, config);
const codeql = codeql_1.getCodeQL();
for (const language of config.languages) {
async function finalizeDatabaseCreation(codeqlCmd, databaseFolder) {
await createdDBForScannedLanguages(codeqlCmd, databaseFolder);
const languages = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES] || '';
for (const language of languages.split(',')) {
core.startGroup('Finalizing ' + language);
await codeql.finalizeDatabase(path.join(databaseFolder, language));
await exec.exec(codeqlCmd, ['database', 'finalize', path.join(databaseFolder, language)]);
core.endGroup();
}
}
// Runs queries and creates sarif files in the given folder
async function runQueries(databaseFolder, sarifFolder, config) {
const codeql = codeql_1.getCodeQL();
for (let language of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + language);
const queries = config.queries[language] || [];
if (queries.length === 0) {
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
async function resolveQueryLanguages(codeqlCmd, config) {
let res = new Map();
if (config.additionalQueries.length !== 0) {
let resolveQueriesOutput = '';
const options = {
listeners: {
stdout: (data) => {
resolveQueriesOutput += data.toString();
}
}
};
await exec.exec(codeqlCmd, [
'resolve',
'queries',
...config.additionalQueries,
'--format=bylanguage'
], options);
const resolveQueriesOutputObject = JSON.parse(resolveQueriesOutput);
for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
res[language] = Object.keys(queries);
}
try {
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = path.join(databaseFolder, language + '-queries.qls');
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync(querySuite, querySuiteContents);
core.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
const sarifFile = path.join(sarifFolder, language + '.sarif');
await codeql.databaseAnalyze(path.join(databaseFolder, language), sarifFile, querySuite);
core.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
core.endGroup();
const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
if (noDeclaredLanguageQueries.length !== 0) {
core.warning('Some queries do not declare a language:\n' + noDeclaredLanguageQueries.join('\n'));
}
catch (e) {
// For now the fields about query performance are not populated
return {
analyze_failure_language: language,
};
const multipleDeclaredLanguages = resolveQueriesOutputObject.multipleDeclaredLanguages;
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
if (multipleDeclaredLanguagesQueries.length !== 0) {
core.warning('Some queries declare multiple languages:\n' + multipleDeclaredLanguagesQueries.join('\n'));
}
}
return {};
return res;
}
// Runs queries and creates sarif files in the given folder
async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
const queriesPerLanguage = await resolveQueryLanguages(codeqlCmd, config);
for (let database of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + database);
const queries = [];
if (!config.disableDefaultQueries) {
queries.push(database + '-code-scanning.qls');
}
queries.push(...(queriesPerLanguage[database] || []));
const sarifFile = path.join(sarifFolder, database + '.sarif');
await exec.exec(codeqlCmd, [
'database',
'analyze',
path.join(databaseFolder, database),
'--format=sarif-latest',
'--output=' + sarifFile,
'--no-sarif-add-snippets',
...queries
]);
core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
core.endGroup();
}
}
async function run() {
const startedAt = new Date();
let queriesStats = undefined;
let uploadStats = undefined;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
if (util.should_abort('finish', true) || !await util.reportActionStarting('finish')) {
return;
}
const config = await configUtils.getConfig();
const config = await configUtils.loadConfig();
core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
const databaseFolder = util.getCodeQLDatabasesDir();
const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
const databaseFolder = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_DATABASE_DIR);
const sarifFolder = core.getInput('output');
fs.mkdirSync(sarifFolder, { recursive: true });
await io.mkdirP(sarifFolder);
core.info('Finalizing database creation');
await finalizeDatabaseCreation(databaseFolder, config);
await finalizeDatabaseCreation(codeqlCmd, databaseFolder);
await externalQueries.checkoutExternalQueries(config);
core.info('Analyzing database');
queriesStats = await runQueries(databaseFolder, sarifFolder, config);
await runQueries(codeqlCmd, databaseFolder, sarifFolder, config);
if ('true' === core.getInput('upload')) {
uploadStats = await upload_lib.upload(sarifFolder);
await upload_lib.upload(sarifFolder);
}
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await sendStatusReport(startedAt, queriesStats, uploadStats, error);
await util.reportActionFailed('finish', error.message, error.stack);
return;
}
await sendStatusReport(startedAt, queriesStats, uploadStats);
await util.reportActionSucceeded('finish');
}
run().catch(e => {
core.setFailed("analyze action failed: " + e);
console.log(e);
});
//# sourceMappingURL=finalize-db.js.map
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"finalize-db.js","sourceRoot":"","sources":["../src/finalize-db.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,qCAAwD;AACxD,4DAA8C;AAC9C,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAiC/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,YAA6C,EAC7C,WAAsD,EACtD,KAAa;;IAEb,MAAM,MAAM,GAAG,OAAA,YAAY,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IACnH,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,QAAE,KAAK,0CAAE,OAAO,QAAE,KAAK,0CAAE,KAAK,CAAC,CAAC;IACtH,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,YAAY,IAAI,EAAE,CAAC;QACvB,GAAG,CAAC,WAAW,IAAI,EAAE,CAAC;KACvB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,4BAA4B,CAAC,cAAsB,EAAE,MAA0B;IAC5F,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,0BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;YAC1C,MAAM,MAAM,CAAC,sBAAsB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;YACnF,IAAI,CAAC,QAAQ,EAAE,CAAC;SACjB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CAAC,cAAsB,EAAE,MAA0B;IACxF,MAAM,4BAA4B,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;IAE3D,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;QAC1C,MAAM,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC,CAAC;QACnE,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CACvB,cAAsB,EACtB,WAAmB,EACnB,MAA0B;IAE1B,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,IAAI,QAAQ,IAAI,EAAE,CAAC,WAAW,CAAC,cAAc,CAAC,EAAE;QACnD,IAAI,CAAC,UAAU,CAAC,YAAY,GAAG,QAAQ,CAAC,CAAC;QAEzC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,GAAG,QAAQ,GAAG,gDAAgD,CAAC,CAAC;SACrG;QAED,IAAI;YACF,uEAAuE;YACvE,2EAA2E;YAC3E,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,GAAG,cAAc,CAAC,CAAC;YACxE,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxE,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;YACjD,IAAI,CAAC,KAAK,CAAC,uBAAuB,GAAG,QAAQ,GAAG,OAAO,GAAG,kBAAkB,CAAC,CAAC;YAE9E,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;YAE9D,MAAM,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,CAAC;YAEzF,IAAI,CAAC,KAAK,CAAC,6BAA6B,GAAG,QAAQ,GAAG,eAAe,GAAG,SAAS,GAAG,GAAG,CAAC,CAAC;YACzF,IAAI,CAAC,QAAQ,EAAE,CAAC;SAEjB;QAAC,OAAO,CAAC,EAAE;YACV,+DAA+D;YAC/D,OAAO;gBACL,wBAAwB,EAAE,QAAQ;aACnC,CAAC;SACH;KACF;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAoC,SAAS,CAAC;IAC9D,IAAI,WAAW,GAA8C,SAAS,CAAC;IACvE,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC1G,OAAO;SACR;QACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,CAAC;QAE7C,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,0BAA0B,EAAE,EAAE,CAAC,CAAC;QAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEzD,MAAM,cAAc,GAAG,IAAI,CAAC,qBAAqB,EAAE,CAAC;QAEpD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAC5C,EAAE,CAAC,SAAS,CAAC,WAAW,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAE/C,IAAI,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;QAC1C,MAAM,wBAAwB,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;QAEvD,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAChC,YAAY,GAAG,MAAM,UAAU,CAAC,cAAc,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;QAErE,IAAI,MAAM,KAAK,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACtC,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CAAC,WAAW,CAAC
,CAAC;SACpD;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,YAAY,EAAE,WAAW,EAAE,KAAK,CAAC,CAAC;QACpE,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,YAAY,EAAE,WAAW,CAAC,CAAC;AAC/D,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,yBAAyB,GAAG,CAAC,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
2 lib/finalize-db.test.js generated
@@ -1,2 +0,0 @@
"use strict";
//# sourceMappingURL=finalize-db.test.js.map
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"finalize-db.test.js","sourceRoot":"","sources":["../src/finalize-db.test.ts"],"names":[],"mappings":""}
|
||||
26 lib/fingerprints.js generated
@@ -146,10 +146,10 @@ function locationUpdateCallback(result, location) {
result.partialFingerprints.primaryLocationLineHash = hash;
}
else if (existingFingerprint !== hash) {
core.warning('Calculated fingerprint of ' + hash +
' for file ' + location.physicalLocation.artifactLocation.uri +
' line ' + lineNumber +
', but found existing inconsistent fingerprint value ' + existingFingerprint);
core.warning("Calculated fingerprint of " + hash +
" for file " + location.physicalLocation.artifactLocation.uri +
" line " + lineNumber +
", but found existing inconsistent fingerprint value " + existingFingerprint);
}
};
}
@@ -164,14 +164,14 @@ function resolveUriToFile(location, artifacts) {
location.index < 0 ||
location.index >= artifacts.length ||
typeof artifacts[location.index].location !== 'object') {
core.debug(`Ignoring location as URI "${location.index}" is invalid`);
core.debug('Ignoring location as index "' + location.index + '" is invalid');
return undefined;
}
location = artifacts[location.index].location;
}
// Get the URI and decode
if (typeof location.uri !== 'string') {
core.debug(`Ignoring location as index "${location.uri}" is invalid`);
core.debug('Ignoring location as uri "' + location.uri + '" is invalid');
return undefined;
}
let uri = decodeURIComponent(location.uri);
@@ -181,13 +181,13 @@ function resolveUriToFile(location, artifacts) {
uri = uri.substring(fileUriPrefix.length);
}
if (uri.indexOf('://') !== -1) {
core.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
core.debug('Ignoring location URI "' + uri + "' as the scheme is not recognised");
return undefined;
}
// Discard any absolute paths that aren't in the src root
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
core.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
core.debug('Ignoring location URI "' + uri + "' as it is outside of the src root");
return undefined;
}
// Just assume a relative path is relative to the src root.
@@ -198,7 +198,7 @@ function resolveUriToFile(location, artifacts) {
}
// Check the file exists
if (!fs.existsSync(uri)) {
core.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
core.debug("Unable to compute fingerprint for non-existent file: " + uri);
return undefined;
}
return uri;
@@ -207,7 +207,6 @@ exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
function addFingerprints(sarifContents) {
var _a, _b;
let sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
@@ -218,8 +217,10 @@ function addFingerprints(sarifContents) {
for (const result of run.results || []) {
// Check the primary location is defined correctly and is in the src root
const primaryLocation = (result.locations || [])[0];
if (!((_b = (_a = primaryLocation) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation)) {
core.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
if (!primaryLocation ||
!primaryLocation.physicalLocation ||
!primaryLocation.physicalLocation.artifactLocation) {
core.debug("Unable to compute fingerprint for invalid location: " + JSON.stringify(primaryLocation));
continue;
}
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
@@ -244,4 +245,3 @@ function addFingerprints(sarifContents) {
return JSON.stringify(sarif);
}
exports.addFingerprints = addFingerprints;
//# sourceMappingURL=fingerprints.js.map
File diff suppressed because one or more lines are too long
159 lib/fingerprints.test.js generated
@@ -1,159 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const fingerprints = __importStar(require("./fingerprints"));
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
function testHash(t, input, expectedHashes) {
    let index = 0;
    let callback = function (lineNumber, hash) {
        t.is(lineNumber, index + 1);
        t.is(hash, expectedHashes[index]);
        index++;
    };
    fingerprints.hash(callback, input);
    t.is(index, input.split(/\r\n|\r|\n/).length);
}
ava_1.default('hash', (t) => {
    // Try empty file
    testHash(t, "", ["c129715d7a2bc9a3:1"]);
    // Try various combinations of newline characters
    testHash(t, " a\nb\n \t\tc\n d", [
        "271789c17abda88f:1",
        "54703d4cd895b18:1",
        "180aee12dab6264:1",
        "a23a3dc5e078b07b:1"
    ]);
    testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
        "8b7cf3e952e7aeb2:1",
        "b1ae1287ec4718d9:1",
        "bff680108adb0fcc:1",
        "c6805c5e1288b612:1",
        "b86d3392aea1be30:1",
        "e6ceba753e1a442:1",
    ]);
    testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    testHash(t, " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    testHash(t, " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    testHash(t, " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    // Try repeating line that will generate identical hashes
    testHash(t, "Lorem ipsum dolor sit amet.\n".repeat(10), [
        "a7f2ff13bc495cf2:1",
        "a7f2ff13bc495cf2:2",
        "a7f2ff13bc495cf2:3",
        "a7f2ff13bc495cf2:4",
        "a7f2ff13bc495cf2:5",
        "a7f2ff13bc495cf2:6",
        "a7f2ff1481e87703:1",
        "a9cf91f7bbf1862b:1",
        "55ec222b86bcae53:1",
        "cc97dc7b1d7d8f7b:1",
        "c129715d7a2bc9a3:1"
    ]);
});
function testResolveUriToFile(uri, index, artifactsURIs) {
    const location = { "uri": uri, "index": index };
    const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
    return fingerprints.resolveUriToFile(location, artifacts);
}
ava_1.default('resolveUriToFile', t => {
    // The resolveUriToFile method checks that the file exists and is in the right directory
    // so we need to give it real files to look at. We will use this file as an example.
    // For this to work we require the current working directory to be a parent, but this
    // should generally always be the case so this is fine.
    const cwd = process.cwd();
    const filepath = __filename;
    t.true(filepath.startsWith(cwd + '/'));
    const relativeFilepath = filepath.substring(cwd.length + 1);
    process.env['GITHUB_WORKSPACE'] = cwd;
    // Absolute paths are unmodified
    t.is(testResolveUriToFile(filepath, undefined, []), filepath);
    t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
    // Relative paths are made absolute
    t.is(testResolveUriToFile(relativeFilepath, undefined, []), filepath);
    t.is(testResolveUriToFile('file://' + relativeFilepath, undefined, []), filepath);
    // Absolute paths outside the src root are discarded
    t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
    t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
    // Other schemes are discarded
    t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
    t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
    // Invalid URIs are discarded
    t.is(testResolveUriToFile(1, undefined, []), undefined);
    t.is(testResolveUriToFile(undefined, undefined, []), undefined);
    // Non-existent files are discarded
    t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
    // Index is resolved
    t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
    t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
    // Invalid indexes are discarded
    t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
    t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
});
ava_1.default('addFingerprints', t => {
    // Run an end-to-end test on a test file
    let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
    let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
    // The test files are stored prettified, but addFingerprints outputs condensed JSON
    input = JSON.stringify(JSON.parse(input));
    expected = JSON.stringify(JSON.parse(expected));
    // The URIs in the SARIF files resolve to files in the testdata directory
    process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
    t.deepEqual(fingerprints.addFingerprints(input), expected);
});
ava_1.default('missingRegions', t => {
    // Run an end-to-end test on a test file
    let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
    let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
    // The test files are stored prettified, but addFingerprints outputs condensed JSON
    input = JSON.stringify(JSON.parse(input));
    expected = JSON.stringify(JSON.parse(expected));
    // The URIs in the SARIF files resolve to files in the testdata directory
    process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
    t.deepEqual(fingerprints.addFingerprints(input), expected);
});
//# sourceMappingURL=fingerprints.test.js.map
File diff suppressed because one or more lines are too long
47 lib/setup-tools.js generated
@@ -10,7 +10,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const toolcache = __importStar(require("@actions/tool-cache"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
class CodeQLSetup {
constructor(codeqlDist) {
this.dist = codeqlDist;
@@ -20,7 +19,7 @@ class CodeQLSetup {
if (process.platform === 'win32') {
this.platform = 'win64';
if (this.cmd.endsWith('codeql')) {
this.cmd += ".exe";
this.cmd += ".cmd";
}
}
else if (process.platform === 'linux') {
@@ -36,41 +35,17 @@
}
exports.CodeQLSetup = CodeQLSetup;
async function setupCodeQL() {
try {
const codeqlURL = core.getInput('tools', { required: true });
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
}
else {
const codeqlPath = await toolcache.downloadTool(codeqlURL);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
}
return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
const version = '1.0.0';
const codeqlURL = core.getInput('tools', { required: true });
let codeqlFolder = toolcache.find('CodeQL', version);
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
}
catch (e) {
core.error(e);
throw new Error("Unable to download and extract CodeQL CLI");
else {
const codeqlPath = await toolcache.downloadTool(codeqlURL);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
}
return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url) {
const match = url.match(/\/codeql-bundle-(.*)\//);
if (match === null || match.length < 2) {
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
}
let version = match[1];
if (!semver.valid(version)) {
core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = '0.0.0-' + version;
}
const s = semver.clean(version);
if (!s) {
throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
}
return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
//# sourceMappingURL=setup-tools.js.map
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAC7B,+CAAiC;AAEjC,MAAa,WAAW;IAMtB,YAAY,UAAkB;QAC5B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC/B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACpB;SACF;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACvC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC3B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACxC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SACzB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC7D;IACH,CAAC;CACF;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC/B,IAAI;QACF,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAExD,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAC9D,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACrD;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,gBAAgB,CAAC,CAAC;SACtF;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE3D;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAC9D;AACH,CAAC;AAnBD,kCAmBC;AAED,SAAgB,mBAAmB,CAAC,GAAW;IAE7C,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;IAClD,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,wBAAwB,GAAG,iCAAiC,CAAC,CAAC;KAC/E;IAED,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAEvB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;QAC1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,OAAO,gEAAgE,OAAO,GAAG,CAAC,CAAC;QAChH,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC;KAC9B;IAED,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,iDAAiD,OAAO,UAAU,CAAC,CAAC;KAC/G;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AApBD,kDAoBC"}
|
||||
60 lib/setup-tools.test.js generated
@@ -1,60 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const setupTools = __importStar(require("./setup-tools"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
    await util.withTmpDir(async (tmpDir) => {
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
        process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
        const versions = ['20200601', '20200610'];
        for (let i = 0; i < versions.length; i++) {
            const version = versions[i];
            nock_1.default('https://example.com')
                .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
                .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
            process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
            await setupTools.setupCodeQL();
            t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
        }
        const cachedVersions = toolcache.findAllVersions('CodeQL');
        t.is(cachedVersions.length, 2);
    });
});
ava_1.default('parse codeql bundle url version', t => {
    const tests = {
        '20200601': '0.0.0-20200601',
        '20200601.0': '0.0.0-20200601.0',
        '20200601.0.0': '20200601.0.0',
        '1.2.3': '1.2.3',
        '1.2.3-alpha': '1.2.3-alpha',
        '1.2.3-beta.1': '1.2.3-beta.1',
    };
    for (const [version, expectedVersion] of Object.entries(tests)) {
        const url = `https://github.com/.../codeql-bundle-${version}/...`;
        try {
            const parsedVersion = setupTools.getCodeQLURLVersion(url);
            t.deepEqual(parsedVersion, expectedVersion);
        }
        catch (e) {
            t.fail(e.message);
        }
    }
});
//# sourceMappingURL=setup-tools.test.js.map
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"setup-tools.test.js","sourceRoot":"","sources":["../src/setup-tools.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,0DAA4C;AAC5C,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,UAAU,CAAC,WAAW,EAAE,CAAC;YAE/B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YAC1D,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}
|
||||
122 lib/setup-tracer.js generated
@@ -9,11 +9,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const setuptools = __importStar(require("./setup-tools"));
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
,
@@ -26,7 +28,12 @@ const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
'SEMMLE_JAVA_TOOL_OPTIONS'
]);
async function tracerConfig(codeql, database, compilerSpec) {
const env = await codeql.getTracerEnv(database, compilerSpec);
const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
let envFile = path.resolve(database, 'working', 'env.tmp');
await exec.exec(codeql.cmd, ['database', 'trace-command', database,
...compilerSpecArg,
process.execPath, path.resolve(__dirname, 'tracer-env.js'), envFile]);
const env = JSON.parse(fs.readFileSync(envFile, 'utf-8'));
const config = env['ODASA_TRACER_CONFIGURATION'];
const info = { spec: config, env: {} };
// Extract critical tracer variables from the environment
@@ -56,7 +63,7 @@ function concatTracerConfigs(configs) {
const env = {};
let copyExecutables = false;
let envSize = 0;
for (const v of configs) {
for (let v of Object.values(configs)) {
for (let e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
@@ -93,13 +100,12 @@ function concatTracerConfigs(configs) {
totalCount += count;
totalLines.push(...lines.slice(2));
}
const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
const spec = path.resolve(tempFolder, 'compound-spec');
const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
const newLogFilePath = path.resolve(util.workspaceFolder(), 'compound-build-tracer.log');
const spec = path.resolve(util.workspaceFolder(), 'compound-spec');
const tempFolder = path.resolve(util.workspaceFolder(), 'compound-temp');
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
if (copyExecutables) {
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = tempFolder;
envSize += 1;
}
fs.writeFileSync(spec, newSpecContent.join('\n'));
@@ -119,51 +125,28 @@ function concatTracerConfigs(configs) {
fs.writeFileSync(envPath, buffer);
return { env, spec };
}
async function sendSuccessStatusReport(startedAt, config) {
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);
const languages = config.languages.join(',');
const workflowLanguages = core.getInput('languages', { required: false });
const paths = (config.originalUserInput.paths || []).join(',');
const pathsIgnore = (config.originalUserInput['paths-ignore'] || []).join(',');
const disableDefaultQueries = config.originalUserInput['disable-default-queries'] ? languages : '';
const queries = (config.originalUserInput.queries || []).map(q => q.uses).join(',');
const statusReport = {
...statusReportBase,
languages: languages,
workflow_languages: workflowLanguages,
paths: paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries,
};
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
let config;
let codeql;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
return;
}
core.startGroup('Setup CodeQL tools');
codeql = await codeql_1.setupCodeQL();
await codeql.printVersion();
core.endGroup();
// The config file MUST be parsed in the init action
const config = await configUtils.loadConfig();
core.startGroup('Load language configuration');
config = await configUtils.initConfig();
analysisPaths.includeAndExcludeAnalysisPaths(config);
const languages = await util.getLanguages();
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
core.setFailed("Did not detect any languages to analyze. Please update input in workflow.");
return;
}
core.endGroup();
}
catch (e) {
core.setFailed(e.message);
console.log(e);
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
return;
}
try {
analysisPaths.includeAndExcludeAnalysisPaths(config, languages);
const sourceRoot = path.resolve();
core.startGroup('Setup CodeQL tools');
const codeqlSetup = await setuptools.setupCodeQL();
await exec.exec(codeqlSetup.cmd, ['version', '--format=json']);
core.endGroup();
// Forward Go flags
const goFlags = process.env['GOFLAGS'];
if (goFlags) {
@@ -173,52 +156,59 @@ async function run() {
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
core.exportVariable('CODEQL_RAM', codeqlRam);
const databaseFolder = util.getCodeQLDatabasesDir();
fs.mkdirSync(databaseFolder, { recursive: true });
let tracedLanguageConfigs = [];
const databaseFolder = path.resolve(util.workspaceFolder(), 'codeql_databases');
await io.mkdirP(databaseFolder);
let tracedLanguages = {};
let scannedLanguages = [];
// TODO: replace this code once CodeQL supports multi-language tracing
for (let language of config.languages) {
for (let language of languages) {
const languageDatabase = path.join(databaseFolder, language);
// Init language database
await codeql.databaseInit(languageDatabase, language, sourceRoot);
await exec.exec(codeqlSetup.cmd, ['database', 'init', languageDatabase, '--language=' + language, '--source-root=' + sourceRoot]);
// TODO: add better detection of 'traced languages' instead of using a hard coded list
if (codeql_1.isTracedLanguage(language)) {
const config = await tracerConfig(codeql, languageDatabase);
tracedLanguageConfigs.push(config);
if (['cpp', 'java', 'csharp'].includes(language)) {
const config = await tracerConfig(codeqlSetup, languageDatabase);
tracedLanguages[language] = config;
}
else {
scannedLanguages.push(language);
}
}
if (tracedLanguageConfigs.length > 0) {
|
||||
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs);
|
||||
const tracedLanguageKeys = Object.keys(tracedLanguages);
|
||||
if (tracedLanguageKeys.length > 0) {
|
||||
const mainTracerConfig = concatTracerConfigs(tracedLanguages);
|
||||
if (mainTracerConfig.spec) {
|
||||
for (let entry of Object.entries(mainTracerConfig.env)) {
|
||||
core.exportVariable(entry[0], entry[1]);
|
||||
}
|
||||
core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
|
||||
if (process.platform === 'darwin') {
|
||||
core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeql.getDir(), 'tools', 'osx64', 'libtrace.dylib'));
|
||||
core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeqlSetup.tools, 'osx64', 'libtrace.dylib'));
|
||||
}
|
||||
else if (process.platform === 'win32') {
|
||||
await exec.exec('powershell', [
|
||||
path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
|
||||
path.resolve(codeql.getDir(), 'tools', 'win64', 'tracer.exe'),
|
||||
], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
|
||||
await exec.exec('powershell', [path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
|
||||
path.resolve(codeqlSetup.tools, 'win64', 'tracer.exe')], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
|
||||
}
|
||||
else {
|
||||
core.exportVariable('LD_PRELOAD', path.join(codeql.getDir(), 'tools', 'linux64', '${LIB}trace.so'));
|
||||
core.exportVariable('LD_PRELOAD', path.join(codeqlSetup.tools, 'linux64', '${LIB}trace.so'));
|
||||
}
|
||||
}
|
||||
}
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES, scannedLanguages.join(','));
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));
|
||||
// TODO: make this a "private" environment variable of the action
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_CMD, codeqlSetup.cmd);
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
console.log(error);
|
||||
await util.sendStatusReport(await util.createStatusReportBase('init', 'failure', startedAt, error.message, error.stack));
|
||||
await util.reportActionFailed('init', error.message, error.stack);
|
||||
return;
|
||||
}
|
||||
await sendSuccessStatusReport(startedAt, config);
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
|
||||
await util.reportActionSucceeded('init');
|
||||
}
|
||||
run().catch(e => {
|
||||
core.setFailed("init action failed: " + e);
|
||||
console.log(e);
|
||||
});
|
||||
//# sourceMappingURL=setup-tracer.js.map
|
||||
File diff suppressed because one or more lines are too long
10 lib/shared-environment.js generated
@@ -1,10 +1,16 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_ACTION_CMD = 'CODEQL_ACTION_CMD';
exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather than the init action.
exports.CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
//# sourceMappingURL=shared-environment.js.map
exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
// Populated when the init action completes successfully
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
@@ -1 +0,0 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
22 lib/test-utils.js generated
@@ -1,22 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function silenceDebugOutput(test) {
const typedTest = test;
typedTest.beforeEach(t => {
const processStdoutWrite = process.stdout.write.bind(process.stdout);
t.context.write = processStdoutWrite;
process.stdout.write = (str, encoding, cb) => {
// Core library will directly call process.stdout.write for commands
// We don't want :: commands to be executed by the runner during tests
if (!str.match(/^::/)) {
processStdoutWrite(str, encoding, cb);
}
return true;
};
});
typedTest.afterEach(t => {
process.stdout.write = t.context.write;
});
}
exports.silenceDebugOutput = silenceDebugOutput;
//# sourceMappingURL=test-utils.js.map
@@ -1 +0,0 @@
{"version":3,"file":"test-utils.js","sourceRoot":"","sources":["../src/test-utils.ts"],"names":[],"mappings":";;AAEA,SAAgB,kBAAkB,CAAC,IAAwB;IACzD,MAAM,SAAS,GAAG,IAAmC,CAAC;IAEtD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACrB,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG,kBAAkB,CAAC;QACrC,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,GAAQ,EAAE,QAAc,EAAE,EAA0B,EAAE,EAAE;YAC5E,oEAAoE;YACpE,sEAAsE;YACtE,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACnB,kBAAkB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC;aACzC;YACD,OAAO,IAAI,CAAC;QAChB,CAAC,CAAC;IACN,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;QACpB,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3C,CAAC,CAAC,CAAC;AACL,CAAC;AAnBD,gDAmBC"}
75 lib/testing-utils.js generated
@@ -1,75 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const sinon_1 = __importDefault(require("sinon"));
const CodeQL = __importStar(require("./codeql"));
function wrapOutput(context) {
// Function signature taken from Socket.write.
// Note there are two overloads:
// write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean;
// write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean;
return (chunk, encoding, cb) => {
// Work out which method overload we are in
if (cb === undefined && typeof encoding === 'function') {
cb = encoding;
encoding = undefined;
}
// Record the output
if (typeof chunk === 'string') {
context.testOutput += chunk;
}
else {
context.testOutput += new TextDecoder(encoding || 'utf-8').decode(chunk);
}
// Satisfy contract by calling callback when done
if (cb !== undefined && typeof cb === 'function') {
cb();
}
return true;
};
}
function setupTests(test) {
const typedTest = test;
typedTest.beforeEach(t => {
// Set an empty CodeQL object so that all method calls will fail
// unless the test explicitly sets one up.
CodeQL.setCodeQL({});
// Replace stdout and stderr so we can record output during tests
t.context.testOutput = "";
const processStdoutWrite = process.stdout.write.bind(process.stdout);
t.context.stdoutWrite = processStdoutWrite;
process.stdout.write = wrapOutput(t.context);
const processStderrWrite = process.stderr.write.bind(process.stderr);
t.context.stderrWrite = processStderrWrite;
process.stderr.write = wrapOutput(t.context);
// Many tests modify environment variables. Take a copy now so that
// we reset them after the test to keep tests independent of each other.
// process.env only has string fields, so a shallow copy is fine.
t.context.env = {};
Object.assign(t.context.env, process.env);
});
typedTest.afterEach.always(t => {
// Restore stdout and stderr
// The captured output is only replayed if the test failed
process.stdout.write = t.context.stdoutWrite;
process.stderr.write = t.context.stderrWrite;
if (!t.passed) {
process.stdout.write(t.context.testOutput);
}
// Undo any modifications made by sinon
sinon_1.default.restore();
// Undo any modifications to the env
process.env = t.context.env;
});
}
exports.setupTests = setupTests;
//# sourceMappingURL=testing-utils.js.map
@@ -1 +0,0 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AAInC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"}
1 lib/tracer-env.js generated
@@ -18,4 +18,3 @@ for (let entry of Object.entries(process.env)) {
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
//# sourceMappingURL=tracer-env.js.map
@@ -1 +0,0 @@
{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC7C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACtF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KAClB;CACF;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}
231 lib/upload-lib.js generated
@@ -11,15 +11,28 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const http = __importStar(require("@actions/http-client"));
const auth = __importStar(require("@actions/http-client/auth"));
const io = __importStar(require("@actions/io"));
const file_url_1 = __importDefault(require("file-url"));
const fs = __importStar(require("fs"));
const jsonschema = __importStar(require("jsonschema"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
const api = __importStar(require("./api-client"));
const fingerprints = __importStar(require("./fingerprints"));
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
// Construct the location of the sentinel file for detecting multiple uploads.
// The returned location should be writable.
async function getSentinelFilePath() {
// Use the temp dir instead of placing next to the sarif file because of
// issues with docker actions. The directory containing the sarif file
// may not be writable by us.
const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
await io.mkdirP(uploadsTmpDir);
// Hash the absolute path so we'll behave correctly in the unlikely
// scenario a file is referenced twice with different paths.
return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
}
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
@@ -41,171 +54,87 @@ function combineSarifFiles(sarifFiles) {
return JSON.stringify(combinedSarif);
}
exports.combineSarifFiles = combineSarifFiles;
// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload) {
core.info('Uploading results');
// If in test mode we don't want to upload the results
const testMode = process.env['TEST_MODE'] === 'true' || false;
if (testMode) {
return;
}
const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
// Make up to 4 attempts to upload, and sleep for these
// number of seconds between each attempt.
// We don't want to back off too much to avoid wasting action
// minutes, but just waiting a little bit could maybe help.
const backoffPeriods = [1, 5, 15];
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
const response = await api.getApiClient().request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
owner: owner,
repo: repo,
data: payload,
}));
core.debug('response status: ' + response.status);
const statusCode = response.status;
if (statusCode === 202) {
core.info("Successfully uploaded results");
return;
}
const requestID = response.headers["x-github-request-id"];
// On any other status code that's not 5xx mark the upload as failed
if (!statusCode || statusCode < 500 || statusCode >= 600) {
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
}
// On a 5xx status code we may retry the request
if (attempt < backoffPeriods.length) {
// Log the failure as a warning but don't mark the action as failed yet
core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
// Sleep for the backoff period
await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
continue;
}
else {
// If the upload fails with 5xx then we assume it is a temporary problem
// and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
}
}
// This case shouldn't ever happen as the final iteration of the loop
// will always throw an error instead of exiting to here.
throw new Error('Upload failed');
}
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function upload(input) {
if (fs.lstatSync(input).isDirectory()) {
const sarifFiles = fs.readdirSync(input)
.filter(f => f.endsWith(".sarif"))
.map(f => path.resolve(input, f));
if (sarifFiles.length === 0) {
throw new Error("No SARIF files found to upload in \"" + input + "\".");
}
return await uploadFiles(sarifFiles);
await uploadFiles(sarifFiles);
}
else {
return await uploadFiles([input]);
await uploadFiles([input]);
}
}
exports.upload = upload;
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) {
let numResults = 0;
for (const run of JSON.parse(sarif).runs) {
numResults += run.results.length;
}
return numResults;
}
exports.countResultsInSarif = countResultsInSarif;
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
function validateSarifFileSchema(sarifFilePath) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
core.startGroup("Error details: " + error.stack);
core.info(JSON.stringify(error, null, 2));
core.endGroup();
}
// Set the main error message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
const sarifErrors = result.errors.map(e => "- " + e.stack);
throw new Error("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + sarifErrors.join("\n"));
}
}
exports.validateSarifFileSchema = validateSarifFileSchema;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles) {
core.startGroup("Uploading results");
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
try {
// Check if an upload has happened before. If so then abort.
// This is intended to catch when the finish and upload-sarif actions
// are used together, and then the upload-sarif action is invoked twice.
const sentinelFile = await getSentinelFilePath();
if (fs.existsSync(sentinelFile)) {
core.info("Aborting as an upload has already happened from this job");
return;
}
const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRequiredEnvParam('GITHUB_REF'); // it's in the form "refs/heads/master"
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
core.debug("Uploading sarif files: " + JSON.stringify(sarifFiles));
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = file_url_1.default(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
return;
}
let matrix = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt
});
core.info('Uploading results');
const githubToken = core.getInput('token');
const ph = new auth.BearerCredentialHandler(githubToken);
const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';
const res = await client.put(url, payload);
const requestID = res.message.headers["x-github-request-id"];
core.debug('response status: ' + res.message.statusCode);
if (res.message.statusCode === 500) {
// If the upload fails with 500 then we assume it is a temporary problem
// with turbo-scan and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
core.error('Upload failed (' + requestID + '): ' + await res.readBody());
}
else if (res.message.statusCode !== 202) {
core.setFailed('Upload failed (' + requestID + '): ' + await res.readBody());
}
else {
core.info("Successfully uploaded results");
}
// Mark that we have made an upload
fs.writeFileSync(sentinelFile, '');
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file);
}
const commitOid = await util.getCommitOid();
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRef();
const analysisKey = await util.getAnalysisKey();
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = file_url_1.default(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
}
let matrix = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const toolNames = util.getToolNames(sarifPayload);
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt,
"tool_names": toolNames,
});
// Log some useful debug info about the upload
const rawUploadSizeBytes = sarifPayload.length;
core.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
const zippedUploadSizeBytes = zipped_sarif.length;
core.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
const numResultInSarif = countResultsInSarif(sarifPayload);
core.debug("Number of results in upload: " + numResultInSarif);
if (!util.isLocalRun()) {
// Make the upload
await uploadPayload(payload);
}
else {
core.debug("Not uploading because this is a local run.");
catch (error) {
core.setFailed(error.message);
}
core.endGroup();
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
};
}
//# sourceMappingURL=upload-lib.js.map
File diff suppressed because one or more lines are too long
25 lib/upload-lib.test.js generated
@@ -1,25 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile));
});
ava_1.default('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
t.throws(() => uploadLib.validateSarifFileSchema(inputFile));
});
//# sourceMappingURL=upload-lib.test.js.map
@@ -1 +0,0 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AAClE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/D,CAAC,CAAC,CAAC"}
21 lib/upload-sarif.js generated
@@ -10,32 +10,21 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendSuccessStatusReport(startedAt, uploadStats) {
const statusReportBase = await util.createStatusReportBase('upload-sarif', 'success', startedAt);
const statusReport = {
...statusReportBase,
...uploadStats,
};
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
if (!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
if (util.should_abort('upload-sarif', false) || !await util.reportActionStarting('upload-sarif')) {
return;
}
try {
const uploadStats = await upload_lib.upload(core.getInput('sarif_file'));
await sendSuccessStatusReport(startedAt, uploadStats);
await upload_lib.upload(core.getInput('sarif_file'));
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'failure', startedAt, error.message, error.stack));
await util.reportActionFailed('upload-sarif', error.message, error.stack);
return;
}
await util.reportActionSucceeded('upload-sarif');
}
run().catch(e => {
core.setFailed("codeql/upload-sarif action failed: " + e);
core.setFailed("upload-sarif action failed: " + e);
console.log(e);
});
//# sourceMappingURL=upload-sarif.js.map
@@ -1 +0,0 @@
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAI/B,KAAK,UAAU,uBAAuB,CAAC,SAAe,EAAE,WAA0C;IAChG,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACjG,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAI,WAAW;KAChB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;QAChH,OAAO;KACR;IAED,IAAI;QACF,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,CAAC;QACzE,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KAEvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAC3D,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;QAChB,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
423 lib/util.js generated
@@ -6,165 +6,159 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const http = __importStar(require("@actions/http-client"));
const auth = __importStar(require("@actions/http-client/auth"));
const octokit = __importStar(require("@octokit/rest"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const path = __importStar(require("path"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
/**
* The API URL for github.com.
* Should the current action be aborted?
*
* This method should be called at the start of all CodeQL actions and they
* should abort cleanly if this returns true without failing the action.
* This method will call `core.setFailed` if necessary.
*/
exports.GITHUB_DOTCOM_API_URL = "https://api.github.com";
/**
* Get the API URL for the GitHub instance we are connected to.
* May be for github.com or for an enterprise instance.
*/
function getInstanceAPIURL() {
return process.env["GITHUB_API_URL"] || exports.GITHUB_DOTCOM_API_URL;
function should_abort(actionName, requireInitActionHasRun) {
// Check that required aspects of the environment are present
const ref = process.env['GITHUB_REF'];
if (ref === undefined) {
core.setFailed('GITHUB_REF must be set.');
return true;
}
// Should abort if called on a merge commit for a pull request.
if (ref.startsWith('refs/pull/')) {
core.warning('The CodeQL ' + actionName + ' action is intended for workflows triggered on `push` events, '
+ 'but the current workflow is running on a pull request. Aborting.');
return true;
}
// If the init action is required, then check that it completed successfully.
if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
return true;
}
return false;
}
exports.getInstanceAPIURL = getInstanceAPIURL;
exports.should_abort = should_abort;
/**
* Are we running against a GitHub Enterprise instance, as opposed to github.com.
* Resolve the path to the workspace folder.
*/
function isEnterprise() {
return getInstanceAPIURL() !== exports.GITHUB_DOTCOM_API_URL;
function workspaceFolder() {
let workspaceFolder = process.env['RUNNER_WORKSPACE'];
if (!workspaceFolder)
workspaceFolder = path.resolve('..');
return workspaceFolder;
}
exports.isEnterprise = isEnterprise;
exports.workspaceFolder = workspaceFolder;
/**
* Get an environment parameter, but throw an error if it is not set.
*/
function getRequiredEnvParam(paramName) {
const value = process.env[paramName];
if (value === undefined || value.length === 0) {
if (value === undefined) {
throw new Error(paramName + ' environment variable must be set');
}
core.debug(paramName + '=' + value);
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
function isLocalRun() {
return !!process.env.CODEQL_LOCAL_RUN
&& process.env.CODEQL_LOCAL_RUN !== 'false'
&& process.env.CODEQL_LOCAL_RUN !== '0';
}
exports.isLocalRun = isLocalRun;
/**
* Ensures all required environment variables are set in the context of a local run.
* Gets the set of languages in the current repository
*/
function prepareLocalRunEnvironment() {
if (!isLocalRun()) {
return;
}
core.debug('Action is running locally.');
if (!process.env.GITHUB_JOB) {
core.exportVariable('GITHUB_JOB', 'UNKNOWN-JOB');
}
}
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
/**
* Gets the SHA of the commit that is currently checked out.
*/
async function getCommitOid() {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
// the workflow has changed the current commit to the head commit instead of
// the merge commit, which must mean that git is available.
// Even if this does go wrong, it's not a huge problem for the alerts to
// be reported on the merge commit.
try {
let commitOid = '';
await exec.exec('git', ['rev-parse', 'HEAD'], {
silent: true,
listeners: {
stdout: (data) => { commitOid += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
async function getLanguagesInRepo() {
var _a;
// Translate between GitHub's API names for languages and ours
const codeqlLanguages = {
'C': 'cpp',
'C++': 'cpp',
'C#': 'csharp',
'Go': 'go',
'Java': 'java',
'JavaScript': 'javascript',
'TypeScript': 'javascript',
'Python': 'python',
};
let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
if (repo_nwo) {
let owner = repo_nwo[0];
let repo = repo_nwo[1];
core.debug(`GitHub repo ${owner} ${repo}`);
let ok = new octokit.Octokit({
auth: core.getInput('token'),
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" })
});
return commitOid.trim();
}
catch (e) {
core.info("Failed to call git to get current commit. Continuing with data from environment: " + e);
return getRequiredEnvParam('GITHUB_SHA');
}
}
exports.getCommitOid = getCommitOid;
/**
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath() {
if (isLocalRun()) {
return 'LOCAL';
}
const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam('GITHUB_RUN_ID'));
const apiClient = api.getApiClient();
const runsResponse = await apiClient.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
owner,
repo,
run_id
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await apiClient.request('GET ' + workflowUrl);
return workflowResponse.data.path;
}
/**
* Get the analysis key parameter for the current job.
*
* This will combine the workflow path and current job name.
* Computing this the first time requires making requests to
* the github API, but after that the result will be cached.
*/
async function getAnalysisKey() {
const analysisKeyEnvVar = 'CODEQL_ACTION_ANALYSIS_KEY';
let analysisKey = process.env[analysisKeyEnvVar];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam('GITHUB_JOB');
analysisKey = workflowPath + ':' + jobName;
core.exportVariable(analysisKeyEnvVar, analysisKey);
return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
/**
* Get the ref currently being analyzed.
*/
function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam('GITHUB_REF');
// For pull request refs we want to convert from the 'merge' ref
// to the 'head' ref, as that is what we want to analyse.
// There should have been some code earlier in the workflow to do
// the checkout, but we have no way of verifying that here.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
if (pull_ref_regex.test(ref)) {
return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
const response = await ok.request("GET /repos/:owner/:repo/languages", ({
owner,
repo
}));
core.debug("Languages API response: " + JSON.stringify(response));
// The GitHub API is going to return languages in order of popularity.
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
let languages = new Set();
for (let lang in response.data) {
if (lang in codeqlLanguages) {
languages.add(codeqlLanguages[lang]);
}
}
return [...languages];
}
else {
return ref;
return [];
}
}
exports.getRef = getRef;
/**
* Get the languages to analyse.
*
* The result is obtained from the environment parameter CODEQL_ACTION_LANGUAGES
* if that has been set, otherwise it is obtained from the action input parameter
* 'languages' if that has been set, otherwise it is deduced as all languages in the
* repo that can be analysed.
*
* If the languages are obtained from either of the second choices, the
* CODEQL_ACTION_LANGUAGES environment variable will be exported with the
* deduced list.
*/
async function getLanguages() {
// Obtain from CODEQL_ACTION_LANGUAGES if set
const langsVar = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES];
if (langsVar) {
return langsVar.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
}
// Obtain from action input 'languages' if set
let languages = core.getInput('languages', { required: false })
.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
core.info("Languages from configuration: " + JSON.stringify(languages));
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo();
core.info("Automatically detected languages: " + JSON.stringify(languages));
}
core.exportVariable(sharedEnv.CODEQL_ACTION_LANGUAGES, languages.join(','));
return languages;
}
exports.getLanguages = getLanguages;
/**
* Compose a StatusReport.
*
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
* @param status The status. Must be 'success', 'failure', or 'starting'
* @param startedAt The time this action started executing.
* @param cause Cause of failure (only supply if status is 'failure')
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
async function createStatusReport(actionName, status, cause, exception) {
const commitOid = process.env['GITHUB_SHA'] || '';
const ref = getRef();
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
let workflowRunID = -1;
if (workflowRunIDStr) {
@@ -172,23 +166,18 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
}
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
const jobName = process.env['GITHUB_JOB'] || '';
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
const languages = (await getLanguages()).sort().join(',');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT] || new Date().toISOString();
core.exportVariable(sharedEnv.CODEQL_ACTION_STARTED_AT, startedAt);
let statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
job_name: jobName,
analysis_key: analysis_key,
languages: languages,
commit_oid: commitOid,
ref: ref,
action_name: actionName,
action_oid: "unknown",
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
started_at: startedAt,
status: status
};
// Add optional parameters
@@ -198,7 +187,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
if (exception) {
statusReport.exception = exception;
}
if (status === 'success' || status === 'failure' || status === 'aborted') {
if (status === 'success' || status === 'failure') {
statusReport.completed_at = new Date().toISOString();
}
let matrix = core.getInput('matrix');
@@ -207,139 +196,69 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
/**
* Send a status report to the code_scanning/analysis/status endpoint.
*
* Optionally checks the response from the API endpoint and sets the action
* as failed if the status report failed. This is only expected to be used
* when sending a 'starting' report.
*
* Returns whether sending the status report was successful or not.
* Returns the status code of the response to the status request, or
* undefined if the given statusReport is undefined or no response was
* received.
*/
async function sendStatusReport(statusReport, ignoreFailures) {
if (isEnterprise()) {
core.debug("Not sending status report to GitHub Enterprise");
return true;
}
if (isLocalRun()) {
core.debug("Not sending status report because this is a local run");
return true;
}
async function sendStatusReport(statusReport) {
var _a;
const statusReportJSON = JSON.stringify(statusReport);
core.debug('Sending status report: ' + statusReportJSON);
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const statusResponse = await api.getApiClient().request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
owner: owner,
repo: repo,
data: statusReportJSON,
});
if (!ignoreFailures) {
// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusResponse.status === 403) {
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
return false;
}
if (statusResponse.status === 404) {
core.setFailed('Not authorized to use the CodeQL code scanning feature on this repo.');
return false;
}
const githubToken = core.getInput('token');
const ph = new auth.BearerCredentialHandler(githubToken);
const client = new http.HttpClient('Code Scanning : Status Report', [ph]);
const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY']
+ '/code-scanning/analysis/status';
const res = await client.put(url, statusReportJSON);
return (_a = res.message) === null || _a === void 0 ? void 0 : _a.statusCode;
}
/**
* Send a status report that an action is starting.
*
* If the action is `init` then this also records the start time in the environment,
* and ensures that the analysed languages are also recorded in the environment.
*
* Returns true unless a problem occurred and the action should abort.
*/
async function reportActionStarting(action) {
const statusCode = await sendStatusReport(await createStatusReport(action, 'starting'));
// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusCode === 403) {
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
return false;
}
if (statusCode === 404) {
core.setFailed('Not authorized to use the CodeQL code scanning feature on this repo.');
return false;
}
return true;
}
exports.sendStatusReport = sendStatusReport;
exports.reportActionStarting = reportActionStarting;
/**
* Get the array of all the tool names contained in the given sarif contents.
* Report that an action has failed.
*
* Returns an array of unique string tool names.
* Note that the started_at date is always that of the `init` action, since
* this is likely to give a more useful duration when inspecting events.
*/
function getToolNames(sarifContents) {
const sarif = JSON.parse(sarifContents);
const toolNames = {};
for (const run of sarif.runs || []) {
const tool = run.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
async function reportActionFailed(action, cause, exception) {
await sendStatusReport(await createStatusReport(action, 'failure', cause, exception));
}
exports.getToolNames = getToolNames;
// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
async function withTmpDir(body) {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
const realSubdir = path.join(tmpDir, 'real');
fs.mkdirSync(realSubdir);
const symlinkSubdir = path.join(tmpDir, 'symlink');
fs.symlinkSync(realSubdir, symlinkSubdir, 'dir');
const result = await body(symlinkSubdir);
fs.rmdirSync(tmpDir, { recursive: true });
return result;
}
exports.withTmpDir = withTmpDir;
exports.reportActionFailed = reportActionFailed;
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus 256 MB.
* Report that an action has succeeded.
*
* @returns string
* Note that the started_at date is always that of the `init` action, since
* this is likely to give a more useful duration when inspecting events.
*/
function getMemoryFlag() {
let memoryToUseMegaBytes;
const memoryToUseString = core.getInput("ram");
if (memoryToUseString) {
memoryToUseMegaBytes = Number(memoryToUseString);
if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
}
}
else {
const totalMemoryBytes = os.totalmem();
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const systemReservedMemoryMegaBytes = 256;
memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
}
return "--ram=" + Math.floor(memoryToUseMegaBytes);
async function reportActionSucceeded(action) {
await sendStatusReport(await createStatusReport(action, 'success'));
}
exports.getMemoryFlag = getMemoryFlag;
/**
* Get the codeql `--threads` value specified for the `threads` input. The value
* defaults to 1. The value will be capped to the number of available CPUs.
*
* @returns string
*/
function getThreadsFlag() {
let numThreads = 1;
const numThreadsString = core.getInput("threads");
if (numThreadsString) {
numThreads = Number(numThreadsString);
if (Number.isNaN(numThreads)) {
throw new Error(`Invalid threads setting "${numThreadsString}", specified.`);
}
const maxThreads = os.cpus().length;
if (numThreads > maxThreads) {
numThreads = maxThreads;
}
const minThreads = -maxThreads;
if (numThreads < minThreads) {
numThreads = minThreads;
}
}
return `--threads=${numThreads}`;
}
exports.getThreadsFlag = getThreadsFlag;
/**
* Get the directory where CodeQL databases should be placed.
*/
function getCodeQLDatabasesDir() {
return path.resolve(getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
}
exports.getCodeQLDatabasesDir = getCodeQLDatabasesDir;
//# sourceMappingURL=util.js.map
exports.reportActionSucceeded = reportActionSucceeded;
File diff suppressed because one or more lines are too long
95 lib/util.test.js generated
@@ -1,95 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('getToolNames', t => {
    const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
    const toolNames = util.getToolNames(input);
    t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
ava_1.default('getMemoryFlag() should return the correct --ram flag', t => {
    const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
    const tests = {
        "": `--ram=${totalMem - 256}`,
        "512": "--ram=512",
    };
    for (const [input, expectedFlag] of Object.entries(tests)) {
        process.env['INPUT_RAM'] = input;
        const flag = util.getMemoryFlag();
        t.deepEqual(flag, expectedFlag);
    }
});
ava_1.default('getMemoryFlag() throws if the ram input is < 0 or NaN', t => {
    for (const input of ["-1", "hello!"]) {
        process.env['INPUT_RAM'] = input;
        t.throws(util.getMemoryFlag);
    }
});
ava_1.default('getThreadsFlag() should return the correct --threads flag', t => {
    const numCpus = os.cpus().length;
    const tests = {
        "0": "--threads=0",
        "1": "--threads=1",
        [`${numCpus + 1}`]: `--threads=${numCpus}`,
        [`${-numCpus - 1}`]: `--threads=${-numCpus}`
    };
    for (const [input, expectedFlag] of Object.entries(tests)) {
        process.env['INPUT_THREADS'] = input;
        const flag = util.getThreadsFlag();
        t.deepEqual(flag, expectedFlag);
    }
});
ava_1.default('getThreadsFlag() throws if the threads input is not an integer', t => {
    process.env['INPUT_THREADS'] = "hello!";
    t.throws(util.getThreadsFlag);
});
ava_1.default('getRef() throws on the empty string', t => {
    process.env["GITHUB_REF"] = "";
    t.throws(util.getRef);
});
ava_1.default('isLocalRun() runs correctly', t => {
    const origLocalRun = process.env.CODEQL_LOCAL_RUN;
    process.env.CODEQL_LOCAL_RUN = '';
    t.assert(!util.isLocalRun());
    process.env.CODEQL_LOCAL_RUN = 'false';
    t.assert(!util.isLocalRun());
    process.env.CODEQL_LOCAL_RUN = '0';
    t.assert(!util.isLocalRun());
    process.env.CODEQL_LOCAL_RUN = 'true';
    t.assert(util.isLocalRun());
    process.env.CODEQL_LOCAL_RUN = 'hucairz';
    t.assert(util.isLocalRun());
    process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
ava_1.default('prepareEnvironment() when a local run', t => {
    const origLocalRun = process.env.CODEQL_LOCAL_RUN;
    process.env.CODEQL_LOCAL_RUN = 'false';
    process.env.GITHUB_JOB = 'YYY';
    util.prepareLocalRunEnvironment();
    // unchanged
    t.deepEqual(process.env.GITHUB_JOB, 'YYY');
    process.env.CODEQL_LOCAL_RUN = 'true';
    util.prepareLocalRunEnvironment();
    // unchanged
    t.deepEqual(process.env.GITHUB_JOB, 'YYY');
    process.env.GITHUB_JOB = '';
    util.prepareLocalRunEnvironment();
    // updated
    t.deepEqual(process.env.GITHUB_JOB, 'UNKNOWN-JOB');
    process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
//# sourceMappingURL=util.test.js.map
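// Note (added for clarity): the tests above drive core.getInput() by setting
// process.env.INPUT_RAM and process.env.INPUT_THREADS, because @actions/core
// reads an input named `foo` from the INPUT_FOO environment variable.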
1 lib/util.test.js.map generated
File diff suppressed because one or more lines are too long
1 node_modules/.bin/atob generated vendored Symbolic link
@@ -0,0 +1 @@
../atob/bin/atob.js

1 node_modules/.bin/ava generated vendored
@@ -1 +0,0 @@
../ava/cli.js

1 node_modules/.bin/escodegen generated vendored Symbolic link
@@ -0,0 +1 @@
../escodegen/bin/escodegen.js

1 node_modules/.bin/esgenerate generated vendored Symbolic link
@@ -0,0 +1 @@
../escodegen/bin/esgenerate.js

1 node_modules/.bin/esparse generated vendored Symbolic link
@@ -0,0 +1 @@
../esprima/bin/esparse.js

1 node_modules/.bin/esvalidate generated vendored Symbolic link
@@ -0,0 +1 @@
../esprima/bin/esvalidate.js

1 node_modules/.bin/jest generated vendored Symbolic link
@@ -0,0 +1 @@
../jest/bin/jest.js

1 node_modules/.bin/jest-runtime generated vendored Symbolic link
@@ -0,0 +1 @@
../jest-runtime/bin/jest-runtime.js

1 node_modules/.bin/jsesc generated vendored Symbolic link
@@ -0,0 +1 @@
../jsesc/bin/jsesc

1 node_modules/.bin/json5 generated vendored Symbolic link
@@ -0,0 +1 @@
../json5/lib/cli.js

1 node_modules/.bin/parser generated vendored Symbolic link
@@ -0,0 +1 @@
../@babel/parser/bin/babel-parser.js

1 node_modules/.bin/rc generated vendored
@@ -1 +0,0 @@
../rc/cli.js

1 node_modules/.bin/removeNPMAbsolutePaths generated vendored
@@ -1 +0,0 @@
../removeNPMAbsolutePaths/bin/removeNPMAbsolutePaths

1 node_modules/.bin/sane generated vendored Symbolic link
@@ -0,0 +1 @@
../sane/src/cli.js

2 node_modules/.bin/semver generated vendored
@@ -1 +1 @@
-../semver/bin/semver.js
+../semver/bin/semver

1 node_modules/.bin/sshpk-conv generated vendored Symbolic link
@@ -0,0 +1 @@
../sshpk/bin/sshpk-conv

1 node_modules/.bin/sshpk-sign generated vendored Symbolic link
@@ -0,0 +1 @@
../sshpk/bin/sshpk-sign

1 node_modules/.bin/sshpk-verify generated vendored Symbolic link
@@ -0,0 +1 @@
../sshpk/bin/sshpk-verify

1 node_modules/.bin/ts-jest generated vendored Symbolic link
@@ -0,0 +1 @@
../ts-jest/cli.js

1 node_modules/.bin/watch generated vendored Symbolic link
@@ -0,0 +1 @@
../@cnakazawa/watch/cli.js
74 node_modules/@actions/github/README.md generated vendored
@@ -1,74 +0,0 @@
# `@actions/github`

> A hydrated Octokit client.

## Usage

Returns an authenticated Octokit client that follows the machine [proxy settings](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners). See https://octokit.github.io/rest.js for the API.

```js
const github = require('@actions/github');
const core = require('@actions/core');

async function run() {
  // This should be a token with access to your repository scoped in as a secret.
  // The YML workflow will need to set myToken with the GitHub Secret Token
  // myToken: ${{ secrets.GITHUB_TOKEN }}
  // https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token#about-the-github_token-secret
  const myToken = core.getInput('myToken');

  const octokit = new github.GitHub(myToken);

  const { data: pullRequest } = await octokit.pulls.get({
    owner: 'octokit',
    repo: 'rest.js',
    pull_number: 123,
    mediaType: {
      format: 'diff'
    }
  });

  console.log(pullRequest);
}

run();
```

You can pass client options, as specified by [Octokit](https://octokit.github.io/rest.js/), as a second argument to the `GitHub` constructor.
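For instance (a sketch added here for illustration; `baseUrl` and `userAgent` are standard Octokit client options, and the URL below is a made-up GitHub Enterprise endpoint):

```js
const octokit = new github.GitHub(myToken, {
  baseUrl: 'https://ghe.example.com/api/v3',
  userAgent: 'my-custom-action'
});
```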
You can also make GraphQL requests. See https://github.com/octokit/graphql.js for the API.

```js
const result = await octokit.graphql(query, variables);
```

Finally, you can get the context of the current action:

```js
const github = require('@actions/github');

const context = github.context;

const newIssue = await octokit.issues.create({
  ...context.repo,
  title: 'New issue!',
  body: 'Hello Universe!'
});
```

## Webhook payload typescript definitions

The npm module `@octokit/webhooks` provides type definitions for the response payloads. You can cast the payload to these types for better type information.

First, install the npm module: `npm install @octokit/webhooks`

Then, assert the type based on the eventName:

```ts
import * as core from '@actions/core'
import * as github from '@actions/github'
import * as Webhooks from '@octokit/webhooks'
if (github.context.eventName === 'push') {
  const pushPayload = github.context.payload as Webhooks.WebhookPayloadPush
  core.info(`The head commit is: ${pushPayload.head}`)
}
```
26 node_modules/@actions/github/lib/context.d.ts generated vendored
@@ -1,26 +0,0 @@
import { WebhookPayload } from './interfaces';
export declare class Context {
    /**
     * Webhook payload object that triggered the workflow
     */
    payload: WebhookPayload;
    eventName: string;
    sha: string;
    ref: string;
    workflow: string;
    action: string;
    actor: string;
    /**
     * Hydrate the context from the environment
     */
    constructor();
    get issue(): {
        owner: string;
        repo: string;
        number: number;
    };
    get repo(): {
        owner: string;
        repo: string;
    };
}
46 node_modules/@actions/github/lib/context.js generated vendored
@@ -1,46 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = require("fs");
const os_1 = require("os");
class Context {
    /**
     * Hydrate the context from the environment
     */
    constructor() {
        this.payload = {};
        if (process.env.GITHUB_EVENT_PATH) {
            if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
                this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
            }
            else {
                const path = process.env.GITHUB_EVENT_PATH;
                process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
    }
    get issue() {
        const payload = this.payload;
        return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
    }
    get repo() {
        if (process.env.GITHUB_REPOSITORY) {
            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
            return { owner, repo };
        }
        if (this.payload.repository) {
            return {
                owner: this.payload.repository.owner.login,
                repo: this.payload.repository.name
            };
        }
        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
    }
}
exports.Context = Context;
//# sourceMappingURL=context.js.map
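// Example (illustrative): with GITHUB_REPOSITORY="octocat/hello-world",
// context.repo returns { owner: 'octocat', repo: 'hello-world' }; if the
// variable is unset, the getter falls back to payload.repository before
// throwing.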
1 node_modules/@actions/github/lib/context.js.map generated vendored
File diff suppressed because one or more lines are too long
27 node_modules/@actions/github/lib/github.d.ts generated vendored
@@ -1,27 +0,0 @@
import { graphql as GraphQL } from '@octokit/graphql/dist-types/types';
import { Octokit } from '@octokit/rest';
import * as Context from './context';
export declare const context: Context.Context;
export declare class GitHub extends Octokit {
    graphql: GraphQL;
    /**
     * Sets up the REST client and GraphQL client with auth and proxy support.
     * The parameter `token` or `opts.auth` must be supplied. The GraphQL client
     * authorization is not setup when `opts.auth` is a function or object.
     *
     * @param token Auth token
     * @param opts Octokit options
     */
    constructor(token: string, opts?: Omit<Octokit.Options, 'auth'>);
    constructor(opts: Octokit.Options);
    /**
     * Disambiguates the constructor overload parameters
     */
    private static disambiguate;
    private static getOctokitOptions;
    private static getGraphQL;
    private static getAuthString;
    private static getProxyAgent;
    private static getApiBaseUrl;
    private static getGraphQLBaseUrl;
}
108 node_modules/@actions/github/lib/github.js generated vendored
@@ -1,108 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts
const graphql_1 = require("@octokit/graphql");
const rest_1 = require("@octokit/rest");
const Context = __importStar(require("./context"));
const httpClient = __importStar(require("@actions/http-client"));
// We need this in order to extend Octokit
rest_1.Octokit.prototype = new rest_1.Octokit();
exports.context = new Context.Context();
class GitHub extends rest_1.Octokit {
    constructor(token, opts) {
        super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts)));
        this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts));
    }
    /**
     * Disambiguates the constructor overload parameters
     */
    static disambiguate(token, opts) {
        return [
            typeof token === 'string' ? token : '',
            typeof token === 'object' ? token : opts || {}
        ];
    }
    static getOctokitOptions(args) {
        const token = args[0];
        const options = Object.assign({}, args[1]); // Shallow clone - don't mutate the object provided by the caller
        // Base URL - GHES or Dotcom
        options.baseUrl = options.baseUrl || this.getApiBaseUrl();
        // Auth
        const auth = GitHub.getAuthString(token, options);
        if (auth) {
            options.auth = auth;
        }
        // Proxy
        const agent = GitHub.getProxyAgent(options.baseUrl, options);
        if (agent) {
            // Shallow clone - don't mutate the object provided by the caller
            options.request = options.request ? Object.assign({}, options.request) : {};
            // Set the agent
            options.request.agent = agent;
        }
        return options;
    }
    static getGraphQL(args) {
        const defaults = {};
        defaults.baseUrl = this.getGraphQLBaseUrl();
        const token = args[0];
        const options = args[1];
        // Authorization
        const auth = this.getAuthString(token, options);
        if (auth) {
            defaults.headers = {
                authorization: auth
            };
        }
        // Proxy
        const agent = GitHub.getProxyAgent(defaults.baseUrl, options);
        if (agent) {
            defaults.request = { agent };
        }
        return graphql_1.graphql.defaults(defaults);
    }
    static getAuthString(token, options) {
        // Validate args
        if (!token && !options.auth) {
            throw new Error('Parameter token or opts.auth is required');
        }
        else if (token && options.auth) {
            throw new Error('Parameters token and opts.auth may not both be specified');
        }
        return typeof options.auth === 'string' ? options.auth : `token ${token}`;
    }
    static getProxyAgent(destinationUrl, options) {
        var _a;
        if (!((_a = options.request) === null || _a === void 0 ? void 0 : _a.agent)) {
            if (httpClient.getProxyUrl(destinationUrl)) {
                const hc = new httpClient.HttpClient();
                return hc.getAgent(destinationUrl);
            }
        }
        return undefined;
    }
    static getApiBaseUrl() {
        return process.env['GITHUB_API_URL'] || 'https://api.github.com';
    }
    static getGraphQLBaseUrl() {
        let url = process.env['GITHUB_GRAPHQL_URL'] || 'https://api.github.com/graphql';
        // Shouldn't be a trailing slash, but remove if so
        if (url.endsWith('/')) {
            url = url.substr(0, url.length - 1);
        }
        // Remove trailing "/graphql"
        if (url.toUpperCase().endsWith('/GRAPHQL')) {
            url = url.substr(0, url.length - '/graphql'.length);
        }
        return url;
    }
}
exports.GitHub = GitHub;
//# sourceMappingURL=github.js.map
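// Example (illustrative; the env value is made up): getGraphQLBaseUrl()
// normalizes GITHUB_GRAPHQL_URL="https://ghe.example.com/api/graphql/" to
// "https://ghe.example.com/api" - first trimming the trailing slash, then
// the "/graphql" suffix, which the GraphQL client is expected to append
// itself.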
1 node_modules/@actions/github/lib/github.js.map generated vendored
File diff suppressed because one or more lines are too long
36 node_modules/@actions/github/lib/interfaces.d.ts generated vendored
@@ -1,36 +0,0 @@
export interface PayloadRepository {
    [key: string]: any;
    full_name?: string;
    name: string;
    owner: {
        [key: string]: any;
        login: string;
        name?: string;
    };
    html_url?: string;
}
export interface WebhookPayload {
    [key: string]: any;
    repository?: PayloadRepository;
    issue?: {
        [key: string]: any;
        number: number;
        html_url?: string;
        body?: string;
    };
    pull_request?: {
        [key: string]: any;
        number: number;
        html_url?: string;
        body?: string;
    };
    sender?: {
        [key: string]: any;
        type: string;
    };
    action?: string;
    installation?: {
        id: number;
        [key: string]: any;
    };
}
Some files were not shown because too many files have changed in this diff