Compare commits

2 Commits

Author       SHA1        Message                                 Date
Joshua Hale  4cd2eff354  report exceptions during upload         2020-05-06 14:14:14 +01:00
Joshua Hale  cce113b7fc  don't catch error initializing config   2020-05-06 14:03:29 +01:00
12429 changed files with 1453359 additions and 126648 deletions

@@ -1,10 +0,0 @@
root = true

[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[*.ts]
indent_style = space
indent_size = 2

@@ -1,5 +0,0 @@
blank_issues_enabled: true
contact_links:
  - name: Contact GitHub Support
    url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
    about: Contact Support about code scanning

@@ -1,13 +1,4 @@
 name: "CodeQL config"
 queries:
   - name: Run custom queries
     uses: ./queries
-  # Run all extra query suites, both because we want to
-  # and because it'll act as extra testing. This is why
-  # we include both even though one is a superset of the
-  # other, because we're testing the parsing logic and
-  # that the suites exist in the codeql bundle.
-  - uses: security-extended
-  - uses: security-and-quality
-paths-ignore:
-  - tests

@@ -1,178 +0,0 @@
import datetime
from github import Github
import random
import requests
import subprocess
import sys

# The branch being merged from.
# This is the one that contains day-to-day development work.
MAIN_BRANCH = 'main'
# The branch being merged into.
# This is the release branch that users reference.
LATEST_RELEASE_BRANCH = 'v1'
# Name of the remote
ORIGIN = 'origin'

# Runs git with the given args and returns the stdout.
# Raises an error if git does not exit successfully.
def run_git(*args):
    cmd = ['git', *args]
    p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if (p.returncode != 0):
        raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
    return p.stdout.decode('ascii')

# Returns true if the given branch exists on the origin remote
def branch_exists_on_remote(branch_name):
    return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''

# Opens a PR from the given branch to the release branch
def open_pr(repo, all_commits, short_main_sha, branch_name):
    # Sort the commits into the pull requests that introduced them,
    # and any commits that don't have a pull request
    pull_requests = []
    commits_without_pull_requests = []
    for commit in all_commits:
        pr = get_pr_for_commit(repo, commit)
        if pr is None:
            commits_without_pull_requests.append(commit)
        elif not any(p for p in pull_requests if p.number == pr.number):
            pull_requests.append(pr)

    print('Found ' + str(len(pull_requests)) + ' pull requests')
    print('Found ' + str(len(commits_without_pull_requests)) + ' commits not in a pull request')

    # Sort PRs and commits by age
    pull_requests = sorted(pull_requests, key=lambda pr: pr.number)
    commits_without_pull_requests = sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)

    # Start constructing the body text
    body = 'Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH

    conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
    body += '\n\nConductor for this PR is @' + conductor

    # List all PRs merged
    if len(pull_requests) > 0:
        body += '\n\nContains the following pull requests:'
        for pr in pull_requests:
            merger = get_merger_of_pr(repo, pr)
            body += '\n- #' + str(pr.number)
            body += ' - ' + pr.title
            body += ' (@' + merger + ')'

    # List all commits not part of a PR
    if len(commits_without_pull_requests) > 0:
        body += '\n\nContains the following commits not from a pull request:'
        for commit in commits_without_pull_requests:
            body += '\n- ' + commit.sha
            body += ' - ' + get_truncated_commit_message(commit)
            body += ' (@' + commit.author.login + ')'

    title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH

    # Create the pull request
    pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
    print('Created PR #' + str(pr.number))

    # Assign the conductor
    pr.add_to_assignees(conductor)
    print('Assigned PR to ' + conductor)

# Gets the person who should be in charge of the mergeback PR
def get_conductor(repo, pull_requests, other_commits):
    # If there are any PRs then use whoever merged the last one
    if len(pull_requests) > 0:
        return get_merger_of_pr(repo, pull_requests[-1])

    # Otherwise take the author of the latest commit
    return other_commits[-1].author.login

# Gets a list of the SHAs of all commits that have happened on main
# since the release branched off.
# This will not include any commits that exist on the release branch
# that aren't on main.
def get_commit_difference(repo):
    commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MAIN_BRANCH).strip().split('\n')

    # Convert to full-fledged commit objects
    commits = [repo.get_commit(c) for c in commits]

    # Filter out merge commits for PRs
    return list(filter(lambda c: not is_pr_merge_commit(c), commits))

# Is the given commit the automatic merge commit from when merging a PR
def is_pr_merge_commit(commit):
    return commit.committer.login == 'web-flow' and len(commit.parents) > 1

# Gets a copy of the commit message that should display nicely
def get_truncated_commit_message(commit):
    message = commit.commit.message.split('\n')[0]
    if len(message) > 60:
        return message[:57] + '...'
    else:
        return message

# Converts a commit into the PR that introduced it to the main branch.
# Returns the PR object, or None if no PR could be found.
def get_pr_for_commit(repo, commit):
    prs = commit.get_pulls()

    if prs.totalCount > 0:
        # In the case that there are multiple PRs, return the earliest one
        prs = sorted(list(prs), key=lambda pr: int(pr.number))
        return prs[0]
    else:
        return None

# Get the person who merged the pull request.
# For most cases this will be the same as the author, but for PRs opened
# by external contributors getting the merger will get us the GitHub
# employee who reviewed and merged the PR.
def get_merger_of_pr(repo, pr):
    return repo.get_commit(pr.merge_commit_sha).author.login

def main():
    if len(sys.argv) != 3:
        raise Exception('Usage: update-release-branch.py <github token> <repository nwo>')

    github_token = sys.argv[1]
    repository_nwo = sys.argv[2]
    repo = Github(github_token).get_repo(repository_nwo)

    # Print what we intend to do
    print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
    short_main_sha = run_git('rev-parse', '--short', MAIN_BRANCH).strip()
    print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)

    # See if there are any commits to merge in
    commits = get_commit_difference(repo)
    if len(commits) == 0:
        print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
        return

    # The branch name is based off of the name of branch being merged into
    # and the SHA of the branch being merged from. Thus if the branch already
    # exists we can assume we don't need to recreate it.
    new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_main_sha
    print('Branch name is ' + new_branch_name)

    # Check if the branch already exists. If so we can abort as this script
    # has already run on this combination of branches.
    if branch_exists_on_remote(new_branch_name):
        print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
        return

    # Create the new branch and push it to the remote
    print('Creating branch ' + new_branch_name)
    run_git('checkout', '-b', new_branch_name, MAIN_BRANCH)
    run_git('push', ORIGIN, new_branch_name)

    # Open a PR to update the branch
    open_pr(repo, commits, short_main_sha, new_branch_name)

if __name__ == '__main__':
    main()
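
For context, the "Update release branch" workflow further down runs this script directly; a minimal sketch of an equivalent local invocation (the token value is a placeholder, not from the source):

```python
# Sketch: invoke the release-branch script the same way the workflow does.
# Requires PyGithub (the workflow below pins PyGithub==1.51) and git on PATH.
import subprocess

subprocess.run(
    ['python', '.github/update-release-branch.py',
     '<github token>',           # a token with repo scope (placeholder)
     'github/codeql-action'],    # the <repository nwo> ("name with owner") argument
    check=True,
)
```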

@@ -1,6 +1,6 @@
 name: "CodeQL action"

-on: [push, pull_request]
+on: [push]

 jobs:
   build:
@@ -11,18 +11,7 @@ jobs:
     steps:
     - uses: actions/checkout@v1
-      with:
-        # Must fetch at least the immediate parents so that if this is
-        # a pull request then we can checkout the head of the pull request.
-        fetch-depth: 2
-    # If this run was triggered by a pull request event then checkout
-    # the head of the pull request instead of the merge commit.
-    - run: git checkout HEAD^2
-      if: ${{ github.event_name == 'pull_request' }}
     - uses: ./init
       with:
-        languages: javascript
-        config-file: ./.github/codeql/codeql-config.yml
+        config-file: ./.github/codeql/codeql-config.yml
     - uses: ./analyze

@@ -1,126 +1,22 @@
 name: "Integration Testing"

-on: [push, pull_request]
+on: [push]

 jobs:
-  multi-language-repo_test-autodetect-languages:
+  dispatch-events:
+    if: github.event.repository.full_name == 'github/codeql-action'
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
-    - name: Move codeql-action
-      shell: bash
-      run: |
-        mkdir ../action
-        mv * .github ../action/
-        mv ../action/tests/multi-language-repo/{*,.github} .
-    - uses: ./../action/init
-    - name: Build code
-      shell: bash
-      run: ./build.sh
-    - uses: ./../action/analyze
-      env:
-        TEST_MODE: true
-    - run: |
-        cd "$CODEQL_ACTION_DATABASE_DIR"
-        # List all directories as there will be precisely one directory per database
-        # but there may be other files in this directory such as query suites.
-        if [ "$(ls -d */ | wc -l)" != 6 ] || \
-           [[ ! -d cpp ]] || \
-           [[ ! -d csharp ]] || \
-           [[ ! -d go ]] || \
-           [[ ! -d java ]] || \
-           [[ ! -d javascript ]] || \
-           [[ ! -d python ]]; then
-          echo "Did not find expected number of databases. Database dir contains: $(ls)"
-          exit 1
-        fi
-
-  multi-language-repo_test-custom-queries:
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu-latest, windows-latest, macos-latest]
-    runs-on: ${{ matrix.os }}
-    steps:
-    - uses: actions/checkout@v2
-    - name: Move codeql-action
-      shell: bash
-      run: |
-        mkdir ../action
-        mv * .github ../action/
-        mv ../action/tests/multi-language-repo/{*,.github} .
-    - uses: ./../action/init
-      with:
-        languages: cpp,csharp,java,javascript,python
-        config-file: ./.github/codeql/custom-queries.yml
-    - name: Build code
-      shell: bash
-      run: ./build.sh
-    - uses: ./../action/analyze
-      env:
-        TEST_MODE: true
-
-  # Currently is not possible to analyze Go in conjunction with other languages in macos
-  multi-language-repo_test-go-custom-queries:
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu-latest, windows-latest, macos-latest]
-    runs-on: ${{ matrix.os }}
-    steps:
-    - uses: actions/setup-go@v2
-      if: ${{ matrix.os == 'macos-latest' }}
-      with:
-        go-version: '^1.13.1'
-    - uses: actions/checkout@v2
-    - name: Move codeql-action
-      shell: bash
-      run: |
-        mkdir ../action
-        mv * .github ../action/
-        mv ../action/tests/multi-language-repo/{*,.github} .
-    - uses: ./../action/init
-      with:
-        languages: go
-        config-file: ./.github/codeql/custom-queries.yml
-    - name: Build code
-      shell: bash
-      run: ./build.sh
-    - uses: ./../action/analyze
-      env:
-        TEST_MODE: true
-
-  multi-language-repo_rubocop:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v2
-    - name: Move codeql-action
-      shell: bash
-      run: |
-        mkdir ../action
-        mv * .github ../action/
-        mv ../action/tests/multi-language-repo/{*,.github} .
-    - name: Set up Ruby
-      uses: ruby/setup-ruby@v1
-      with:
-        ruby-version: 2.6
-    - name: Install Code Scanning integration
-      run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
-    - name: Install dependencies
-      run: bundle install
-    - name: Rubocop run
-      run: |
-        bash -c "
-          bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
-          [[ $? -ne 2 ]]
-        "
-    - uses: ./../action/upload-sarif
-      with:
-        sarif_file: rubocop.sarif
-      env:
-        TEST_MODE: true
+    - name: Send repository dispatch events
+      run: |
+        curl -X POST \
+          -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
+          -H "Accept: application/vnd.github.everest-preview+json" \
+          https://api.github.com/repos/Anthophila/amazon-cognito-js-copy/dispatches \
+          -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
+        curl -X POST \
+          -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
+          -H "Accept: application/vnd.github.everest-preview+json" \
+          https://api.github.com/repos/Anthophila/electron-test-action/dispatches \
+          -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
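
For readers more comfortable with Python than curl, this is roughly the same repository-dispatch request the new job sends (token and SHA are placeholders; `requests` is assumed available):

```python
# Sketch: send a repository_dispatch event like the workflow's curl call.
import requests

resp = requests.post(
    'https://api.github.com/repos/Anthophila/amazon-cognito-js-copy/dispatches',
    headers={
        'Authorization': 'Bearer <CODEQL_TESTING_TOKEN>',  # placeholder token
        'Accept': 'application/vnd.github.everest-preview+json',
    },
    json={'event_type': 'codeql-integration',
          'client_payload': {'sha': '<commit sha>'}},
)
resp.raise_for_status()  # the API returns 204 No Content on success
```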

.github/workflows/js-uptodate-check.yml (vendored, new file, 27 lines)

@@ -0,0 +1,27 @@
name: "Check generated JavaScript"
on: [pull_request]
jobs:
check-js:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Check generated JavaScript
run: |
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Generate the JavaScript files
npm run-script build
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
exit 1
fi
echo "Success: JavaScript files are up to date"

.github/workflows/npm-test.yml (vendored, new file, 12 lines)

@@ -0,0 +1,12 @@
name: "npm run-script test"
on: [push]
jobs:
npm-test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: npm run-script test
run: npm run-script test

@@ -1,71 +0,0 @@
name: "PR checks"
on: [push, pull_request]
jobs:
tslint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: tslint
run: npm run-script lint
check-js:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Check generated JavaScript
run: |
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Generate the JavaScript files
npm run-script build
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
git status
exit 1
fi
echo "Success: JavaScript files are up to date"
check-node-modules:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Check node modules up to date
run: |
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
npm run removeNPMAbsolutePaths
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
git status
exit 1
fi
echo "Success: node_modules are up to date"
npm-test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: npm run-script test
run: npm run-script test

.github/workflows/ts-lint.yml (vendored, new file, 12 lines)

@@ -0,0 +1,12 @@
name: "TSLint"
on: [push]
jobs:
tslint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: tslint
run: npm run-script lint

@@ -1,31 +0,0 @@
name: Update release branch

on:
  schedule:
    - cron: 0 9 * * 1
  repository_dispatch:
    # Example of how to trigger this:
    # curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
    # Replace <token> with a personal access token from this page: https://github.com/settings/tokens
    types: [update-release-branch]

jobs:
  update:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
      with:
        # Need full history so we can calculate diffs
        fetch-depth: 0
    - name: Set up Python
      uses: actions/setup-python@v2
      with:
        python-version: 3.5
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install PyGithub==1.51 requests
    - name: Update release branch
      run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}

.vscode/settings.json (vendored, 10 lines)

@@ -1,10 +0,0 @@
{
    "files.exclude": {
        // include the defaults from VS Code
        "**/.git": true,
        "**/.DS_Store": true,

        // transpiled JavaScript
        "lib": true,
    }
}

README.md (103 changed lines)

@@ -1,6 +1,6 @@
 # CodeQL Action

-This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
+This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/semmle/ql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.

 ## License
@@ -10,8 +10,6 @@ The underlying CodeQL CLI, used in this action, is licensed under the [GitHub Co
 ## Usage

-This is a short walkthrough, but for more information read [configuring code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning).
-
 To get code scanning results from CodeQL analysis on your repo you can use the following workflow as a template:

 ```yaml
@@ -20,29 +18,21 @@ name: "Code Scanning - Action"
 on:
   push:
-  pull_request:
   schedule:
     - cron: '0 0 * * 0'

 jobs:
   CodeQL-Build:
-    strategy:
-      fail-fast: false
     # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
     runs-on: ubuntu-latest

     steps:
     - name: Checkout repository
       uses: actions/checkout@v2
-      with:
-        # Must fetch at least the immediate parents so that if this is
-        # a pull request then we can checkout the head of the pull request.
-        # Only include this option if you are running this workflow on pull requests.
-        fetch-depth: 2
-
-    # If this run was triggered by a pull request event then checkout
-    # the head of the pull request instead of the merge commit.
-    # Only include this step if you are running this workflow on pull requests.
-    - run: git checkout HEAD^2
-      if: ${{ github.event_name == 'pull_request' }}

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
@@ -88,9 +78,24 @@ If you prefer to integrate this within an existing CI workflow, it should end up
       uses: github/codeql-action/analyze@v1
 ```

-### Configuration file
+### Actions triggers

-Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
+The CodeQL action should be run on `push` events, and on a `schedule`. `Push` events allow us to do a detailed analysis of the delta in a pull request, while the `schedule` event ensures that GitHub regularly scans the repository for the latest vulnerabilities, even if the repository becomes inactive. This action does not support the `pull_request` event.
+
+### Configuration
+
+You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses).
+
+You can disable the default queries using `disable-default-queries: true`.
+
+You can choose to ignore some files or folders from the analysis, or include additional files/folders for analysis. This *only* works for JavaScript and Python analysis. Identifying potential files for extraction:
+
+- Scans each folder that's defined as `paths` in turn, traversing subfolders, and looking for relevant files.
+- If it finds a subfolder that's defined as `paths-ignore`, stop traversing.
+- If a file or folder is both in `paths` and `paths-ignore`, the `paths-ignore` is ignored.
+
+Use the `config-file` parameter of the init action to enable the configuration file. For example:

 ```yaml
 - uses: github/codeql-action/init@v1
@@ -98,8 +103,68 @@ Use the `config-file` parameter of the init action to enable the configuration
     config-file: ./.github/codeql/codeql-config.yml
 ```

-The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
+A config file looks like this:
+
+```yaml
+name: "My CodeQL config"
+
+disable-default-queries: true
+
+queries:
+  - name: In-repo queries (Runs the queries located in the my-queries folder of the repo)
+    uses: ./my-queries
+  - name: External JavaScript QL pack (Runs a QL pack located in an external repo)
+    uses: Semmle/ql/javascript/ql/src/Electron@master
+  - name: External query (Runs a single query located in an external QL pack)
+    uses: Semmle/ql/javascript/ql/src/AngularJS/DeadAngularJSEventListener.ql@master
+  - name: Select query suite (Runs a query suite)
+    uses: ./codeql-querypacks/complex-python-querypack/rootAndBar.qls
+
+paths:
+  - src/util.ts
+
+paths-ignore:
+  - src
+  - lib
+```

 ## Troubleshooting

-Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
+### Trouble with Go dependencies
+
+#### If you use a vendor directory
+
+Try passing
+```yaml
+env:
+  GOFLAGS: "-mod=vendor"
+```
+to `github/codeql-action/analyze`.
+
+#### If you do not use a vendor directory
+
+Dependencies on public repositories should just work. If you have dependencies on private repositories, one option is to use `git config` and a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) to authenticate when downloading dependencies. Add a section like
+```yaml
+steps:
+- name: Configure git private repo access
+  env:
+    TOKEN: ${{ secrets.GITHUB_PAT }}
+  run: |
+    git config --global url."https://${TOKEN}@github.com/foo/bar".insteadOf "https://github.com/foo/bar"
+    git config --global url."https://${TOKEN}@github.com/foo/baz".insteadOf "https://github.com/foo/baz"
+```
+before any codeql actions. A similar thing can also be done with an SSH key or deploy key.
+
+### C# using dotnet version 2 on linux
+
+This currently requires invoking `dotnet` with the `/p:UseSharedCompilation=false` flag. For example:
+```shell
+dotnet build /p:UseSharedCompilation=false
+```
+Version 3 does not require the additional flag.

@@ -4,7 +4,6 @@ author: 'GitHub'
 inputs:
   check_name:
     description: The name of the check run to add text to.
-    required: false
   output:
     description: The path of the directory in which to save the SARIF results
     required: false
@@ -12,14 +11,7 @@ inputs:
   upload:
     description: Upload the SARIF file
     required: false
-    default: "true"
+    default: true
-  ram:
-    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
-    required: false
-  threads:
-    description: The number of threads to be used by CodeQL.
-    required: false
-    default: "1"
   token:
     default: ${{ github.token }}
   matrix:

@@ -5,14 +5,12 @@ inputs:
   tools:
     description: URL of CodeQL tools
     required: false
-    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz
+    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
   languages:
     description: The languages to be analysed
     required: false
   token:
     default: ${{ github.token }}
-  matrix:
-    default: ${{ toJson(matrix) }}
   config-file:
     description: Path of the config file to use
     required: false

jest.config.js (new file, 11 lines)

@@ -0,0 +1,11 @@
module.exports = {
  clearMocks: true,
  moduleFileExtensions: ['js', 'ts'],
  testEnvironment: 'node',
  testMatch: ['**/*.test.ts'],
  testRunner: 'jest-circus/runner',
  transform: {
    '^.+\\.ts$': 'ts-jest'
  },
  verbose: true
}

lib/analysis-paths.js (generated, 3 changed lines)

@@ -16,7 +16,7 @@ function includeAndExcludeAnalysisPaths(config, languages) {
         core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
     }
     function isInterpretedLanguage(language) {
-        return language === 'javascript' || language === 'python';
+        return language === 'javascript' && language === 'python';
     }
     // Index include/exclude only work in javascript and python
     // If some other language is detected/configured show a warning
@@ -25,4 +25,3 @@ function includeAndExcludeAnalysisPaths(config, languages) {
     }
 }
 exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
-//# sourceMappingURL=analysis-paths.js.map

@@ -1 +0,0 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAgB,8BAA8B,CAAC,MAA0B,EAAE,SAAmB;IAC5F,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KACpE;IAED,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC1E;IAED,SAAS,qBAAqB,CAAC,QAAQ;QACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;IAC5D,CAAC;IAED,2DAA2D;IAC3D,+DAA+D;IAC/D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAC7G,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAlBD,wEAkBC"}

@@ -1,32 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const analysisPaths = __importStar(require("./analysis-paths"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default("emptyPaths", async (t) => {
let config = new configUtils.Config();
analysisPaths.includeAndExcludeAnalysisPaths(config, []);
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
});
ava_1.default("nonEmptyPaths", async (t) => {
let config = new configUtils.Config();
config.paths.push('path1', 'path2');
config.pathsIgnore.push('path3', 'path4');
analysisPaths.includeAndExcludeAnalysisPaths(config, []);
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path3\npath4');
});
//# sourceMappingURL=analysis-paths.test.js.map

@@ -1 +0,0 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,4DAA8C;AAC9C,mDAAmD;AAEnD,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IACpC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC1C,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;AAC1D,CAAC,CAAC,CAAC"}

lib/api-client.js (generated, 23 lines)

@@ -1,23 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const octokit = __importStar(require("@octokit/rest"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const githubAPIURL = process.env["GITHUB_API_URL"] || "https://api.github.com";
exports.client = new octokit.Octokit({
auth: core.getInput("token"),
baseUrl: githubAPIURL,
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" })
});
//# sourceMappingURL=api-client.js.map

@@ -1 +0,0 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,uDAAyC;AACzC,0EAAgD;AAEhD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,IAAI,wBAAwB,CAAC;AAClE,QAAA,MAAM,GAAG,IAAI,OAAO,CAAC,OAAO,CAAC;IACxC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC;IAC5B,OAAO,EAAE,YAAY;IACrB,SAAS,EAAE,eAAe;IAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;CACzC,CAAC,CAAC"}

lib/autobuild.js (generated, 1 changed line)

@@ -58,4 +58,3 @@ run().catch(e => {
     core.setFailed("autobuild action failed. " + e);
     console.log(e);
 });
-//# sourceMappingURL=autobuild.js.map

@@ -1 +0,0 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,2CAA6B;AAE7B,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,8DAA8D;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC;QAExE,MAAM,OAAO,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,cAAc,CAAC;QAChF,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;QAGpF,+DAA+D;QAC/D,0FAA0F;QAC1F,qDAAqD;QACrD,8EAA8E;QAC9E,gHAAgH;QAChH,IAAI,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,wBAAwB,EAAE,+BAA+B,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAE1I,MAAM,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC9B,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

lib/config-utils.js (generated, 183 changed lines)

@@ -12,13 +12,6 @@ const io = __importStar(require("@actions/io"));
 const fs = __importStar(require("fs"));
 const yaml = __importStar(require("js-yaml"));
 const path = __importStar(require("path"));
-const util = __importStar(require("./util"));
-const NAME_PROPERTY = 'name';
-const DISPLAY_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
-const QUERIES_PROPERTY = 'queries';
-const QUERIES_USES_PROPERTY = 'uses';
-const PATHS_IGNORE_PROPERTY = 'paths-ignore';
-const PATHS_PROPERTY = 'paths';
 class ExternalQuery {
     constructor(repository, ref) {
         this.path = '';
@@ -27,74 +20,39 @@
     }
 }
 exports.ExternalQuery = ExternalQuery;
-// The set of acceptable values for built-in suites from the codeql bundle
-const builtinSuites = ['security-extended', 'security-and-quality'];
 class Config {
     constructor() {
         this.name = "";
         this.disableDefaultQueries = false;
         this.additionalQueries = [];
         this.externalQueries = [];
-        this.additionalSuites = [];
         this.pathsIgnore = [];
         this.paths = [];
     }
-    addQuery(configFile, queryUses) {
+    addQuery(queryUses) {
         // The logic for parsing the string is based on what actions does for
         // parsing the 'uses' actions in the workflow file
-        queryUses = queryUses.trim();
         if (queryUses === "") {
-            throw new Error(getQueryUsesInvalid(configFile));
+            throw '"uses" value for queries cannot be blank';
        }
-        // Check for the local path case before we start trying to parse the repository name
         if (queryUses.startsWith("./")) {
-            const localQueryPath = queryUses.slice(2);
-            // Resolve the local path against the workspace so that when this is
-            // passed to codeql it resolves to exactly the path we expect it to resolve to.
-            const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
-            let absoluteQueryPath = path.join(workspacePath, localQueryPath);
-            // Check the file exists
-            if (!fs.existsSync(absoluteQueryPath)) {
-                throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
-            }
-            // Call this after checking file exists, because it'll fail if file doesn't exist
-            absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
-            // Check the local path doesn't jump outside the repo using '..' or symlinks
-            if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
-                throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
-            }
-            this.additionalQueries.push(absoluteQueryPath);
+            this.additionalQueries.push(queryUses.slice(2));
             return;
         }
-        // Check for one of the builtin suites
-        if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
-            const suite = builtinSuites.find((suite) => suite === queryUses);
-            if (suite) {
-                this.additionalSuites.push(suite);
-                return;
-            }
-            else {
-                throw new Error(getQueryUsesInvalid(configFile, queryUses));
-            }
-        }
         let tok = queryUses.split('@');
         if (tok.length !== 2) {
-            throw new Error(getQueryUsesInvalid(configFile, queryUses));
+            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
         }
         const ref = tok[1];
         tok = tok[0].split('/');
         // The first token is the owner
         // The second token is the repo
         // The rest is a path, if there is more than one token combine them to form the full path
-        if (tok.length < 2) {
-            throw new Error(getQueryUsesInvalid(configFile, queryUses));
-        }
         if (tok.length > 3) {
             tok = [tok[0], tok[1], tok.slice(2).join('/')];
         }
-        // Check none of the parts of the repository name are empty
-        if (tok[0].trim() === '' || tok[1].trim() === '') {
-            throw new Error(getQueryUsesInvalid(configFile, queryUses));
+        if (tok.length < 2) {
+            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
         }
         let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
         if (tok.length === 3) {
@@ -104,137 +62,57 @@
         }
     }
 }
 exports.Config = Config;
-function getNameInvalid(configFile) {
-    return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
-}
-exports.getNameInvalid = getNameInvalid;
-function getDisableDefaultQueriesInvalid(configFile) {
-    return getConfigFilePropertyError(configFile, DISPLAY_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
-}
-exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
-function getQueriesInvalid(configFile) {
-    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
-}
-exports.getQueriesInvalid = getQueriesInvalid;
-function getQueryUsesInvalid(configFile, queryUses) {
-    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
-        '), a relative path, or be of the form "owner/repo[/path]@ref"' +
-        (queryUses !== undefined ? '\n Found: ' + queryUses : ''));
-}
-exports.getQueryUsesInvalid = getQueryUsesInvalid;
-function getPathsIgnoreInvalid(configFile) {
-    return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
-}
-exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
-function getPathsInvalid(configFile) {
-    return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
-}
-exports.getPathsInvalid = getPathsInvalid;
-function getLocalPathOutsideOfRepository(configFile, localPath) {
-    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
-}
-exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
-function getLocalPathDoesNotExist(configFile, localPath) {
-    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
-}
-exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
-function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
-    return 'The configuration file "' + configFile + '" is outside of the workspace';
-}
-exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
-function getConfigFileDoesNotExistErrorMessage(configFile) {
-    return 'The configuration file "' + configFile + '" does not exist';
-}
-exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
-function getConfigFilePropertyError(configFile, property, error) {
-    return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
-}
+const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
 function initConfig() {
-    let configFile = core.getInput('config-file');
+    const configFile = core.getInput('config-file');
     const config = new Config();
     // If no config file was provided create an empty one
     if (configFile === '') {
         core.debug('No configuration file was provided');
         return config;
     }
-    // Treat the config file as relative to the workspace
-    const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
-    configFile = path.resolve(workspacePath, configFile);
-    // Error if the config file is now outside of the workspace
-    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
-        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
-    }
-    // Error if the file does not exist
-    if (!fs.existsSync(configFile)) {
-        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
-    }
     const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
-    if (NAME_PROPERTY in parsedYAML) {
-        if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
-            throw new Error(getNameInvalid(configFile));
-        }
-        if (parsedYAML[NAME_PROPERTY].length === 0) {
-            throw new Error(getNameInvalid(configFile));
-        }
-        config.name = parsedYAML[NAME_PROPERTY];
+    if (parsedYAML.name && typeof parsedYAML.name === "string") {
+        config.name = parsedYAML.name;
     }
-    if (DISPLAY_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
-        if (typeof parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
-            throw new Error(getDisableDefaultQueriesInvalid(configFile));
-        }
-        config.disableDefaultQueries = parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY];
+    if (parsedYAML['disable-default-queries'] && typeof parsedYAML['disable-default-queries'] === "boolean") {
+        config.disableDefaultQueries = parsedYAML['disable-default-queries'];
     }
-    if (QUERIES_PROPERTY in parsedYAML) {
-        if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
-            throw new Error(getQueriesInvalid(configFile));
-        }
-        parsedYAML[QUERIES_PROPERTY].forEach(query => {
-            if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
-                throw new Error(getQueryUsesInvalid(configFile));
+    const queries = parsedYAML.queries;
+    if (queries && queries instanceof Array) {
+        queries.forEach(query => {
+            if (query.uses && typeof query.uses === "string") {
+                config.addQuery(query.uses);
             }
-            config.addQuery(configFile, query[QUERIES_USES_PROPERTY]);
         });
     }
-    if (PATHS_IGNORE_PROPERTY in parsedYAML) {
-        if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
-            throw new Error(getPathsIgnoreInvalid(configFile));
-        }
-        parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
-            if (typeof path !== "string" || path === '') {
-                throw new Error(getPathsIgnoreInvalid(configFile));
+    const pathsIgnore = parsedYAML['paths-ignore'];
+    if (pathsIgnore && pathsIgnore instanceof Array) {
+        pathsIgnore.forEach(path => {
+            if (typeof path === "string") {
+                config.pathsIgnore.push(path);
             }
-            config.pathsIgnore.push(path);
         });
     }
-    if (PATHS_PROPERTY in parsedYAML) {
-        if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
-            throw new Error(getPathsInvalid(configFile));
-        }
-        parsedYAML[PATHS_PROPERTY].forEach(path => {
-            if (typeof path !== "string" || path === '') {
-                throw new Error(getPathsInvalid(configFile));
+    const paths = parsedYAML.paths;
+    if (paths && paths instanceof Array) {
+        paths.forEach(path => {
+            if (typeof path === "string") {
+                config.paths.push(path);
             }
-            config.paths.push(path);
         });
     }
     return config;
 }
-function getConfigFolder() {
-    return util.getRequiredEnvParam('RUNNER_TEMP');
-}
-function getConfigFile() {
-    return path.join(getConfigFolder(), 'config');
-}
-exports.getConfigFile = getConfigFile;
 async function saveConfig(config) {
     const configString = JSON.stringify(config);
-    await io.mkdirP(getConfigFolder());
-    fs.writeFileSync(getConfigFile(), configString, 'utf8');
+    await io.mkdirP(configFolder);
+    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
     core.debug('Saved config:');
     core.debug(configString);
 }
 async function loadConfig() {
-    const configFile = getConfigFile();
+    const configFile = path.join(configFolder, 'config');
     if (fs.existsSync(configFile)) {
         const configString = fs.readFileSync(configFile, 'utf8');
         core.debug('Loaded config:');
@@ -250,4 +128,3 @@ async function loadConfig() {
     }
 }
 exports.loadConfig = loadConfig;
-//# sourceMappingURL=config-utils.js.map
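
To make the `uses` parsing in `addQuery` above concrete, here is a rough Python transcription of the `owner/repo[/path]@ref` rules (the names are mine; the filesystem checks from the fuller version are omitted):

```python
# Sketch of the 'uses' parsing from addQuery above (simplified, no filesystem checks).
def parse_uses(query_uses: str):
    query_uses = query_uses.strip()
    if query_uses == '':
        raise ValueError('"uses" value for queries cannot be blank')
    if query_uses.startswith('./'):
        return ('local', query_uses[2:])
    tok = query_uses.split('@')
    if len(tok) != 2:
        raise ValueError('must be a path, or owner/repo@ref; found: ' + query_uses)
    ref = tok[1]
    parts = tok[0].split('/')
    if len(parts) < 2:
        raise ValueError('must be a path, or owner/repo@ref; found: ' + query_uses)
    # First token is the owner, second is the repo, the rest forms the path.
    owner, repo, path = parts[0], parts[1], '/'.join(parts[2:])
    return ('external', owner + '/' + repo, path, ref)

assert parse_uses('./my-queries') == ('local', 'my-queries')
assert parse_uses('foo/bar/sub/dir@main') == ('external', 'foo/bar', 'sub/dir', 'main')
```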

File diff suppressed because one or more lines are too long

lib/config-utils.test.js (generated, 164 lines)

@@ -1,164 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
function setInput(name, value) {
// Transformation copied from
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
if (value !== undefined) {
process.env[envVar] = value;
}
else {
delete process.env[envVar];
}
}
ava_1.default("load empty config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
const config = await configUtils.loadConfig();
t.deepEqual(config, new configUtils.Config());
});
});
ava_1.default("loading config saves config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const configFile = configUtils.getConfigFile();
// Sanity check the saved config file does not already exist
t.false(fs.existsSync(configFile));
const config = await configUtils.loadConfig();
// The saved config file should now exist
t.true(fs.existsSync(configFile));
// And the contents should parse correctly to the config that was returned
t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
});
});
ava_1.default("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', '../input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
}
});
});
ava_1.default("load non-existent input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
t.false(fs.existsSync(path.join(tmpDir, 'input')));
setInput('config-file', 'input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
}
});
});
ava_1.default("load non-empty input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Just create a generic config object with non-default values for all fields
const inputFileContents = `
name: my config
disable-default-queries: true
queries:
- uses: ./
- uses: ./foo
- uses: foo/bar@dev
paths-ignore:
- a
- b
paths:
- c/d`;
fs.mkdirSync(path.join(tmpDir, 'foo'));
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.name = 'my config';
expectedConfig.disableDefaultQueries = true;
expectedConfig.additionalQueries.push(fs.realpathSync(tmpDir));
expectedConfig.additionalQueries.push(fs.realpathSync(path.join(tmpDir, 'foo')));
expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
expectedConfig.pathsIgnore = ['a', 'b'];
expectedConfig.paths = ['c/d'];
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
ava_1.default("load invalid input - " + testName, async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const inputFile = path.join(tmpDir, 'input');
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
setInput('config-file', 'input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
}
});
});
}
doInvalidInputTest('name invalid type', `
name:
- foo: bar`, configUtils.getNameInvalid);
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest('queries uses invalid type', `
queries:
- uses:
- hello: world`, configUtils.getQueryUsesInvalid);
function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
// Invalid contents of a "queries.uses" field.
// Should fail with the expected error message
const inputFileContents = `
name: my config
queries:
- name: foo
uses: ` + input;
doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
}
// Various "uses" fields, and the errors they should produce
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
//# sourceMappingURL=config-utils.test.js.map

File diff suppressed because one or more lines are too long

@@ -11,9 +11,8 @@ const core = __importStar(require("@actions/core"));
 const exec = __importStar(require("@actions/exec"));
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
-const util = __importStar(require("./util"));
 async function checkoutExternalQueries(config) {
-    const folder = util.getRequiredEnvParam('RUNNER_TEMP');
+    const folder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
     for (const externalQuery of config.externalQueries) {
         core.info('Checking out ' + externalQuery.repository);
         const checkoutLocation = path.join(folder, externalQuery.repository);
@@ -30,4 +29,3 @@ async function checkoutExternalQueries(config) {
     }
 }
 exports.checkoutExternalQueries = checkoutExternalQueries;
-//# sourceMappingURL=external-queries.js.map
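
The hunk above only shows the bookkeeping around the checkout; the git commands themselves fall outside the displayed context. Roughly, the function clones each external repository and checks out the pinned ref; a hedged Python sketch of that flow (the exact git invocations are my assumption, not taken from the diff):

```python
# Sketch: fetch each external query repository at a pinned ref.
import os
import subprocess

def checkout_external_queries(folder: str, external_queries: list):
    """external_queries: (repository, ref) pairs, e.g. ('github/codeql-go', '<sha>')."""
    for repository, ref in external_queries:
        checkout_location = os.path.join(folder, repository)
        if not os.path.exists(checkout_location):
            subprocess.run(['git', 'clone',
                            'https://github.com/' + repository,
                            checkout_location], check=True)
            subprocess.run(['git', '-C', checkout_location, 'checkout', ref],
                           check=True)
```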

@@ -1 +0,0 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}

@@ -1,33 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const externalQueries = __importStar(require("./external-queries"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default("checkoutExternalQueries", async (t) => {
let config = new configUtils.Config();
config.externalQueries = [
new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
];
await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
await externalQueries.checkoutExternalQueries(config);
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
});
});
//# sourceMappingURL=external-queries.test.js.map

@@ -1 +0,0 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACvB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAC9F,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/finalize-db.js generated

@@ -17,25 +17,6 @@ const externalQueries = __importStar(require("./external-queries"));
 const sharedEnv = __importStar(require("./shared-environment"));
 const upload_lib = __importStar(require("./upload-lib"));
 const util = __importStar(require("./util"));
-/**
- * A list of queries from https://github.com/github/codeql that
- * we don't want to run. Disabling them here is a quicker alternative to
- * disabling them in the code scanning query suites. Queries should also
- * be disabled in the suites, and removed from this list here once the
- * bundle is updated to make those suite changes live.
- *
- * Format is a map from language to an array of path suffixes of .ql files.
- */
-const DISABLED_BUILTIN_QUERIES = {
-    'csharp': [
-        'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
-        'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
-    ]
-};
-function queryIsDisabled(language, query) {
-    return (DISABLED_BUILTIN_QUERIES[language] || [])
-        .some(disabledQuery => query.endsWith(disabledQuery));
-}
 async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
     const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
     if (scannedLanguages) {
@@ -68,50 +49,26 @@ async function finalizeDatabaseCreation(codeqlCmd, databaseFolder) {
         core.endGroup();
     }
 }
-async function runResolveQueries(codeqlCmd, queries) {
-    let output = '';
-    const options = {
-        listeners: {
-            stdout: (data) => {
-                output += data.toString();
-            }
-        }
-    };
-    await exec.exec(codeqlCmd, [
-        'resolve',
-        'queries',
-        ...queries,
-        '--format=bylanguage'
-    ], options);
-    return JSON.parse(output);
-}
 async function resolveQueryLanguages(codeqlCmd, config) {
     let res = new Map();
-    if (!config.disableDefaultQueries || config.additionalSuites.length !== 0) {
-        const suites = [];
-        for (const language of await util.getLanguages()) {
-            if (!config.disableDefaultQueries) {
-                suites.push(language + '-code-scanning.qls');
-            }
-            for (const additionalSuite of config.additionalSuites) {
-                suites.push(language + '-' + additionalSuite + '.qls');
-            }
-        }
-        const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, suites);
-        for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
-            if (res[language] === undefined) {
-                res[language] = [];
-            }
-            res[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
-        }
-    }
     if (config.additionalQueries.length !== 0) {
-        const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, config.additionalQueries);
-        for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
-            if (res[language] === undefined) {
-                res[language] = [];
-            }
-            res[language].push(...Object.keys(queries));
-        }
+        let resolveQueriesOutput = '';
+        const options = {
+            listeners: {
+                stdout: (data) => {
+                    resolveQueriesOutput += data.toString();
+                }
+            }
+        };
+        await exec.exec(codeqlCmd, [
+            'resolve',
+            'queries',
+            ...config.additionalQueries,
+            '--format=bylanguage'
+        ], options);
+        const resolveQueriesOutputObject = JSON.parse(resolveQueriesOutput);
+        for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
+            res[language] = Object.keys(queries);
+        }
     }
     const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
     const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
@@ -131,27 +88,20 @@ async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
     const queriesPerLanguage = await resolveQueryLanguages(codeqlCmd, config);
     for (let database of fs.readdirSync(databaseFolder)) {
         core.startGroup('Analyzing ' + database);
-        const queries = queriesPerLanguage[database] || [];
-        if (queries.length === 0) {
-            throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
+        const queries = [];
+        if (!config.disableDefaultQueries) {
+            queries.push(database + '-code-scanning.qls');
         }
-        // Pass the queries to codeql using a file instead of using the command
-        // line to avoid command line length restrictions, particularly on windows.
-        const querySuite = path.join(databaseFolder, database + '-queries.qls');
-        const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
-        fs.writeFileSync(querySuite, querySuiteContents);
-        core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
+        queries.push(...(queriesPerLanguage[database] || []));
         const sarifFile = path.join(sarifFolder, database + '.sarif');
         await exec.exec(codeqlCmd, [
             'database',
             'analyze',
-            util.getMemoryFlag(),
-            util.getThreadsFlag(),
             path.join(databaseFolder, database),
             '--format=sarif-latest',
             '--output=' + sarifFile,
             '--no-sarif-add-snippets',
-            querySuite
+            ...queries
         ]);
         core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
         core.endGroup();
@@ -192,4 +142,3 @@ run().catch(e => {
     core.setFailed("analyze action failed: " + e);
     console.log(e);
 });
-//# sourceMappingURL=finalize-db.js.map
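One technique on the removed (left-hand) side of the runQueries hunk deserves a note: the selected queries are written to an ad-hoc .qls suite file which is then handed to codeql as a single argument, avoiding command-line length limits (particularly on Windows). Reduced to a sketch:

import * as fs from 'fs';
import * as path from 'path';

// Illustrative inputs; in the action they come from resolveQueryLanguages.
const databaseFolder = '/tmp/codeql_databases';
const database = 'javascript';
const queries = ['/queries/UnsafeEval.ql', '/queries/PrototypePollution.ql'];

// One '- query: <path>' entry per line forms a valid CodeQL query suite;
// 'codeql database analyze ... <querySuite>' then runs all of them at once.
const querySuite = path.join(databaseFolder, database + '-queries.qls');
fs.writeFileSync(querySuite, queries.map(q => '- query: ' + q).join('\n'));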

File diff suppressed because one or more lines are too long


@@ -1,2 +0,0 @@
"use strict";
//# sourceMappingURL=finalize-db.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"finalize-db.test.js","sourceRoot":"","sources":["../src/finalize-db.test.ts"],"names":[],"mappings":""}

lib/fingerprints.js generated

@@ -245,4 +245,3 @@ function addFingerprints(sarifContents) {
     return JSON.stringify(sarif);
 }
 exports.addFingerprints = addFingerprints;
-//# sourceMappingURL=fingerprints.js.map

File diff suppressed because one or more lines are too long

lib/fingerprints.test.js generated

@@ -1,159 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const fingerprints = __importStar(require("./fingerprints"));
const testing_utils_1 = require("./testing-utils");
testing_utils_1.silenceDebugOutput(ava_1.default);
function testHash(t, input, expectedHashes) {
let index = 0;
let callback = function (lineNumber, hash) {
t.is(lineNumber, index + 1);
t.is(hash, expectedHashes[index]);
index++;
};
fingerprints.hash(callback, input);
t.is(index, input.split(/\r\n|\r|\n/).length);
}
ava_1.default('hash', (t) => {
// Try empty file
testHash(t, "", ["c129715d7a2bc9a3:1"]);
// Try various combinations of newline characters
testHash(t, " a\nb\n \t\tc\n d", [
"271789c17abda88f:1",
"54703d4cd895b18:1",
"180aee12dab6264:1",
"a23a3dc5e078b07b:1"
]);
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
"8b7cf3e952e7aeb2:1",
"b1ae1287ec4718d9:1",
"bff680108adb0fcc:1",
"c6805c5e1288b612:1",
"b86d3392aea1be30:1",
"e6ceba753e1a442:1",
]);
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
// Try repeating line that will generate identical hashes
testHash(t, "Lorem ipsum dolor sit amet.\n".repeat(10), [
"a7f2ff13bc495cf2:1",
"a7f2ff13bc495cf2:2",
"a7f2ff13bc495cf2:3",
"a7f2ff13bc495cf2:4",
"a7f2ff13bc495cf2:5",
"a7f2ff13bc495cf2:6",
"a7f2ff1481e87703:1",
"a9cf91f7bbf1862b:1",
"55ec222b86bcae53:1",
"cc97dc7b1d7d8f7b:1",
"c129715d7a2bc9a3:1"
]);
});
function testResolveUriToFile(uri, index, artifactsURIs) {
const location = { "uri": uri, "index": index };
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
return fingerprints.resolveUriToFile(location, artifacts);
}
ava_1.default('resolveUriToFile', t => {
// The resolveUriToFile method checks that the file exists and is in the right directory
// so we need to give it real files to look at. We will use this file as an example.
// For this to work we require the current working directory to be a parent, but this
// should generally always be the case so this is fine.
const cwd = process.cwd();
const filepath = __filename;
t.true(filepath.startsWith(cwd + '/'));
const relativeFilepath = filepath.substring(cwd.length + 1);
process.env['GITHUB_WORKSPACE'] = cwd;
// Absolute paths are unmodified
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
// Relative paths are made absolute
t.is(testResolveUriToFile(relativeFilepath, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + relativeFilepath, undefined, []), filepath);
// Absolute paths outside the src root are discarded
t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
// Other schemes are discarded
t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
// Invalid URIs are discarded
t.is(testResolveUriToFile(1, undefined, []), undefined);
t.is(testResolveUriToFile(undefined, undefined, []), undefined);
// Non-existent files are discarded
t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
// Index is resolved
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
// Invalid indexes are discarded
t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
});
ava_1.default('addFingerprints', t => {
// Run an end-to-end test on a test file
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input), expected);
});
ava_1.default('missingRegions', t => {
// Run an end-to-end test on a test file
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input), expected);
});
//# sourceMappingURL=fingerprints.test.js.map
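The expected values above document the fingerprint format: 'hash:occurrence', where the 1-based counter disambiguates identical context hashes (the repeated 'Lorem ipsum' line yields a7f2ff13bc495cf2:1 through :6). A sketch of just that counting step, with a trivial placeholder in place of the real rolling hash:

// Trivial placeholder: the real implementation hashes a window of context lines.
function hashForLine(lines: string[], index: number): string {
    return lines[index];
}

function fingerprintLines(lines: string[]): string[] {
    const seen = new Map<string, number>();
    return lines.map((_, i) => {
        const h = hashForLine(lines, i);
        const occurrence = (seen.get(h) || 0) + 1; // 1-based, as in the tests above
        seen.set(h, occurrence);
        return h + ':' + occurrence;
    });
}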

File diff suppressed because one or more lines are too long

lib/setup-tools.js generated

@@ -10,7 +10,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
 const toolcache = __importStar(require("@actions/tool-cache"));
 const path = __importStar(require("path"));
-const semver = __importStar(require("semver"));
 class CodeQLSetup {
     constructor(codeqlDist) {
         this.dist = codeqlDist;
@@ -20,7 +19,7 @@ class CodeQLSetup {
         if (process.platform === 'win32') {
             this.platform = 'win64';
             if (this.cmd.endsWith('codeql')) {
-                this.cmd += ".exe";
+                this.cmd += ".cmd";
             }
         }
         else if (process.platform === 'linux') {
@@ -36,41 +35,17 @@
 }
 exports.CodeQLSetup = CodeQLSetup;
 async function setupCodeQL() {
-    try {
-        const codeqlURL = core.getInput('tools', { required: true });
-        const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
-        let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
-        if (codeqlFolder) {
-            core.debug(`CodeQL found in cache ${codeqlFolder}`);
-        }
-        else {
-            const codeqlPath = await toolcache.downloadTool(codeqlURL);
-            const codeqlExtracted = await toolcache.extractTar(codeqlPath);
-            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
-        }
-        return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
-    }
-    catch (e) {
-        core.error(e);
-        throw new Error("Unable to download and extract CodeQL CLI");
-    }
+    const version = '1.0.0';
+    const codeqlURL = core.getInput('tools', { required: true });
+    let codeqlFolder = toolcache.find('CodeQL', version);
+    if (codeqlFolder) {
+        core.debug(`CodeQL found in cache ${codeqlFolder}`);
+    }
+    else {
+        const codeqlPath = await toolcache.downloadTool(codeqlURL);
+        const codeqlExtracted = await toolcache.extractTar(codeqlPath);
+        codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
+    }
+    return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
 }
 exports.setupCodeQL = setupCodeQL;
-function getCodeQLURLVersion(url) {
-    const match = url.match(/\/codeql-bundle-(.*)\//);
-    if (match === null || match.length < 2) {
-        throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
-    }
-    let version = match[1];
-    if (!semver.valid(version)) {
-        core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
-        version = '0.0.0-' + version;
-    }
-    const s = semver.clean(version);
-    if (!s) {
-        throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
-    }
-    return s;
-}
-exports.getCodeQLURLVersion = getCodeQLURLVersion;
-//# sourceMappingURL=setup-tools.js.map
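The behaviour of the removed getCodeQLURLVersion is pinned down by the 'parse codeql bundle url version' test table further down; assuming the function above is in scope, for example:

// Date-style bundle tags are not valid SemVer, so they are coerced into a
// pre-release of 0.0.0 before being used as the toolcache version.
console.log(getCodeQLURLVersion(
    'https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz')); // '0.0.0-20200601'
// Tags that already parse as SemVer pass through unchanged.
console.log(getCodeQLURLVersion(
    'https://example.com/download/codeql-bundle-1.2.3-beta.1/codeql-bundle.tar.gz')); // '1.2.3-beta.1'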


@@ -1 +0,0 @@
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAC7B,+CAAiC;AAEjC,MAAa,WAAW;IAMtB,YAAY,UAAkB;QAC5B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC/B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACpB;SACF;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACvC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC3B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACxC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SACzB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC7D;IACH,CAAC;CACF;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC/B,IAAI;QACF,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAExD,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAC9D,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACrD;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,gBAAgB,CAAC,CAAC;SACtF;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE3D;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAC9D;AACH,CAAC;AAnBD,kCAmBC;AAED,SAAgB,mBAAmB,CAAC,GAAW;IAE7C,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;IAClD,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,wBAAwB,GAAG,iCAAiC,CAAC,CAAC;KAC/E;IAED,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAEvB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;QAC1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,OAAO,gEAAgE,OAAO,GAAG,CAAC,CAAC;QAChH,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC;KAC9B;IAED,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,iDAAiD,OAAO,UAAU,CAAC,CAAC;KAC/G;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AApBD,kDAoBC"}


@@ -1,60 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const setupTools = __importStar(require("./setup-tools"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env['GITHUB_WORKSPACE'] = tmpDir;
process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
const versions = ['20200601', '20200610'];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
nock_1.default('https://example.com')
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
await setupTools.setupCodeQL();
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
}
const cachedVersions = toolcache.findAllVersions('CodeQL');
t.is(cachedVersions.length, 2);
});
});
ava_1.default('parse codeql bundle url version', t => {
const tests = {
'20200601': '0.0.0-20200601',
'20200601.0': '0.0.0-20200601.0',
'20200601.0.0': '20200601.0.0',
'1.2.3': '1.2.3',
'1.2.3-alpha': '1.2.3-alpha',
'1.2.3-beta.1': '1.2.3-beta.1',
};
for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`;
try {
const parsedVersion = setupTools.getCodeQLURLVersion(url);
t.deepEqual(parsedVersion, expectedVersion);
}
catch (e) {
t.fail(e.message);
}
}
});
//# sourceMappingURL=setup-tools.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"setup-tools.test.js","sourceRoot":"","sources":["../src/setup-tools.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,0DAA4C;AAC5C,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,UAAU,CAAC,WAAW,EAAE,CAAC;YAE/B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YAC1D,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}

lib/setup-tracer.js generated

@@ -100,13 +100,12 @@ function concatTracerConfigs(configs) {
         totalCount += count;
         totalLines.push(...lines.slice(2));
     }
-    const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
-    const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
-    const spec = path.resolve(tempFolder, 'compound-spec');
-    const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
+    const newLogFilePath = path.resolve(util.workspaceFolder(), 'compound-build-tracer.log');
+    const spec = path.resolve(util.workspaceFolder(), 'compound-spec');
+    const tempFolder = path.resolve(util.workspaceFolder(), 'compound-temp');
     const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
     if (copyExecutables) {
-        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
+        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = tempFolder;
         envSize += 1;
     }
     fs.writeFileSync(spec, newSpecContent.join('\n'));
@@ -127,28 +126,22 @@ function concatTracerConfigs(configs) {
     return { env, spec };
 }
 async function run() {
-    let languages;
     try {
         if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
             return;
         }
-        core.startGroup('Load language configuration');
+        // The config file MUST be parsed in the init action
         const config = await configUtils.loadConfig();
-        languages = await util.getLanguages();
+        core.startGroup('Load language configuration');
+        const languages = await util.getLanguages();
         // If the languages parameter was not given and no languages were
         // detected then fail here as this is a workflow configuration error.
         if (languages.length === 0) {
-            throw new Error("Did not detect any languages to analyze. Please update input in workflow.");
+            core.setFailed("Did not detect any languages to analyze. Please update input in workflow.");
+            return;
         }
-        analysisPaths.includeAndExcludeAnalysisPaths(config, languages);
         core.endGroup();
-    }
-    catch (e) {
-        core.setFailed(e.message);
-        await util.reportActionAborted('init', e.message);
-        return;
-    }
-    try {
+        analysisPaths.includeAndExcludeAnalysisPaths(config, languages);
         const sourceRoot = path.resolve();
         core.startGroup('Setup CodeQL tools');
         const codeqlSetup = await setuptools.setupCodeQL();
@@ -163,7 +156,7 @@ async function run() {
         // Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
         const codeqlRam = process.env['CODEQL_RAM'] || '6500';
         core.exportVariable('CODEQL_RAM', codeqlRam);
-        const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
+        const databaseFolder = path.resolve(util.workspaceFolder(), 'codeql_databases');
         await io.mkdirP(databaseFolder);
         let tracedLanguages = {};
         let scannedLanguages = [];
@@ -171,13 +164,7 @@ async function run() {
         for (let language of languages) {
             const languageDatabase = path.join(databaseFolder, language);
             // Init language database
-            await exec.exec(codeqlSetup.cmd, [
-                'database',
-                'init',
-                languageDatabase,
-                '--language=' + language,
-                '--source-root=' + sourceRoot,
-            ]);
+            await exec.exec(codeqlSetup.cmd, ['database', 'init', languageDatabase, '--language=' + language, '--source-root=' + sourceRoot]);
             // TODO: add better detection of 'traced languages' instead of using a hard coded list
             if (['cpp', 'java', 'csharp'].includes(language)) {
                 const config = await tracerConfig(codeqlSetup, languageDatabase);
@@ -199,10 +186,8 @@ async function run() {
             core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeqlSetup.tools, 'osx64', 'libtrace.dylib'));
         }
         else if (process.platform === 'win32') {
-            await exec.exec('powershell', [
-                path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
-                path.resolve(codeqlSetup.tools, 'win64', 'tracer.exe'),
-            ], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
+            await exec.exec('powershell', [path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
+                path.resolve(codeqlSetup.tools, 'win64', 'tracer.exe')], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
         }
         else {
             core.exportVariable('LD_PRELOAD', path.join(codeqlSetup.tools, 'linux64', '${LIB}trace.so'));
@@ -227,4 +212,3 @@ run().catch(e => {
     core.setFailed("init action failed: " + e);
     console.log(e);
 });
-//# sourceMappingURL=setup-tracer.js.map
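For orientation, the compound spec file written above is plain text: line one is the shared build-tracer log path, line two the number of spec lines, followed by the spec lines collected from every traced language. Schematically, with made-up paths:

import * as fs from 'fs';
import * as path from 'path';

const workspace = '/home/runner/work';  // illustrative only
const newLogFilePath = path.resolve(workspace, 'compound-build-tracer.log');
const spec = path.resolve(workspace, 'compound-spec');
const totalLines = ['<spec lines gathered from each traced language>'];

// Log path, line count, then the concatenated spec lines.
fs.writeFileSync(spec, [newLogFilePath, totalLines.length.toString(10), ...totalLines].join('\n'));
// The build tracer is pointed at this file via ODASA_TRACER_CONFIGURATION.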

File diff suppressed because one or more lines are too long


@@ -3,7 +3,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.CODEQL_ACTION_CMD = 'CODEQL_ACTION_CMD';
 exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
 exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
-exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
 exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
 exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
 exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
@@ -15,4 +14,3 @@ exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
 exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
 // Populated when the init action completes successfully
 exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
-//# sourceMappingURL=shared-environment.js.map


@@ -1 +0,0 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,iBAAiB,GAAG,mBAAmB,CAAC;AACxC,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AACpD,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}

lib/test-utils.js generated

@@ -1,22 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function silenceDebugOutput(test) {
const typedTest = test;
typedTest.beforeEach(t => {
const processStdoutWrite = process.stdout.write.bind(process.stdout);
t.context.write = processStdoutWrite;
process.stdout.write = (str, encoding, cb) => {
// Core library will directly call process.stdout.write for commands
// We don't want :: commands to be executed by the runner during tests
if (!str.match(/^::/)) {
processStdoutWrite(str, encoding, cb);
}
return true;
};
});
typedTest.afterEach(t => {
process.stdout.write = t.context.write;
});
}
exports.silenceDebugOutput = silenceDebugOutput;
//# sourceMappingURL=test-utils.js.map
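Usage, as seen at the top of each compiled test file in this diff (testing_utils_1.silenceDebugOutput(ava_1.default)); in TypeScript source form that is roughly:

import test from 'ava';
import { silenceDebugOutput } from './test-utils';

// Install the stdout wrapper once per test file, so that '::' workflow
// commands written by @actions/core are not interpreted by the runner.
silenceDebugOutput(test);

test('does not leak workflow commands', t => {
    console.log('::warning::this line is swallowed by the wrapper');
    t.pass();
});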


@@ -1 +0,0 @@
{"version":3,"file":"test-utils.js","sourceRoot":"","sources":["../src/test-utils.ts"],"names":[],"mappings":";;AAEA,SAAgB,kBAAkB,CAAC,IAAwB;IACzD,MAAM,SAAS,GAAG,IAAmC,CAAC;IAEtD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACrB,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG,kBAAkB,CAAC;QACrC,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,GAAQ,EAAE,QAAc,EAAE,EAA0B,EAAE,EAAE;YAC5E,oEAAoE;YACpE,sEAAsE;YACtE,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACnB,kBAAkB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC;aACzC;YACD,OAAO,IAAI,CAAC;QAChB,CAAC,CAAC;IACN,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;QACpB,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3C,CAAC,CAAC,CAAC;AACL,CAAC;AAnBD,gDAmBC"}

lib/testing-utils.js generated

@@ -1,48 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function wrapOutput(context) {
// Function signature taken from Socket.write.
// Note there are two overloads:
// write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean;
// write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean;
return (chunk, encoding, cb) => {
// Work out which method overload we are in
if (cb === undefined && typeof encoding === 'function') {
cb = encoding;
encoding = undefined;
}
// Record the output
if (typeof chunk === 'string') {
context.testOutput += chunk;
}
else {
context.testOutput += new TextDecoder(encoding || 'utf-8').decode(chunk);
}
// Satisfy contract by calling callback when done
if (cb !== undefined && typeof cb === 'function') {
cb();
}
return true;
};
}
function silenceDebugOutput(test) {
const typedTest = test;
typedTest.beforeEach(t => {
t.context.testOutput = "";
const processStdoutWrite = process.stdout.write.bind(process.stdout);
t.context.stdoutWrite = processStdoutWrite;
process.stdout.write = wrapOutput(t.context);
const processStderrWrite = process.stderr.write.bind(process.stderr);
t.context.stderrWrite = processStderrWrite;
process.stderr.write = wrapOutput(t.context);
});
typedTest.afterEach.always(t => {
process.stdout.write = t.context.stdoutWrite;
process.stderr.write = t.context.stderrWrite;
if (!t.passed) {
process.stdout.write(t.context.testOutput);
}
});
}
exports.silenceDebugOutput = silenceDebugOutput;
//# sourceMappingURL=testing-utils.js.map


@@ -1 +0,0 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;AAIA,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,kBAAkB,CAAC,IAAwB;IACzD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAE1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;IACtD,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAE7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAvBD,gDAuBC"}

lib/tracer-env.js generated

@@ -18,4 +18,3 @@ for (let entry of Object.entries(process.env)) {
 }
 process.stdout.write(process.argv[2]);
 fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
-//# sourceMappingURL=tracer-env.js.map
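In source form the whole helper just snapshots (a filtered view of) process.env as JSON to the path given in argv[2]; a sketch, with the exact key filter elided since it is only recoverable from the source map:

import * as fs from 'fs';

const env: { [key: string]: string } = {};
for (const [key, value] of Object.entries(process.env)) {
    // The real helper also filters out some runner-internal keys here.
    if (typeof value !== 'undefined') {
        env[key] = value;
    }
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');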


@@ -1 +0,0 @@
{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC7C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACtF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KAClB;CACF;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}

lib/upload-lib.js generated

@@ -11,15 +11,28 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
+const http = __importStar(require("@actions/http-client"));
+const auth = __importStar(require("@actions/http-client/auth"));
+const io = __importStar(require("@actions/io"));
 const file_url_1 = __importDefault(require("file-url"));
 const fs = __importStar(require("fs"));
-const jsonschema = __importStar(require("jsonschema"));
 const path = __importStar(require("path"));
 const zlib_1 = __importDefault(require("zlib"));
-const api = __importStar(require("./api-client"));
 const fingerprints = __importStar(require("./fingerprints"));
 const sharedEnv = __importStar(require("./shared-environment"));
 const util = __importStar(require("./util"));
+// Construct the location of the sentinel file for detecting multiple uploads.
+// The returned location should be writable.
+async function getSentinelFilePath() {
+    // Use the temp dir instead of placing next to the sarif file because of
+    // issues with docker actions. The directory containing the sarif file
+    // may not be writable by us.
+    const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
+    await io.mkdirP(uploadsTmpDir);
+    // Hash the absolute path so we'll behave correctly in the unlikely
+    // scenario a file is referenced twice with different paths.
+    return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
+}
 // Takes a list of paths to sarif files and combines them together,
 // returning the contents of the combined sarif file.
 function combineSarifFiles(sarifFiles) {
@@ -45,33 +58,27 @@ exports.combineSarifFiles = combineSarifFiles;
 // If the request fails then this will retry a small number of times.
 async function uploadPayload(payload) {
     core.info('Uploading results');
-    // If in test mode we don't want to upload the results
-    const testMode = process.env['TEST_MODE'] === 'true' || false;
-    if (testMode) {
-        return true;
-    }
-    const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
+    const githubToken = core.getInput('token');
+    const ph = new auth.BearerCredentialHandler(githubToken);
+    const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
+    const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';
     // Make up to 4 attempts to upload, and sleep for these
     // number of seconds between each attempt.
     // We don't want to backoff too much to avoid wasting action
     // minutes, but just waiting a little bit could maybe help.
     const backoffPeriods = [1, 5, 15];
     for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
-        const response = await api.client.request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
-            owner: owner,
-            repo: repo,
-            data: payload,
-        }));
-        core.debug('response status: ' + response.status);
-        const statusCode = response.status;
+        const res = await client.put(url, payload);
+        core.debug('response status: ' + res.message.statusCode);
+        const statusCode = res.message.statusCode;
         if (statusCode === 202) {
             core.info("Successfully uploaded results");
             return true;
         }
-        const requestID = response.headers["x-github-request-id"];
+        const requestID = res.message.headers["x-github-request-id"];
         // On any other status code that's not 5xx mark the upload as failed
         if (!statusCode || statusCode < 500 || statusCode >= 600) {
-            core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
+            core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + await res.readBody());
             return false;
         }
         // On a 5xx status code we may retry the request
@@ -79,7 +86,7 @@ async function uploadPayload(payload) {
         // Log the failure as a warning but don't mark the action as failed yet
         core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
             ') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
-            ' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
+            ' seconds: (' + statusCode + ') ' + await res.readBody());
         // Sleep for the backoff period
         await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
         continue;
@@ -88,7 +95,7 @@ async function uploadPayload(payload) {
     // If the upload fails with 5xx then we assume it is a temporary problem
     // and not an error that the user has caused or can fix.
     // We avoid marking the job as failed to avoid breaking CI workflows.
-    core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
+    core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + await res.readBody());
     return false;
 }
@@ -113,63 +120,25 @@ async function upload(input) {
     }
 }
 exports.upload = upload;
-// Counts the number of results in the given SARIF file
-function countResultsInSarif(sarif) {
-    let numResults = 0;
-    for (const run of JSON.parse(sarif).runs) {
-        numResults += run.results.length;
-    }
-    return numResults;
-}
-exports.countResultsInSarif = countResultsInSarif;
-// Validates that the given file path refers to a valid SARIF file.
-// Returns a non-empty list of error message if the file is invalid,
-// otherwise returns the empty list if the file is valid.
-function validateSarifFileSchema(sarifFilePath) {
-    const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
-    const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
-    const result = new jsonschema.Validator().validate(sarif, schema);
-    if (result.valid) {
-        return true;
-    }
-    else {
-        // Set the failure message to the stacks of all the errors.
-        // This should be of a manageable size and may even give enough to fix the error.
-        const errorMessages = result.errors.map(e => "- " + e.stack);
-        core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));
-        // Also output the more verbose error messages in groups as these may be very large.
-        for (const error of result.errors) {
-            core.startGroup("Error details: " + error.stack);
-            core.info(JSON.stringify(error, null, 2));
-            core.endGroup();
-        }
-        return false;
-    }
-}
-exports.validateSarifFileSchema = validateSarifFileSchema;
 // Uploads the given set of sarif files.
 // Returns true iff the upload occurred and succeeded
 async function uploadFiles(sarifFiles) {
     core.startGroup("Uploading results");
-    core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
-    const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
-    if (process.env[sentinelEnvVar]) {
-        core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
+    let succeeded = false;
+    // Check if an upload has happened before. If so then abort.
+    // This is intended to catch when the finish and upload-sarif actions
+    // are used together, and then the upload-sarif action is invoked twice.
+    const sentinelFile = await getSentinelFilePath();
+    if (fs.existsSync(sentinelFile)) {
+        core.info("Aborting as an upload has already happened from this job");
         return false;
     }
-    core.exportVariable(sentinelEnvVar, sentinelEnvVar);
-    // Validate that the files we were asked to upload are all valid SARIF files
-    for (const file of sarifFiles) {
-        if (!validateSarifFileSchema(file)) {
-            return false;
-        }
-    }
-    const commitOid = await util.getCommitOid();
+    const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
     const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
-    const ref = util.getRef();
-    const analysisKey = await util.getAnalysisKey();
+    const ref = util.getRequiredEnvParam('GITHUB_REF'); // it's in the form "refs/heads/master"
     const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
     const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
+    core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
     let sarifPayload = combineSarifFiles(sarifFiles);
     sarifPayload = fingerprints.addFingerprints(sarifPayload);
     const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
@@ -188,7 +157,6 @@ async function uploadFiles(sarifFiles) {
     const payload = JSON.stringify({
         "commit_oid": commitOid,
         "ref": ref,
-        "analysis_key": analysisKey,
         "analysis_name": analysisName,
         "sarif": zipped_sarif,
         "workflow_run_id": workflowRunID,
@@ -197,13 +165,10 @@ async function uploadFiles(sarifFiles) {
         "started_at": startedAt,
         "tool_names": toolNames,
     });
-    // Log some useful debug info about the info
-    core.debug("Raw upload size: " + sarifPayload.length + " bytes");
-    core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
-    core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
     // Make the upload
-    const succeeded = await uploadPayload(payload);
+    succeeded = await uploadPayload(payload);
+    // Mark that we have made an upload
+    fs.writeFileSync(sentinelFile, '');
     core.endGroup();
     return succeeded;
 }
-//# sourceMappingURL=upload-lib.js.map
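The retry schedule in uploadPayload is worth isolating: up to four attempts with 1s/5s/15s pauses (kept short to avoid burning action minutes), retrying only on 5xx responses. A sketch, with send standing in for the actual PUT request:

async function withRetries(send: () => Promise<number>): Promise<boolean> {
    const backoffPeriods = [1, 5, 15];
    for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
        const statusCode = await send();
        if (statusCode === 202) {
            return true;  // accepted
        }
        if (statusCode < 500 || statusCode >= 600 || attempt === backoffPeriods.length) {
            return false; // permanent failure, or out of retries
        }
        await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
    }
    return false;
}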

File diff suppressed because one or more lines are too long

lib/upload-lib.test.js generated

@@ -1,27 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
t.true(uploadLib.validateSarifFileSchema(inputFile));
});
ava_1.default('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
t.false(uploadLib.validateSarifFileSchema(inputFile));
// validateSarifFileSchema calls core.setFailed which sets the exit code on error
process.exitCode = 0;
});
//# sourceMappingURL=upload-lib.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAAmD;AACnD,wDAA0C;AAE1C,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;IACtD,iFAAiF;IACjF,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}

lib/upload-sarif.js generated

@@ -32,4 +32,3 @@ run().catch(e => {
     core.setFailed("codeql/upload-sarif action failed: " + e);
     console.log(e);
 });
-//# sourceMappingURL=upload-sarif.js.map


@@ -1 +0,0 @@
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,GAAG;IAChB,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,EAAE;QAChG,OAAO;KACR;IAED,IAAI;QACF,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE;YACxD,MAAM,IAAI,CAAC,qBAAqB,CAAC,cAAc,CAAC,CAAC;SAClD;aAAM;YACL,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;SACzD;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1E,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

lib/util.js generated

@@ -6,13 +6,16 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod; result["default"] = mod;
return result; return result;
}; };
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core")); const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec")); const http = __importStar(require("@actions/http-client"));
const fs = __importStar(require("fs")); const auth = __importStar(require("@actions/http-client/auth"));
const os = __importStar(require("os")); const octokit = __importStar(require("@octokit/rest"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment")); const sharedEnv = __importStar(require("./shared-environment"));
/** /**
* Should the current action be aborted? * Should the current action be aborted?
@@ -28,6 +31,12 @@ function should_abort(actionName, requireInitActionHasRun) {
         core.setFailed('GITHUB_REF must be set.');
         return true;
     }
+    // Should abort if called on a merge commit for a pull request.
+    if (ref.startsWith('refs/pull/')) {
+        core.warning('The CodeQL ' + actionName + ' action is intended for workflows triggered on `push` events, '
+            + 'but the current workflow is running on a pull request. Aborting.');
+        return true;
+    }
     // If the init action is required, then check that it completed successfully.
     if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
         core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
@@ -36,12 +45,22 @@ function should_abort(actionName, requireInitActionHasRun) {
     return false;
 }
 exports.should_abort = should_abort;
+/**
+ * Resolve the path to the workspace folder.
+ */
+function workspaceFolder() {
+    let workspaceFolder = process.env['RUNNER_WORKSPACE'];
+    if (!workspaceFolder)
+        workspaceFolder = path.resolve('..');
+    return workspaceFolder;
+}
+exports.workspaceFolder = workspaceFolder;
 /**
  * Get an environment parameter, but throw an error if it is not set.
  */
 function getRequiredEnvParam(paramName) {
     const value = process.env[paramName];
-    if (value === undefined || value.length === 0) {
+    if (value === undefined) {
         throw new Error(paramName + ' environment variable must be set');
     }
     core.debug(paramName + '=' + value);
@@ -69,7 +88,12 @@ async function getLanguagesInRepo() {
     let owner = repo_nwo[0];
     let repo = repo_nwo[1];
     core.debug(`GitHub repo ${owner} ${repo}`);
-    const response = await api.client.request("GET /repos/:owner/:repo/languages", ({
+    let ok = new octokit.Octokit({
+        auth: core.getInput('token'),
+        userAgent: "CodeQL Action",
+        log: console_log_level_1.default({ level: "debug" })
+    });
+    const response = await ok.request("GET /repos/:owner/:repo/languages", ({
         owner,
         repo
     }));
@@ -125,90 +149,6 @@ async function getLanguages() {
return languages; return languages;
} }
exports.getLanguages = getLanguages; exports.getLanguages = getLanguages;
/**
* Gets the SHA of the commit that is currently checked out.
*/
async function getCommitOid() {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
// the workflow has changed the current commit to the head commit instead of
// the merge commit, which must mean that git is available.
// Even if this does go wrong, it's not a huge problem for the alerts to
// reported on the merge commit.
try {
let commitOid = '';
await exec.exec('git', ['rev-parse', 'HEAD'], {
silent: true,
listeners: {
stdout: (data) => { commitOid += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
});
return commitOid.trim();
}
catch (e) {
core.info("Failed to call git to get current commit. Continuing with data from environment: " + e);
return getRequiredEnvParam('GITHUB_SHA');
}
}
exports.getCommitOid = getCommitOid;
/**
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath() {
const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam('GITHUB_RUN_ID'));
const runsResponse = await api.client.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
owner,
repo,
run_id
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await api.client.request('GET ' + workflowUrl);
return workflowResponse.data.path;
}
/**
 * Get the analysis key parameter for the current job.
*
* This will combine the workflow path and current job name.
* Computing this the first time requires making requests to
* the github API, but after that the result will be cached.
*/
async function getAnalysisKey() {
let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam('GITHUB_JOB');
analysisKey = workflowPath + ':' + jobName;
core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
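// A worked example of the key format computed above (hypothetical values):
// for a workflow file at ".github/workflows/codeql-analysis.yml" and a job
// named "analyze", getAnalysisKey() returns
// ".github/workflows/codeql-analysis.yml:analyze".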
/**
* Get the ref currently being analyzed.
*/
function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam('GITHUB_REF');
// For pull request refs we want to convert from the 'merge' ref
// to the 'head' ref, as that is what we want to analyse.
// There should have been some code earlier in the workflow to do
// the checkout, but we have no way of verifying that here.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
if (pull_ref_regex.test(ref)) {
return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
}
else {
return ref;
}
}
exports.getRef = getRef;
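// A minimal sketch of the conversion getRef() performs, using hypothetical refs:
const demoRegex = /refs\/pull\/(\d+)\/merge/;
console.log('refs/pull/123/merge'.replace(demoRegex, 'refs/pull/$1/head')); // refs/pull/123/head
console.log('refs/heads/master'.replace(demoRegex, 'refs/pull/$1/head'));   // unchanged: refs/heads/master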
 /**
  * Compose a StatusReport.
  *
@@ -219,7 +159,6 @@ exports.getRef = getRef;
  */
 async function createStatusReport(actionName, status, cause, exception) {
     const commitOid = process.env['GITHUB_SHA'] || '';
-    const ref = getRef();
     const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
     let workflowRunID = -1;
     if (workflowRunIDStr) {
@@ -236,7 +175,6 @@ async function createStatusReport(actionName, status, cause, exception) {
         job_name: jobName,
         languages: languages,
         commit_oid: commitOid,
-        ref: ref,
         action_name: actionName,
         action_oid: "unknown",
         started_at: startedAt,
@@ -249,7 +187,7 @@ async function createStatusReport(actionName, status, cause, exception) {
     if (exception) {
         statusReport.exception = exception;
     }
-    if (status === 'success' || status === 'failure' || status === 'aborted') {
+    if (status === 'success' || status === 'failure') {
         statusReport.completed_at = new Date().toISOString();
     }
     let matrix = core.getInput('matrix');
@@ -261,19 +199,21 @@ async function createStatusReport(actionName, status, cause, exception) {
 /**
  * Send a status report to the code_scanning/analysis/status endpoint.
  *
- * Returns the status code of the response to the status request.
+ * Returns the status code of the response to the status request, or
+ * undefined if the given statusReport is undefined or no response was
+ * received.
  */
 async function sendStatusReport(statusReport) {
+    var _a;
     const statusReportJSON = JSON.stringify(statusReport);
     core.debug('Sending status report: ' + statusReportJSON);
-    const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
-    const [owner, repo] = nwo.split("/");
-    const statusResponse = await api.client.request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
-        owner: owner,
-        repo: repo,
-        data: statusReportJSON,
-    });
-    return statusResponse.status;
+    const githubToken = core.getInput('token');
+    const ph = new auth.BearerCredentialHandler(githubToken);
+    const client = new http.HttpClient('Code Scanning : Status Report', [ph]);
+    const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY']
+        + '/code-scanning/analysis/status';
+    const res = await client.put(url, statusReportJSON);
+    return (_a = res.message) === null || _a === void 0 ? void 0 : _a.statusCode;
 }
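// A sketch of the payload this PUT carries, limited to fields visible in
// createStatusReport above (all values hypothetical):
//   { "job_name": "analyze", "languages": "javascript",
//     "commit_oid": "<sha of the analyzed commit>",
//     "action_name": "init", "action_oid": "unknown",
//     "started_at": "2020-05-06T13:00:00.000Z", "status": "success",
//     "completed_at": "2020-05-06T13:05:00.000Z" }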
/**
 * Send a status report that an action is starting.
@@ -322,16 +262,6 @@ async function reportActionSucceeded(action) {
    await sendStatusReport(await createStatusReport(action, 'success'));
}
exports.reportActionSucceeded = reportActionSucceeded;
/**
* Report that an action has been aborted.
*
* Note that the started_at date is always that of the `init` action, since
* this is likely to give a more useful duration when inspecting events.
*/
async function reportActionAborted(action, cause) {
await sendStatusReport(await createStatusReport(action, 'aborted', cause));
}
exports.reportActionAborted = reportActionAborted;
/**
 * Get the array of all the tool names contained in the given sarif contents.
 *
@@ -350,67 +280,3 @@ function getToolNames(sarifContents) {
    return Object.keys(toolNames);
}
exports.getToolNames = getToolNames;
// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
async function withTmpDir(body) {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
const realSubdir = path.join(tmpDir, 'real');
fs.mkdirSync(realSubdir);
const symlinkSubdir = path.join(tmpDir, 'symlink');
fs.symlinkSync(realSubdir, symlinkSubdir, 'dir');
const result = await body(symlinkSubdir);
fs.rmdirSync(tmpDir, { recursive: true });
return result;
}
exports.withTmpDir = withTmpDir;
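// A minimal usage sketch for withTmpDir (hypothetical body): the callback
// receives the symlinked path, and the whole tree is deleted once it resolves.
//   const fileCount = await withTmpDir(async tmpDir => {
//       fs.writeFileSync(path.join(tmpDir, 'example.txt'), 'hello');
//       return fs.readdirSync(tmpDir).length; // 1
//   });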
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus 256 MB.
*
* @returns string
*/
function getMemoryFlag() {
let memoryToUseMegaBytes;
const memoryToUseString = core.getInput("ram");
if (memoryToUseString) {
memoryToUseMegaBytes = Number(memoryToUseString);
if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
}
}
else {
const totalMemoryBytes = os.totalmem();
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const systemReservedMemoryMegaBytes = 256;
memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
}
return "--ram=" + Math.floor(memoryToUseMegaBytes);
}
exports.getMemoryFlag = getMemoryFlag;
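// Worked examples for getMemoryFlag(), assuming a runner with 8192 MB total memory:
//   no "ram" input -> "--ram=7936"  (total minus the 256 MB reserve)
//   ram: "4096"    -> "--ram=4096"
//   ram: "0"       -> throws, since the value must be a positive number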
/**
* Get the codeql `--threads` value specified for the `threads` input. The value
* defaults to 1. The value will be capped to the number of available CPUs.
*
* @returns string
*/
function getThreadsFlag() {
let numThreads = 1;
const numThreadsString = core.getInput("threads");
if (numThreadsString) {
numThreads = Number(numThreadsString);
if (Number.isNaN(numThreads)) {
throw new Error(`Invalid threads setting "${numThreadsString}", specified.`);
}
const maxThreads = os.cpus().length;
if (numThreads > maxThreads) {
numThreads = maxThreads;
}
const minThreads = -maxThreads;
if (numThreads < minThreads) {
numThreads = minThreads;
}
}
return `--threads=${numThreads}`;
}
exports.getThreadsFlag = getThreadsFlag;
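// Worked examples for getThreadsFlag(), assuming os.cpus().length === 4:
//   no "threads" input -> "--threads=1"   (the default)
//   threads: "8"       -> "--threads=4"   (capped at the CPU count)
//   threads: "-8"      -> "--threads=-4"  (capped at minus the CPU count)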
//# sourceMappingURL=util.js.map

File diff suppressed because one or more lines are too long

lib/util.test.js generated

@@ -1,64 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('getToolNames', t => {
const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
const toolNames = util.getToolNames(input);
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
ava_1.default('getMemoryFlag() should return the correct --ram flag', t => {
const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
const tests = {
"": `--ram=${totalMem - 256}`,
"512": "--ram=512",
};
for (const [input, expectedFlag] of Object.entries(tests)) {
process.env['INPUT_RAM'] = input;
const flag = util.getMemoryFlag();
t.deepEqual(flag, expectedFlag);
}
});
ava_1.default('getMemoryFlag() throws if the ram input is < 0 or NaN', t => {
for (const input of ["-1", "hello!"]) {
process.env['INPUT_RAM'] = input;
t.throws(util.getMemoryFlag);
}
});
ava_1.default('getThreadsFlag() should return the correct --threads flag', t => {
const numCpus = os.cpus().length;
const tests = {
"0": "--threads=0",
"1": "--threads=1",
[`${numCpus + 1}`]: `--threads=${numCpus}`,
[`${-numCpus - 1}`]: `--threads=${-numCpus}`
};
for (const [input, expectedFlag] of Object.entries(tests)) {
process.env['INPUT_THREADS'] = input;
const flag = util.getThreadsFlag();
t.deepEqual(flag, expectedFlag);
}
});
ava_1.default('getThreadsFlag() throws if the threads input is not an integer', t => {
process.env['INPUT_THREADS'] = "hello!";
t.throws(util.getThreadsFlag);
});
ava_1.default('getRef() throws on the empty string', t => {
process.env["GITHUB_REF"] = "";
t.throws(util.getRef);
});
//# sourceMappingURL=util.test.js.map

lib/util.test.js.map generated

@@ -1 +0,0 @@
(source map contents omitted)

node_modules/.bin/atob generated vendored Symbolic link

@@ -0,0 +1 @@
../atob/bin/atob.js

node_modules/.bin/ava generated vendored

@@ -1 +0,0 @@
../ava/cli.js

node_modules/.bin/escodegen generated vendored Symbolic link

@@ -0,0 +1 @@
../escodegen/bin/escodegen.js

node_modules/.bin/esgenerate generated vendored Symbolic link

@@ -0,0 +1 @@
../escodegen/bin/esgenerate.js

node_modules/.bin/esparse generated vendored Symbolic link

@@ -0,0 +1 @@
../esprima/bin/esparse.js

node_modules/.bin/esvalidate generated vendored Symbolic link

@@ -0,0 +1 @@
../esprima/bin/esvalidate.js

node_modules/.bin/jest generated vendored Symbolic link

@@ -0,0 +1 @@
../jest/bin/jest.js

node_modules/.bin/jest-runtime generated vendored Symbolic link

@@ -0,0 +1 @@
../jest-runtime/bin/jest-runtime.js

node_modules/.bin/jsesc generated vendored Symbolic link

@@ -0,0 +1 @@
../jsesc/bin/jsesc

node_modules/.bin/json5 generated vendored Symbolic link

@@ -0,0 +1 @@
../json5/lib/cli.js

node_modules/.bin/parser generated vendored Symbolic link

@@ -0,0 +1 @@
../@babel/parser/bin/babel-parser.js

node_modules/.bin/rc generated vendored

@@ -1 +0,0 @@
../rc/cli.js

node_modules/.bin/removeNPMAbsolutePaths generated vendored

@@ -1 +0,0 @@
../removeNPMAbsolutePaths/bin/removeNPMAbsolutePaths

node_modules/.bin/sane generated vendored Symbolic link

@@ -0,0 +1 @@
../sane/src/cli.js

node_modules/.bin/semver generated vendored

@@ -1 +1 @@
-../semver/bin/semver.js
+../semver/bin/semver

node_modules/.bin/sshpk-conv generated vendored Symbolic link

@@ -0,0 +1 @@
../sshpk/bin/sshpk-conv

node_modules/.bin/sshpk-sign generated vendored Symbolic link

@@ -0,0 +1 @@
../sshpk/bin/sshpk-sign

node_modules/.bin/sshpk-verify generated vendored Symbolic link

@@ -0,0 +1 @@
../sshpk/bin/sshpk-verify

node_modules/.bin/ts-jest generated vendored Symbolic link

@@ -0,0 +1 @@
../ts-jest/cli.js

node_modules/.bin/watch generated vendored Symbolic link

@@ -0,0 +1 @@
../@cnakazawa/watch/cli.js

node_modules/@actions/http-client/README.md

@@ -18,8 +18,6 @@ A lightweight HTTP client optimized for use with actions, TypeScript with generics
 - Basic, Bearer and PAT Support out of the box. Extensible handlers for others.
 - Redirects supported
-Features and releases [here](./RELEASES.md)
 ## Install
 ```
@@ -51,11 +49,7 @@ export NODE_DEBUG=http
 ## Node support
-The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node12+.
+The http-client is built using the latest LTS version of Node 12. We also support the latest LTS for Node 6, 8 and Node 10.
-## Support and Versioning
-We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat).
 ## Contributing

node_modules/@actions/http-client/RELEASES.md

@@ -1,16 +0,0 @@
## Releases
## 1.0.7
Update NPM dependencies and add 429 to the list of HttpCodes
## 1.0.6
Automatically sends Content-Type and Accept application/json headers for \<verb>Json() helper methods if not set in the client or parameters.
## 1.0.5
Adds \<verb>Json() helper methods for json over http scenarios.
## 1.0.4
Started to add \<verb>Json() helper methods. Do not use this release for that. Use >= 1.0.5 since there was an issue with types.
## 1.0.1 to 1.0.3
Adds proxy support.

node_modules/@actions/http-client/auth.js

@@ -6,9 +6,7 @@ class BasicCredentialHandler {
         this.password = password;
     }
     prepareRequest(options) {
-        options.headers['Authorization'] =
-            'Basic ' +
-            Buffer.from(this.username + ':' + this.password).toString('base64');
+        options.headers['Authorization'] = 'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64');
     }
     // This handler cannot handle 401
     canHandleAuthentication(response) {
@@ -44,8 +42,7 @@ class PersonalAccessTokenCredentialHandler {
     // currently implements pre-authorization
     // TODO: support preAuth = false where it hooks on 401
     prepareRequest(options) {
-        options.headers['Authorization'] =
-            'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
+        options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
     }
     // This handler cannot handle 401
     canHandleAuthentication(response) {
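// A worked example of the scheme used by both handlers (hypothetical
// credentials): Buffer.from('user:pass').toString('base64') === 'dXNlcjpwYXNz',
// so the header sent is "Authorization: Basic dXNlcjpwYXNz".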

node_modules/@actions/http-client/index.d.ts

@@ -1,5 +1,5 @@
 /// <reference types="node" />
-import http = require('http');
+import http = require("http");
 import ifm = require('./interfaces');
 export declare enum HttpCodes {
     OK = 200,
@@ -23,20 +23,12 @@ export declare enum HttpCodes {
     RequestTimeout = 408,
     Conflict = 409,
     Gone = 410,
-    TooManyRequests = 429,
     InternalServerError = 500,
     NotImplemented = 501,
     BadGateway = 502,
     ServiceUnavailable = 503,
     GatewayTimeout = 504
 }
-export declare enum Headers {
-    Accept = "accept",
-    ContentType = "content-type"
-}
-export declare enum MediaTypes {
-    ApplicationJson = "application/json"
-}
 /**
  * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
  * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -47,6 +39,11 @@ export declare class HttpClientResponse implements ifm.IHttpClientResponse {
     message: http.IncomingMessage;
     readBody(): Promise<string>;
 }
+export interface ITypedResponse<T> {
+    statusCode: number;
+    result: T | null;
+    headers: Object;
+}
 export declare function isHttps(requestUrl: string): boolean;
 export declare class HttpClient {
     userAgent: string | undefined;
@@ -76,10 +73,10 @@ export declare class HttpClient {
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
-    getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
-    postJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
-    putJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
-    patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
+    getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
+    postJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
+    putJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
+    patchJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
     /**
      * Makes a raw http request.
      * All other methods such as get, post, patch, and request ultimately call this.
@@ -111,7 +108,6 @@ export declare class HttpClient {
     getAgent(serverUrl: string): http.Agent;
     private _prepareRequest;
     private _mergeHeaders;
-    private _getExistingOrDefaultHeader;
     private _getAgent;
     private _performExponentialBackoff;
     private static dateTimeDeserializer;

node_modules/@actions/http-client/index.js

@@ -28,22 +28,12 @@ var HttpCodes;
     HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
     HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
     HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
-    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
     HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
     HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
     HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
     HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
     HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
 })(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
-var Headers;
-(function (Headers) {
-    Headers["Accept"] = "accept";
-    Headers["ContentType"] = "content-type";
-})(Headers = exports.Headers || (exports.Headers = {}));
-var MediaTypes;
-(function (MediaTypes) {
-    MediaTypes["ApplicationJson"] = "application/json";
-})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
 /**
  * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
  * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -53,18 +43,8 @@ function getProxyUrl(serverUrl) {
     return proxyUrl ? proxyUrl.href : '';
 }
 exports.getProxyUrl = getProxyUrl;
-const HttpRedirectCodes = [
-    HttpCodes.MovedPermanently,
-    HttpCodes.ResourceMoved,
-    HttpCodes.SeeOther,
-    HttpCodes.TemporaryRedirect,
-    HttpCodes.PermanentRedirect
-];
-const HttpResponseRetryCodes = [
-    HttpCodes.BadGateway,
-    HttpCodes.ServiceUnavailable,
-    HttpCodes.GatewayTimeout
-];
+const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
+const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
 const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
 const ExponentialBackoffCeiling = 10;
 const ExponentialBackoffTimeSlice = 5;
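// A sketch of how these two constants are conventionally combined (an
// assumption; _performExponentialBackoff itself is not shown in this diff):
//   const capped = Math.min(ExponentialBackoffCeiling, retryNumber);
//   const ms = ExponentialBackoffTimeSlice * Math.pow(2, capped);
// i.e. sleeps of 5, 10, 20, ... milliseconds, capped at 5 * 2^10.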
@@ -156,29 +136,22 @@ class HttpClient {
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
-    async getJson(requestUrl, additionalHeaders = {}) {
-        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+    async getJson(requestUrl, additionalHeaders) {
         let res = await this.get(requestUrl, additionalHeaders);
         return this._processResponse(res, this.requestOptions);
     }
-    async postJson(requestUrl, obj, additionalHeaders = {}) {
+    async postJson(requestUrl, obj, additionalHeaders) {
         let data = JSON.stringify(obj, null, 2);
-        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
-        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
         let res = await this.post(requestUrl, data, additionalHeaders);
         return this._processResponse(res, this.requestOptions);
     }
-    async putJson(requestUrl, obj, additionalHeaders = {}) {
+    async putJson(requestUrl, obj, additionalHeaders) {
         let data = JSON.stringify(obj, null, 2);
-        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
-        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
         let res = await this.put(requestUrl, data, additionalHeaders);
         return this._processResponse(res, this.requestOptions);
     }
-    async patchJson(requestUrl, obj, additionalHeaders = {}) {
+    async patchJson(requestUrl, obj, additionalHeaders) {
         let data = JSON.stringify(obj, null, 2);
-        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
-        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
         let res = await this.patch(requestUrl, data, additionalHeaders);
         return this._processResponse(res, this.requestOptions);
     }
@@ -189,22 +162,18 @@ class HttpClient {
     */
     async request(verb, requestUrl, data, headers) {
         if (this._disposed) {
-            throw new Error('Client has already been disposed.');
+            throw new Error("Client has already been disposed.");
         }
         let parsedUrl = url.parse(requestUrl);
         let info = this._prepareRequest(verb, parsedUrl, headers);
         // Only perform retries on reads since writes may not be idempotent.
-        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
-            ? this._maxRetries + 1
-            : 1;
+        let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
         let numTries = 0;
         let response;
         while (numTries < maxTries) {
             response = await this.requestRaw(info, data);
             // Check if it's an authentication challenge
-            if (response &&
-                response.message &&
-                response.message.statusCode === HttpCodes.Unauthorized) {
+            if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
                 let authenticationHandler;
                 for (let i = 0; i < this.handlers.length; i++) {
                     if (this.handlers[i].canHandleAuthentication(response)) {
@@ -222,32 +191,21 @@ class HttpClient {
             }
         }
         let redirectsRemaining = this._maxRedirects;
-        while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
-            this._allowRedirects &&
-            redirectsRemaining > 0) {
-            const redirectUrl = response.message.headers['location'];
+        while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
+            && this._allowRedirects
+            && redirectsRemaining > 0) {
+            const redirectUrl = response.message.headers["location"];
             if (!redirectUrl) {
                 // if there's no location to redirect to, we won't
                 break;
             }
             let parsedRedirectUrl = url.parse(redirectUrl);
-            if (parsedUrl.protocol == 'https:' &&
-                parsedUrl.protocol != parsedRedirectUrl.protocol &&
-                !this._allowRedirectDowngrade) {
-                throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
+            if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) {
+                throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.");
             }
             // we need to finish reading the response before reassigning response
             // which will leak the open socket.
             await response.readBody();
-            // strip authorization header if redirected to a different hostname
-            if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
-                for (let header in headers) {
-                    // header names are case insensitive
-                    if (header.toLowerCase() === 'authorization') {
-                        delete headers[header];
-                    }
-                }
-            }
             // let's make the request with the new redirectUrl
             info = this._prepareRequest(verb, parsedRedirectUrl, headers);
             response = await this.requestRaw(info, data);
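// A worked example of the hostname check above (hypothetical hosts): a redirect
// from https://api.example.com to https://cdn.example.net changes hostname, so
// the authorization header is dropped before the request is re-sent; a redirect
// to another path on api.example.com keeps it.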
@@ -298,8 +256,8 @@ class HttpClient {
     */
     requestRawWithCallback(info, data, onResult) {
         let socket;
-        if (typeof data === 'string') {
-            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
+        if (typeof (data) === 'string') {
+            info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
         }
         let callbackCalled = false;
         let handleResult = (err, res) => {
@@ -312,7 +270,7 @@ class HttpClient {
             let res = new HttpClientResponse(msg);
             handleResult(null, res);
         });
-        req.on('socket', sock => {
+        req.on('socket', (sock) => {
             socket = sock;
         });
         // If we ever get disconnected, we want the socket to timeout eventually
@@ -327,10 +285,10 @@ class HttpClient {
             // res should have headers
             handleResult(err, null);
         });
-        if (data && typeof data === 'string') {
+        if (data && typeof (data) === 'string') {
             req.write(data, 'utf8');
         }
-        if (data && typeof data !== 'string') {
+        if (data && typeof (data) !== 'string') {
             data.on('close', function () {
                 req.end();
             });
@@ -357,40 +315,29 @@ class HttpClient {
         const defaultPort = usingSsl ? 443 : 80;
         info.options = {};
         info.options.host = info.parsedUrl.hostname;
-        info.options.port = info.parsedUrl.port
-            ? parseInt(info.parsedUrl.port)
-            : defaultPort;
-        info.options.path =
-            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
+        info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
+        info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
         info.options.method = method;
         info.options.headers = this._mergeHeaders(headers);
         if (this.userAgent != null) {
-            info.options.headers['user-agent'] = this.userAgent;
+            info.options.headers["user-agent"] = this.userAgent;
         }
         info.options.agent = this._getAgent(info.parsedUrl);
         // gives handlers an opportunity to participate
         if (this.handlers) {
-            this.handlers.forEach(handler => {
+            this.handlers.forEach((handler) => {
                 handler.prepareRequest(info.options);
             });
         }
         return info;
     }
     _mergeHeaders(headers) {
-        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
         if (this.requestOptions && this.requestOptions.headers) {
             return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
         }
         return lowercaseKeys(headers || {});
     }
-    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
-        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
-        let clientHeader;
-        if (this.requestOptions && this.requestOptions.headers) {
-            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
-        }
-        return additionalHeaders[header] || clientHeader || _default;
-    }
     _getAgent(parsedUrl) {
         let agent;
         let proxyUrl = pm.getProxyUrl(parsedUrl);
@@ -422,7 +369,7 @@ class HttpClient {
                 proxyAuth: proxyUrl.auth,
                 host: proxyUrl.hostname,
                 port: proxyUrl.port
-            }
+            },
         };
         let tunnelAgent;
         const overHttps = proxyUrl.protocol === 'https:';
@@ -449,9 +396,7 @@ class HttpClient {
             // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
             // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
             // we have to cast it to any and change it directly
-            agent.options = Object.assign(agent.options || {}, {
-                rejectUnauthorized: false
-            });
+            agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
         }
         return agent;
     }
@@ -512,7 +457,7 @@ class HttpClient {
             msg = contents;
         }
         else {
-            msg = 'Failed request: (' + statusCode + ')';
+            msg = "Failed request: (" + statusCode + ")";
         }
         let err = new Error(msg);
         // attach statusCode and body obj (if available) to the error object

node_modules/@actions/http-client/interfaces.d.ts

@@ -1,6 +1,6 @@
 /// <reference types="node" />
-import http = require('http');
-import url = require('url');
+import http = require("http");
+import url = require("url");
 export interface IHeaders {
     [key: string]: any;
 }
@@ -43,8 +43,3 @@ export interface IRequestOptions {
     allowRetries?: boolean;
     maxRetries?: number;
 }
-export interface ITypedResponse<T> {
-    statusCode: number;
-    result: T | null;
-    headers: Object;
-}

node_modules/@actions/http-client/interfaces.js

@@ -1,2 +1,3 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+;

node_modules/tunnel/package.json

@@ -1,7 +1,25 @@
 {
-  "name": "tunnel",
-  "version": "0.0.6",
+  "author": {
+    "name": "Koichi Kobayashi",
+    "email": "koichik@improvement.jp"
+  },
+  "bugs": {
+    "url": "https://github.com/koichik/node-tunnel/issues"
+  },
+  "bundleDependencies": false,
+  "deprecated": false,
   "description": "Node HTTP/HTTPS Agents for tunneling proxies",
+  "devDependencies": {
+    "mocha": "^5.2.0",
+    "should": "^13.2.3"
+  },
+  "directories": {
+    "lib": "./lib"
+  },
+  "engines": {
+    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
+  },
+  "homepage": "https://github.com/koichik/node-tunnel/",
   "keywords": [
     "http",
     "https",
@@ -9,26 +27,15 @@
     "proxy",
     "tunnel"
   ],
-  "homepage": "https://github.com/koichik/node-tunnel/",
-  "bugs": "https://github.com/koichik/node-tunnel/issues",
   "license": "MIT",
-  "author": "Koichi Kobayashi <koichik@improvement.jp>",
   "main": "./index.js",
-  "directories": {
-    "lib": "./lib"
-  },
+  "name": "tunnel",
   "repository": {
     "type": "git",
-    "url": "https://github.com/koichik/node-tunnel.git"
+    "url": "git+https://github.com/koichik/node-tunnel.git"
   },
   "scripts": {
     "test": "mocha"
   },
-  "devDependencies": {
-    "mocha": "^5.2.0",
-    "should": "^13.2.3"
-  },
-  "engines": {
-    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
-  }
+  "version": "0.0.6"
 }

node_modules/@actions/http-client/package.json

@@ -1,39 +1,39 @@
 {
-  "name": "@actions/http-client",
-  "version": "1.0.8",
+  "author": {
+    "name": "GitHub, Inc."
+  },
+  "bugs": {
+    "url": "https://github.com/actions/http-client/issues"
+  },
+  "bundleDependencies": false,
+  "dependencies": {
+    "tunnel": "0.0.6"
+  },
+  "deprecated": false,
   "description": "Actions Http Client",
-  "main": "index.js",
-  "scripts": {
-    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
-    "test": "jest",
-    "format": "prettier --write *.ts && prettier --write **/*.ts",
-    "format-check": "prettier --check *.ts && prettier --check **/*.ts",
-    "audit-check": "npm audit --audit-level=moderate"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/actions/http-client.git"
+  "devDependencies": {
+    "@types/jest": "^24.0.25",
+    "@types/node": "^12.12.24",
+    "jest": "^24.9.0",
+    "proxy": "^1.0.1",
+    "ts-jest": "^24.3.0",
+    "typescript": "^3.7.4"
   },
+  "homepage": "https://github.com/actions/http-client#readme",
   "keywords": [
     "Actions",
     "Http"
   ],
-  "author": "GitHub, Inc.",
   "license": "MIT",
-  "bugs": {
-    "url": "https://github.com/actions/http-client/issues"
-  },
-  "homepage": "https://github.com/actions/http-client#readme",
-  "devDependencies": {
-    "@types/jest": "^25.1.4",
-    "@types/node": "^12.12.31",
-    "jest": "^25.1.0",
-    "prettier": "^2.0.4",
-    "proxy": "^1.0.1",
-    "ts-jest": "^25.2.1",
-    "typescript": "^3.8.3"
+  "main": "index.js",
+  "name": "@actions/http-client",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/actions/http-client.git"
   },
-  "dependencies": {
-    "tunnel": "0.0.6"
-  }
+  "scripts": {
+    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
+    "test": "jest"
+  },
+  "version": "1.0.4"
 }

node_modules/@actions/http-client/proxy.js

@@ -9,10 +9,12 @@ function getProxyUrl(reqUrl) {
     }
     let proxyVar;
     if (usingSsl) {
-        proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
+        proxyVar = process.env["https_proxy"] ||
+            process.env["HTTPS_PROXY"];
     }
     else {
-        proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
+        proxyVar = process.env["http_proxy"] ||
+            process.env["HTTP_PROXY"];
     }
     if (proxyVar) {
         proxyUrl = url.parse(proxyVar);
@@ -24,7 +26,7 @@ function checkBypass(reqUrl) {
     if (!reqUrl.hostname) {
         return false;
     }
-    let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
+    let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || '';
     if (!noProxy) {
         return false;
     }
@@ -45,10 +47,7 @@ function checkBypass(reqUrl) {
         upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
     }
     // Compare request host against noproxy
-    for (let upperNoProxyItem of noProxy
-        .split(',')
-        .map(x => x.trim().toUpperCase())
-        .filter(x => x)) {
+    for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) {
         if (upperReqHosts.some(x => x === upperNoProxyItem)) {
             return true;
         }
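// A worked example for checkBypass (hypothetical values): with
// NO_PROXY="internal.example.com,localhost", a request to
// https://internal.example.com:8443 is bypassed, because the upper-cased host
// list ["INTERNAL.EXAMPLE.COM", "INTERNAL.EXAMPLE.COM:8443"] contains the
// upper-cased no_proxy entry "INTERNAL.EXAMPLE.COM".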

node_modules/@actions/tool-cache/README.md

@@ -22,11 +22,11 @@ These can then be extracted in platform specific ways:
 const tc = require('@actions/tool-cache');
 if (process.platform === 'win32') {
-    const node12Path = await tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.zip');
+    const node12Path = tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.zip');
     const node12ExtractedFolder = await tc.extractZip(node12Path, 'path/to/extract/to');
     // Or alternately
-    const node12Path = await tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.7z');
+    const node12Path = tc.downloadTool('https://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.7z');
     const node12ExtractedFolder = await tc.extract7z(node12Path, 'path/to/extract/to');
 }
 else {
@@ -37,7 +37,7 @@ else {
 #### Cache
-Finally, you can cache these directories in our tool-cache. This is useful if you want to switch back and forth between versions of a tool, or save a tool between runs for self-hosted runners.
+Finally, you can cache these directories in our tool-cache. This is useful if you want to switch back and forth between versions of a tool, or save a tool between runs for private runners (private runners are still in development but are on the roadmap).
 You'll often want to add it to the path as part of this step:
@@ -57,7 +57,7 @@ You can also cache files for reuse.
 ```js
 const tc = require('@actions/tool-cache');
-const cachedPath = await tc.cacheFile('path/to/exe', 'destFileName.exe', 'myExeName', '1.1.0');
+tc.cacheFile('path/to/exe', 'destFileName.exe', 'myExeName', '1.1.0');
 ```
 #### Find
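A file cached this way can later be located by name and version with `find` (a sketch using the same hypothetical tool name; the body of this section is not included in this diff):

```js
const toolPath = tc.find('myExeName', '1.1.0');
```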

node_modules/@actions/tool-cache/lib/manifest.d.ts

@@ -1,16 +0,0 @@
export interface IToolReleaseFile {
filename: string;
platform: string;
platform_version?: string;
arch: string;
download_url: string;
}
export interface IToolRelease {
version: string;
stable: boolean;
release_url: string;
files: IToolReleaseFile[];
}
export declare function _findMatch(versionSpec: string, stable: boolean, candidates: IToolRelease[], archFilter: string): Promise<IToolRelease | undefined>;
export declare function _getOsVersion(): string;
export declare function _readLinuxVersionFile(): string;

node_modules/@actions/tool-cache/lib/manifest.js

@@ -1,106 +0,0 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const semver = __importStar(require("semver"));
const core_1 = require("@actions/core");
// needs to be require for core node modules to be mocked
/* eslint @typescript-eslint/no-require-imports: 0 */
const os = require("os");
const cp = require("child_process");
const fs = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter(this, void 0, void 0, function* () {
const platFilter = os.platform();
let result;
let match;
let file;
for (const candidate of candidates) {
const version = candidate.version;
core_1.debug(`check ${version} satisfies ${versionSpec}`);
if (semver.satisfies(version, versionSpec) &&
(!stable || candidate.stable === stable)) {
file = candidate.files.find(item => {
core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
let chk = item.arch === archFilter && item.platform === platFilter;
if (chk && item.platform_version) {
const osVersion = module.exports._getOsVersion();
if (osVersion === item.platform_version) {
chk = true;
}
else {
chk = semver.satisfies(osVersion, item.platform_version);
}
}
return chk;
});
if (file) {
core_1.debug(`matched ${candidate.version}`);
match = candidate;
break;
}
}
}
if (match && file) {
// clone since we're mutating the file list to be only the file that matches
result = Object.assign({}, match);
result.files = [file];
}
return result;
});
}
exports._findMatch = _findMatch;
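// A minimal sketch of calling _findMatch with a hypothetical candidate list
// shaped like the IToolRelease interface declared above:
//   const candidates = [{
//       version: '12.7.0', stable: true,
//       release_url: 'https://example.invalid/v12.7.0',
//       files: [{ filename: 'tool-linux-x64.tar.gz', platform: 'linux',
//                 arch: 'x64', download_url: 'https://example.invalid/tool.tar.gz' }]
//   }];
//   const match = await _findMatch('12.x', true, candidates, 'x64');
//   // match.files is narrowed to the single file that fit the platform/arch filters.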
function _getOsVersion() {
// TODO: add windows and other linux, arm variants
// right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)
const plat = os.platform();
let version = '';
if (plat === 'darwin') {
version = cp.execSync('sw_vers -productVersion').toString();
}
else if (plat === 'linux') {
// lsb_release process not in some containers, readfile
// Run cat /etc/lsb-release
// DISTRIB_ID=Ubuntu
// DISTRIB_RELEASE=18.04
// DISTRIB_CODENAME=bionic
// DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS"
const lsbContents = module.exports._readLinuxVersionFile();
if (lsbContents) {
const lines = lsbContents.split('\n');
for (const line of lines) {
const parts = line.split('=');
if (parts.length === 2 && parts[0].trim() === 'DISTRIB_RELEASE') {
version = parts[1].trim();
break;
}
}
}
}
return version;
}
exports._getOsVersion = _getOsVersion;
function _readLinuxVersionFile() {
const lsbFile = '/etc/lsb-release';
let contents = '';
if (fs.existsSync(lsbFile)) {
contents = fs.readFileSync(lsbFile).toString();
}
return contents;
}
exports._readLinuxVersionFile = _readLinuxVersionFile;
//# sourceMappingURL=manifest.js.map

node_modules/@actions/tool-cache/lib/manifest.js.map

@@ -1 +0,0 @@
(source map contents omitted)

node_modules/@actions/tool-cache/lib/retry-helper.d.ts

@@ -1,12 +0,0 @@
/**
* Internal class for retries
*/
export declare class RetryHelper {
private maxAttempts;
private minSeconds;
private maxSeconds;
constructor(maxAttempts: number, minSeconds: number, maxSeconds: number);
execute<T>(action: () => Promise<T>, isRetryable?: (e: Error) => boolean): Promise<T>;
private getSleepAmount;
private sleep;
}

node_modules/@actions/tool-cache/lib/retry-helper.js

@@ -1,70 +0,0 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
/**
* Internal class for retries
*/
class RetryHelper {
constructor(maxAttempts, minSeconds, maxSeconds) {
if (maxAttempts < 1) {
throw new Error('max attempts should be greater than or equal to 1');
}
this.maxAttempts = maxAttempts;
this.minSeconds = Math.floor(minSeconds);
this.maxSeconds = Math.floor(maxSeconds);
if (this.minSeconds > this.maxSeconds) {
throw new Error('min seconds should be less than or equal to max seconds');
}
}
execute(action, isRetryable) {
return __awaiter(this, void 0, void 0, function* () {
let attempt = 1;
while (attempt < this.maxAttempts) {
// Try
try {
return yield action();
}
catch (err) {
if (isRetryable && !isRetryable(err)) {
throw err;
}
core.info(err.message);
}
// Sleep
const seconds = this.getSleepAmount();
core.info(`Waiting ${seconds} seconds before trying again`);
yield this.sleep(seconds);
attempt++;
}
// Last attempt
return yield action();
});
}
getSleepAmount() {
return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
this.minSeconds);
}
sleep(seconds) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise(resolve => setTimeout(resolve, seconds * 1000));
});
}
}
exports.RetryHelper = RetryHelper;
//# sourceMappingURL=retry-helper.js.map
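// A minimal usage sketch (hypothetical action): at most 3 attempts with a
// random 10-20 second sleep between them; if isRetryable returns false the
// error is rethrown immediately.
//   const retryHelper = new RetryHelper(3, 10, 20);
//   const result = await retryHelper.execute(
//       () => downloadSomething(),                  // hypothetical async action
//       (err) => !err.message.includes('404'));     // don't retry 404s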

Some files were not shown because too many files have changed in this diff.