Mirror of https://github.com/github/codeql-action.git (synced 2025-12-12 02:34:39 +08:00)

Compare commits: test-paths ... always-rep (2 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 4cd2eff354 | |
| | cce113b7fc | |
5  .github/ISSUE_TEMPLATE/config.yml (vendored)
@@ -1,5 +0,0 @@
-blank_issues_enabled: true
-contact_links:
-  - name: Contact GitHub Support
-    url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
-    about: Contact Support about code scanning
11  .github/codeql/codeql-config.yml (vendored)
@@ -1,13 +1,4 @@
 name: "CodeQL config"
 queries:
   - name: Run custom queries
     uses: ./queries
-    # Run all extra query suites, both because we want to
-    # and because it'll act as extra testing. This is why
-    # we include both even though one is a superset of the
-    # other, because we're testing the parsing logic and
-    # that the suites exist in the codeql bundle.
-  - uses: security-extended
-  - uses: security-and-quality
-paths-ignore:
-  - tests
178  .github/update-release-branch.py (vendored)
@@ -1,178 +0,0 @@
-import datetime
-from github import Github
-import random
-import requests
-import subprocess
-import sys
-
-# The branch being merged from.
-# This is the one that contains day-to-day development work.
-MASTER_BRANCH = 'master'
-# The branch being merged into.
-# This is the release branch that users reference.
-LATEST_RELEASE_BRANCH = 'v1'
-# Name of the remote
-ORIGIN = 'origin'
-
-# Runs git with the given args and returns the stdout.
-# Raises an error if git does not exit successfully.
-def run_git(*args):
-  cmd = ['git', *args]
-  p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-  if (p.returncode != 0):
-    raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
-  return p.stdout.decode('ascii')
-
-# Returns true if the given branch exists on the origin remote
-def branch_exists_on_remote(branch_name):
-  return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
-
-# Opens a PR from the given branch to the release branch
-def open_pr(repo, all_commits, short_master_sha, branch_name):
-  # Sort the commits into the pull requests that introduced them,
-  # and any commits that don't have a pull request
-  pull_requests = []
-  commits_without_pull_requests = []
-  for commit in all_commits:
-    pr = get_pr_for_commit(repo, commit)
-
-    if pr is None:
-      commits_without_pull_requests.append(commit)
-    elif not any(p for p in pull_requests if p.number == pr.number):
-      pull_requests.append(pr)
-
-  print('Found ' + str(len(pull_requests)) + ' pull requests')
-  print('Found ' + str(len(commits_without_pull_requests)) + ' commits not in a pull request')
-
-  # Sort PRs and commits by age (in place; sorted() alone would discard the result)
-  pull_requests.sort(key=lambda pr: pr.number)
-  commits_without_pull_requests.sort(key=lambda c: c.commit.author.date)
-
-  # Start constructing the body text
-  body = 'Merging ' + short_master_sha + ' into ' + LATEST_RELEASE_BRANCH
-
-  conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
-  body += '\n\nConductor for this PR is @' + conductor
-
-  # List all PRs merged
-  if len(pull_requests) > 0:
-    body += '\n\nContains the following pull requests:'
-    for pr in pull_requests:
-      merger = get_merger_of_pr(repo, pr)
-      body += '\n- #' + str(pr.number)
-      body += ' - ' + pr.title
-      body += ' (@' + merger + ')'
-
-  # List all commits not part of a PR
-  if len(commits_without_pull_requests) > 0:
-    body += '\n\nContains the following commits not from a pull request:'
-    for commit in commits_without_pull_requests:
-      body += '\n- ' + commit.sha
-      body += ' - ' + get_truncated_commit_message(commit)
-      body += ' (@' + commit.author.login + ')'
-
-  title = 'Merge ' + MASTER_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
-
-  # Create the pull request
-  pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
-  print('Created PR #' + str(pr.number))
-
-  # Assign the conductor
-  pr.add_to_assignees(conductor)
-  print('Assigned PR to ' + conductor)
-
-# Gets the person who should be in charge of the mergeback PR
-def get_conductor(repo, pull_requests, other_commits):
-  # If there are any PRs then use whoever merged the last one
-  if len(pull_requests) > 0:
-    return get_merger_of_pr(repo, pull_requests[-1])
-
-  # Otherwise take the author of the latest commit
-  return other_commits[-1].author.login
-
-# Gets a list of the SHAs of all commits that have happened on master
-# since the release branched off.
-# This will not include any commits that exist on the release branch
-# that aren't on master.
-def get_commit_difference(repo):
-  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MASTER_BRANCH).strip().split('\n')
-
-  # Convert to full-fledged commit objects
-  commits = [repo.get_commit(c) for c in commits]
-
-  # Filter out merge commits for PRs
-  return list(filter(lambda c: not is_pr_merge_commit(c), commits))
-
-# Is the given commit the automatic merge commit from when merging a PR
-def is_pr_merge_commit(commit):
-  return commit.committer.login == 'web-flow' and len(commit.parents) > 1
-
-# Gets a copy of the commit message that should display nicely
-def get_truncated_commit_message(commit):
-  message = commit.commit.message.split('\n')[0]
-  if len(message) > 60:
-    return message[:57] + '...'
-  else:
-    return message
-
-# Converts a commit into the PR that introduced it to the master branch.
-# Returns the PR object, or None if no PR could be found.
-def get_pr_for_commit(repo, commit):
-  prs = commit.get_pulls()
-
-  if prs.totalCount > 0:
-    # In the case that there are multiple PRs, return the earliest one
-    prs = list(prs)
-    prs.sort(key=lambda pr: int(pr.number))
-    return prs[0]
-  else:
-    return None
-
-# Get the person who merged the pull request.
-# For most cases this will be the same as the author, but for PRs opened
-# by external contributors getting the merger will get us the GitHub
-# employee who reviewed and merged the PR.
-def get_merger_of_pr(repo, pr):
-  return repo.get_commit(pr.merge_commit_sha).author.login
-
-def main():
-  if len(sys.argv) != 3:
-    raise Exception('Usage: update-release-branch.py <github token> <repository nwo>')
-  github_token = sys.argv[1]
-  repository_nwo = sys.argv[2]
-
-  repo = Github(github_token).get_repo(repository_nwo)
-
-  # Print what we intend to do
-  print('Considering difference between ' + MASTER_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
-  short_master_sha = run_git('rev-parse', '--short', MASTER_BRANCH).strip()
-  print('Current head of ' + MASTER_BRANCH + ' is ' + short_master_sha)
-
-  # See if there are any commits to merge in
-  commits = get_commit_difference(repo)
-  if len(commits) == 0:
-    print('No commits to merge from ' + MASTER_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
-    return
-
-  # The branch name is based off of the name of branch being merged into
-  # and the SHA of the branch being merged from. Thus if the branch already
-  # exists we can assume we don't need to recreate it.
-  new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_master_sha
-  print('Branch name is ' + new_branch_name)
-
-  # Check if the branch already exists. If so we can abort as this script
-  # has already run on this combination of branches.
-  if branch_exists_on_remote(new_branch_name):
-    print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
-    return
-
-  # Create the new branch and push it to the remote
-  print('Creating branch ' + new_branch_name)
-  run_git('checkout', '-b', new_branch_name, MASTER_BRANCH)
-  run_git('push', ORIGIN, new_branch_name)
-
-  # Open a PR to update the branch
-  open_pr(repo, commits, short_master_sha, new_branch_name)
-
-if __name__ == '__main__':
-  main()
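For reference, the PR description assembled by `open_pr` has roughly this shape (the SHA, PR number, title, and logins below are placeholders, not values from this compare):

```
Merging 1a2b3c4 into v1

Conductor for this PR is @example-maintainer

Contains the following pull requests:
- #101 - Example pull request title (@example-maintainer)

Contains the following commits not from a pull request:
- <full commit SHA> - Example commit message (@example-author)
```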
15  .github/workflows/codeql.yml (vendored)
@@ -1,6 +1,6 @@
 name: "CodeQL action"
 
-on: [push, pull_request]
+on: [push]
 
 jobs:
   build:
@@ -11,18 +11,7 @@ jobs:
 
     steps:
     - uses: actions/checkout@v1
-      with:
-        # Must fetch at least the immediate parents so that if this is
-        # a pull request then we can checkout the head of the pull request.
-        fetch-depth: 2
-
-    # If this run was triggered by a pull request event then checkout
-    # the head of the pull request instead of the merge commit.
-    - run: git checkout HEAD^2
-      if: ${{ github.event_name == 'pull_request' }}
-
     - uses: ./init
       with:
         languages: javascript
         config-file: ./.github/codeql/codeql-config.yml
     - uses: ./analyze
132  .github/workflows/integration-testing.yml (vendored)
@@ -1,126 +1,22 @@
 name: "Integration Testing"
 
-on: [push, pull_request]
+on: [push]
 
 jobs:
-  multi-language-repo_test-autodetect-languages:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Move codeql-action
-      shell: bash
-      run: |
-        mkdir ../action
-        mv * .github ../action/
-        mv ../action/tests/multi-language-repo/{*,.github} .
-    - uses: ./../action/init
-    - name: Build code
-      shell: bash
-      run: ./build.sh
-    - uses: ./../action/analyze
-      env:
-        TEST_MODE: true
-    - run: |
-        cd "$CODEQL_ACTION_DATABASE_DIR"
-        # List all directories as there will be precisely one directory per database
-        # but there may be other files in this directory such as query suites.
-        if [ "$(ls -d */ | wc -l)" != 6 ] || \
-           [[ ! -d cpp ]] || \
-           [[ ! -d csharp ]] || \
-           [[ ! -d go ]] || \
-           [[ ! -d java ]] || \
-           [[ ! -d javascript ]] || \
-           [[ ! -d python ]]; then
-          echo "Did not find expected number of databases. Database dir contains: $(ls)"
-          exit 1
-        fi
-
-  multi-language-repo_test-custom-queries:
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu-latest, windows-latest, macos-latest]
-    runs-on: ${{ matrix.os }}
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Move codeql-action
-      shell: bash
-      run: |
-        mkdir ../action
-        mv * .github ../action/
-        mv ../action/tests/multi-language-repo/{*,.github} .
-    - uses: ./../action/init
-      with:
-        languages: cpp,csharp,java,javascript,python
-        config-file: ./.github/codeql/custom-queries.yml
-    - name: Build code
-      shell: bash
-      run: ./build.sh
-    - uses: ./../action/analyze
-      env:
-        TEST_MODE: true
-
-  # Currently is not possible to analyze Go in conjunction with other languages in macos
-  multi-language-repo_test-go-custom-queries:
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu-latest, windows-latest, macos-latest]
-    runs-on: ${{ matrix.os }}
-
-    steps:
-    - uses: actions/setup-go@v2
-      if: ${{ matrix.os == 'macos-latest' }}
-      with:
-        go-version: '^1.13.1'
-    - uses: actions/checkout@v2
-    - name: Move codeql-action
-      shell: bash
-      run: |
-        mkdir ../action
-        mv * .github ../action/
-        mv ../action/tests/multi-language-repo/{*,.github} .
-    - uses: ./../action/init
-      with:
-        languages: go
-        config-file: ./.github/codeql/custom-queries.yml
-    - name: Build code
-      shell: bash
-      run: ./build.sh
-    - uses: ./../action/analyze
-      env:
-        TEST_MODE: true
-
-  multi-language-repo_rubocop:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Move codeql-action
-      shell: bash
-      run: |
-        mkdir ../action
-        mv * .github ../action/
-        mv ../action/tests/multi-language-repo/{*,.github} .
-    - name: Set up Ruby
-      uses: ruby/setup-ruby@v1
-      with:
-        ruby-version: 2.6
-    - name: Install Code Scanning integration
-      run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
-    - name: Install dependencies
-      run: bundle install
-    - name: Rubocop run
-      run: |
-        bash -c "
-          bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
-          [[ $? -ne 2 ]]
-        "
-    - uses: ./../action/upload-sarif
-      with:
-        sarif_file: rubocop.sarif
-      env:
-        TEST_MODE: true
+  dispatch-events:
+    if: github.event.repository.full_name == 'github/codeql-action'
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Send repository dispatch events
+      run: |
+        curl -X POST \
+          -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
+          -H "Accept: application/vnd.github.everest-preview+json" \
+          https://api.github.com/repos/Anthophila/amazon-cognito-js-copy/dispatches \
+          -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
+        curl -X POST \
+          -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
+          -H "Accept: application/vnd.github.everest-preview+json" \
+          https://api.github.com/repos/Anthophila/electron-test-action/dispatches \
+          -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
27  .github/workflows/js-uptodate-check.yml (vendored, Normal file)
@@ -0,0 +1,27 @@
+name: "Check generated JavaScript"
+
+on: [pull_request]
+
+jobs:
+  check-js:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: Check generated JavaScript
+      run: |
+        # Sanity check that repo is clean to start with
+        if [ ! -z "$(git status --porcelain)" ]; then
+          # If we get a fail here then this workflow needs attention...
+          >&2 echo "Failed: Repo should be clean before testing!"
+          exit 1
+        fi
+        # Generate the JavaScript files
+        npm run-script build
+        # Check that repo is still clean
+        if [ ! -z "$(git status --porcelain)" ]; then
+          # If we get a fail here then the PR needs attention
+          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
+          exit 1
+        fi
+        echo "Success: JavaScript files are up to date"
12  .github/workflows/npm-test.yml (vendored, Normal file)
@@ -0,0 +1,12 @@
+name: "npm run-script test"
+
+on: [push]
+
+jobs:
+  npm-test:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: npm run-script test
+      run: npm run-script test
71  .github/workflows/pr-checks.yml (vendored)
@@ -1,71 +0,0 @@
-name: "PR checks"
-
-on: [push, pull_request]
-
-jobs:
-  tslint:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v1
-    - name: tslint
-      run: npm run-script lint
-
-  check-js:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v1
-    - name: Check generated JavaScript
-      run: |
-        # Sanity check that repo is clean to start with
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then this workflow needs attention...
-          >&2 echo "Failed: Repo should be clean before testing!"
-          exit 1
-        fi
-        # Generate the JavaScript files
-        npm run-script build
-        # Check that repo is still clean
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then the PR needs attention
-          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
-          git status
-          exit 1
-        fi
-        echo "Success: JavaScript files are up to date"
-
-  check-node-modules:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v1
-    - name: Check node modules up to date
-      run: |
-        # Sanity check that repo is clean to start with
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then this workflow needs attention...
-          >&2 echo "Failed: Repo should be clean before testing!"
-          exit 1
-        fi
-
-        # Reinstall modules and then clean to remove absolute paths
-        # Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
-        npm ci
-        npm run removeNPMAbsolutePaths
-        # Check that repo is still clean
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then the PR needs attention
-          >&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
-          git status
-          exit 1
-        fi
-        echo "Success: node_modules are up to date"
-
-  npm-test:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v1
-    - name: npm run-script test
-      run: npm run-script test
12  .github/workflows/ts-lint.yml (vendored, Normal file)
@@ -0,0 +1,12 @@
+name: "TSLint"
+
+on: [push]
+
+jobs:
+  tslint:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: tslint
+      run: npm run-script lint
31  .github/workflows/update-release-branch.yml (vendored)
@@ -1,31 +0,0 @@
-name: Update release branch
-on:
-  schedule:
-    - cron: 0 9 * * 1
-  repository_dispatch:
-    # Example of how to trigger this:
-    # curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
-    # Replace <token> with a personal access token from this page: https://github.com/settings/tokens
-    types: [update-release-branch]
-
-jobs:
-  update:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v2
-      with:
-        # Need full history so we can calculate diffs
-        fetch-depth: 0
-
-    - name: Set up Python
-      uses: actions/setup-python@v2
-      with:
-        python-version: 3.5
-
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install PyGithub==1.51 requests
-
-    - name: Update release branch
-      run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
99  README.md
@@ -1,6 +1,6 @@
 # CodeQL Action
 
-This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
+This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/semmle/ql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
 
 ## License
 
@@ -10,8 +10,6 @@ The underlying CodeQL CLI, used in this action, is licensed under the [GitHub Co
 
 ## Usage
 
-This is a short walkthrough, but for more information read [configuring code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning).
-
 To get code scanning results from CodeQL analysis on your repo you can use the following workflow as a template:
 
 ```yaml
@@ -20,7 +18,6 @@ name: "Code Scanning - Action"
 
 on:
   push:
-  pull_request:
   schedule:
     - cron: '0 0 * * 0'
 
@@ -36,17 +33,6 @@ jobs:
     steps:
     - name: Checkout repository
       uses: actions/checkout@v2
-      with:
-        # Must fetch at least the immediate parents so that if this is
-        # a pull request then we can checkout the head of the pull request.
-        # Only include this option if you are running this workflow on pull requests.
-        fetch-depth: 2
-
-    # If this run was triggered by a pull request event then checkout
-    # the head of the pull request instead of the merge commit.
-    # Only include this step if you are running this workflow on pull requests.
-    - run: git checkout HEAD^2
-      if: ${{ github.event_name == 'pull_request' }}
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
@@ -92,9 +78,24 @@ If you prefer to integrate this within an existing CI workflow, it should end up
       uses: github/codeql-action/analyze@v1
 ```
 
-### Configuration file
+### Actions triggers
 
-Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
+The CodeQL action should be run on `push` events and on a `schedule`. `push` events allow a detailed analysis of the delta in a pull request, while the `schedule` event ensures that GitHub regularly scans the repository for the latest vulnerabilities, even if the repository becomes inactive. This action does not support the `pull_request` event.
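For instance, a minimal trigger block consistent with this guidance (the weekly cron value is carried over from the template above and is only an illustrative choice):

```yaml
on:
  push:
  schedule:
    - cron: '0 0 * * 0'
```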
+
+### Configuration
+
+You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries from other repositories, use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses).
+
+You can disable the default queries using `disable-default-queries: true`.
+
+You can choose to ignore some files or folders from the analysis, or include additional files/folders for analysis. This *only* works for JavaScript and Python analysis. The action identifies potential files for extraction as follows:
+
+- It scans each folder that's defined as `paths` in turn, traversing subfolders, and looking for relevant files.
+- If it finds a subfolder that's defined as `paths-ignore`, it stops traversing.
+- If a file or folder is both in `paths` and `paths-ignore`, the `paths-ignore` setting is ignored, as sketched below.
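A sketch of how these rules combine (the folder names here are purely illustrative):

```yaml
paths:
  - src          # traversed and analyzed
paths-ignore:
  - src          # also listed in paths, so this entry is ignored and src is still analyzed
  - src/vendor   # traversal stops here, so files under src/vendor are skipped
```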
+
+Use the `config-file` parameter of the init action to enable the configuration file. For example:
 
 ```yaml
 - uses: github/codeql-action/init@v1
@@ -102,8 +103,68 @@ Use the `config-file` parameter of the `init` action to enable the configuration
     config-file: ./.github/codeql/codeql-config.yml
 ```
 
-The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
+A config file looks like this:
+
+```yaml
+name: "My CodeQL config"
+
+disable-default-queries: true
+
+queries:
+  - name: In-repo queries (runs the queries located in the my-queries folder of the repo)
+    uses: ./my-queries
+  - name: External JavaScript QL pack (runs a QL pack located in an external repo)
+    uses: Semmle/ql/javascript/ql/src/Electron@master
+  - name: External query (runs a single query located in an external QL pack)
+    uses: Semmle/ql/javascript/ql/src/AngularJS/DeadAngularJSEventListener.ql@master
+  - name: Select query suite (runs a query suite)
+    uses: ./codeql-querypacks/complex-python-querypack/rootAndBar.qls
+
+paths:
+  - src/util.ts
+
+paths-ignore:
+  - src
+  - lib
+```
 
 ## Troubleshooting
 
-Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
+### Trouble with Go dependencies
+
+#### If you use a vendor directory
+
+Try passing
+
+```yaml
+env:
+  GOFLAGS: "-mod=vendor"
+```
+
+to `github/codeql-action/analyze`.
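Within a workflow this might look like the following sketch (step shape borrowed from the template above):

```yaml
- uses: github/codeql-action/analyze@v1
  env:
    GOFLAGS: "-mod=vendor"
```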
+
+#### If you do not use a vendor directory
+
+Dependencies on public repositories should just work. If you have dependencies on private repositories, one option is to use `git config` and a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) to authenticate when downloading dependencies. Add a section like
+
+```yaml
+steps:
+- name: Configure git private repo access
+  env:
+    TOKEN: ${{ secrets.GITHUB_PAT }}
+  run: |
+    git config --global url."https://${TOKEN}@github.com/foo/bar".insteadOf "https://github.com/foo/bar"
+    git config --global url."https://${TOKEN}@github.com/foo/baz".insteadOf "https://github.com/foo/baz"
+```
+
+before any codeql actions. A similar thing can also be done with an SSH key or deploy key.
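A sketch of the deploy-key variant mentioned above (the secret name, key path, and key type are hypothetical):

```yaml
steps:
- name: Configure SSH private repo access
  env:
    SSH_KEY: ${{ secrets.DEPLOY_KEY }}  # hypothetical secret holding a private deploy key
  run: |
    mkdir -p ~/.ssh
    echo "$SSH_KEY" > ~/.ssh/id_ed25519
    chmod 600 ~/.ssh/id_ed25519
    ssh-keyscan github.com >> ~/.ssh/known_hosts
    git config --global url."git@github.com:".insteadOf "https://github.com/"
```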
+
+### C# using dotnet version 2 on Linux
+
+This currently requires invoking `dotnet` with the `/p:UseSharedCompilation=false` flag. For example:
+
+```shell
+dotnet build /p:UseSharedCompilation=false
+```
+
+Version 3 does not require the additional flag.
@@ -12,9 +12,6 @@ inputs:
     description: Upload the SARIF file
     required: false
     default: true
-  ram:
-    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
-    required: false
   token:
     default: ${{ github.token }}
   matrix:

@@ -5,14 +5,12 @@ inputs:
   tools:
     description: URL of CodeQL tools
     required: false
-    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz
+    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
   languages:
     description: The languages to be analysed
     required: false
   token:
     default: ${{ github.token }}
   matrix:
     default: ${{ toJson(matrix) }}
   config-file:
     description: Path of the config file to use
     required: false
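A sketch of how inputs like these are supplied from a workflow, using only values that already appear in this compare:

```yaml
- uses: ./init
  with:
    languages: javascript
    config-file: ./.github/codeql/codeql-config.yml
    tools: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz
```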
11  jest.config.js (Normal file)
@@ -0,0 +1,11 @@
+module.exports = {
+  clearMocks: true,
+  moduleFileExtensions: ['js', 'ts'],
+  testEnvironment: 'node',
+  testMatch: ['**/*.test.ts'],
+  testRunner: 'jest-circus/runner',
+  transform: {
+    '^.+\\.ts$': 'ts-jest'
+  },
+  verbose: true
+}
3  lib/analysis-paths.js (generated)
@@ -16,7 +16,7 @@ function includeAndExcludeAnalysisPaths(config, languages) {
         core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
     }
     function isInterpretedLanguage(language) {
-        return language === 'javascript' || language === 'python';
+        return language === 'javascript' && language === 'python';
     }
     // Index include/exclude only work in javascript and python
     // If some other language is detected/configured show a warning
@@ -25,4 +25,3 @@ function includeAndExcludeAnalysisPaths(config, languages) {
     }
 }
 exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
-//# sourceMappingURL=analysis-paths.js.map
@@ -1 +0,0 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAgB,8BAA8B,CAAC,MAA0B,EAAE,SAAmB;IAC1F,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KACtE;IAED,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACjC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC5E;IAED,SAAS,qBAAqB,CAAC,QAAQ;QACnC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;IAC9D,CAAC;IAED,2DAA2D;IAC3D,+DAA+D;IAC/D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAC3G,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC9G;AACL,CAAC;AAlBD,wEAkBC"}
30  lib/analysis-paths.test.js (generated)
@@ -1,30 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
-    result["default"] = mod;
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const ava_1 = __importDefault(require("ava"));
-const analysisPaths = __importStar(require("./analysis-paths"));
-const configUtils = __importStar(require("./config-utils"));
-ava_1.default("emptyPaths", async (t) => {
-    let config = new configUtils.Config();
-    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
-    t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
-    t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
-});
-ava_1.default("nonEmptyPaths", async (t) => {
-    let config = new configUtils.Config();
-    config.paths.push('path1', 'path2');
-    config.pathsIgnore.push('path3', 'path4');
-    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
-    t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
-    t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path3\npath4');
-});
-//# sourceMappingURL=analysis-paths.test.js.map
@@ -1 +0,0 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,4DAA8C;AAE9C,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACzB,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC5B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IACpC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC1C,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;AAC5D,CAAC,CAAC,CAAC"}
1  lib/autobuild.js (generated)
@@ -58,4 +58,3 @@ run().catch(e => {
     core.setFailed("autobuild action failed. " + e);
     console.log(e);
 });
-//# sourceMappingURL=autobuild.js.map
@@ -1 +0,0 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,2CAA6B;AAE7B,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,8DAA8D;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC;QAExE,MAAM,OAAO,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,cAAc,CAAC;QAChF,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;QAGpF,+DAA+D;QAC/D,0FAA0F;QAC1F,qDAAqD;QACrD,8EAA8E;QAC9E,gHAAgH;QAChH,IAAI,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,wBAAwB,EAAE,+BAA+B,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAE1I,MAAM,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC9B,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
181  lib/config-utils.js (generated)
@@ -12,13 +12,6 @@ const io = __importStar(require("@actions/io"));
 const fs = __importStar(require("fs"));
 const yaml = __importStar(require("js-yaml"));
 const path = __importStar(require("path"));
-const util = __importStar(require("./util"));
-const NAME_PROPERTY = 'name';
-const DISPLAY_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
-const QUERIES_PROPERTY = 'queries';
-const QUERIES_USES_PROPERTY = 'uses';
-const PATHS_IGNORE_PROPERTY = 'paths-ignore';
-const PATHS_PROPERTY = 'paths';
 class ExternalQuery {
     constructor(repository, ref) {
         this.path = '';
@@ -27,72 +20,39 @@ class ExternalQuery {
     }
 }
 exports.ExternalQuery = ExternalQuery;
-// The set of acceptable values for built-in suites from the codeql bundle
-const builtinSuites = ['security-extended', 'security-and-quality'];
 class Config {
     constructor() {
         this.name = "";
         this.disableDefaultQueries = false;
         this.additionalQueries = [];
         this.externalQueries = [];
-        this.additionalSuites = [];
         this.pathsIgnore = [];
         this.paths = [];
     }
-    addQuery(configFile, queryUses) {
+    addQuery(queryUses) {
         // The logic for parsing the string is based on what actions does for
         // parsing the 'uses' actions in the workflow file
        queryUses = queryUses.trim();
         if (queryUses === "") {
-            throw new Error(getQueryUsesInvalid(configFile));
+            throw '"uses" value for queries cannot be blank';
         }
         // Check for the local path case before we start trying to parse the repository name
         if (queryUses.startsWith("./")) {
-            const localQueryPath = queryUses.slice(2);
-            // Resolve the local path against the workspace so that when this is
-            // passed to codeql it resolves to exactly the path we expect it to resolve to.
-            const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
-            const absoluteQueryPath = path.join(workspacePath, localQueryPath);
-            // Check the file exists
-            if (!fs.existsSync(absoluteQueryPath)) {
-                throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
-            }
-            // Check the local path doesn't jump outside the repo using '..' or symlinks
-            if (!(fs.realpathSync(absoluteQueryPath) + path.sep).startsWith(workspacePath + path.sep)) {
-                throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
-            }
-            this.additionalQueries.push(absoluteQueryPath);
+            this.additionalQueries.push(queryUses.slice(2));
             return;
        }
-        // Check for one of the builtin suites
-        if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
-            const suite = builtinSuites.find((suite) => suite === queryUses);
-            if (suite) {
-                this.additionalSuites.push(suite);
-                return;
-            }
-            else {
-                throw new Error(getQueryUsesInvalid(configFile, queryUses));
-            }
-        }
         let tok = queryUses.split('@');
         if (tok.length !== 2) {
-            throw new Error(getQueryUsesInvalid(configFile, queryUses));
+            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
         }
         const ref = tok[1];
         tok = tok[0].split('/');
         // The first token is the owner
         // The second token is the repo
         // The rest is a path, if there is more than one token combine them to form the full path
-        if (tok.length < 2) {
-            throw new Error(getQueryUsesInvalid(configFile, queryUses));
-        }
         if (tok.length > 3) {
             tok = [tok[0], tok[1], tok.slice(2).join('/')];
         }
-        // Check none of the parts of the repository name are empty
-        if (tok[0].trim() === '' || tok[1].trim() === '') {
-            throw new Error(getQueryUsesInvalid(configFile, queryUses));
+        if (tok.length < 2) {
+            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
         }
         let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
         if (tok.length === 3) {
@@ -102,137 +62,57 @@ class Config {
     }
 }
 exports.Config = Config;
-function getNameInvalid(configFile) {
-    return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
-}
-exports.getNameInvalid = getNameInvalid;
-function getDisableDefaultQueriesInvalid(configFile) {
-    return getConfigFilePropertyError(configFile, DISPLAY_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
-}
-exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
-function getQueriesInvalid(configFile) {
-    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
-}
-exports.getQueriesInvalid = getQueriesInvalid;
-function getQueryUsesInvalid(configFile, queryUses) {
-    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
-        '), a relative path, or be of the form "owner/repo[/path]@ref"' +
-        (queryUses !== undefined ? '\n Found: ' + queryUses : ''));
-}
-exports.getQueryUsesInvalid = getQueryUsesInvalid;
-function getPathsIgnoreInvalid(configFile) {
-    return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
-}
-exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
-function getPathsInvalid(configFile) {
-    return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
-}
-exports.getPathsInvalid = getPathsInvalid;
-function getLocalPathOutsideOfRepository(configFile, localPath) {
-    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
-}
-exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
-function getLocalPathDoesNotExist(configFile, localPath) {
-    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
-}
-exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
-function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
-    return 'The configuration file "' + configFile + '" is outside of the workspace';
-}
-exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
-function getConfigFileDoesNotExistErrorMessage(configFile) {
-    return 'The configuration file "' + configFile + '" does not exist';
-}
-exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
-function getConfigFilePropertyError(configFile, property, error) {
-    return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
-}
+const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
 function initConfig() {
-    let configFile = core.getInput('config-file');
+    const configFile = core.getInput('config-file');
     const config = new Config();
     // If no config file was provided create an empty one
     if (configFile === '') {
         core.debug('No configuration file was provided');
         return config;
     }
-    // Treat the config file as relative to the workspace
-    const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
-    configFile = path.resolve(workspacePath, configFile);
-    // Error if the config file is now outside of the workspace
-    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
-        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
-    }
-    // Error if the file does not exist
-    if (!fs.existsSync(configFile)) {
-        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
-    }
     const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
-    if (NAME_PROPERTY in parsedYAML) {
-        if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
-            throw new Error(getNameInvalid(configFile));
-        }
-        if (parsedYAML[NAME_PROPERTY].length === 0) {
-            throw new Error(getNameInvalid(configFile));
-        }
-        config.name = parsedYAML[NAME_PROPERTY];
+    if (parsedYAML.name && typeof parsedYAML.name === "string") {
+        config.name = parsedYAML.name;
     }
-    if (DISPLAY_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
-        if (typeof parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
-            throw new Error(getDisableDefaultQueriesInvalid(configFile));
-        }
-        config.disableDefaultQueries = parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY];
+    if (parsedYAML['disable-default-queries'] && typeof parsedYAML['disable-default-queries'] === "boolean") {
+        config.disableDefaultQueries = parsedYAML['disable-default-queries'];
     }
-    if (QUERIES_PROPERTY in parsedYAML) {
-        if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
-            throw new Error(getQueriesInvalid(configFile));
-        }
-        parsedYAML[QUERIES_PROPERTY].forEach(query => {
-            if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
-                throw new Error(getQueryUsesInvalid(configFile));
-            }
-            config.addQuery(configFile, query[QUERIES_USES_PROPERTY]);
-        });
+    const queries = parsedYAML.queries;
+    if (queries && queries instanceof Array) {
+        queries.forEach(query => {
+            if (query.uses && typeof query.uses === "string") {
+                config.addQuery(query.uses);
+            }
+        });
     }
-    if (PATHS_IGNORE_PROPERTY in parsedYAML) {
-        if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
-            throw new Error(getPathsIgnoreInvalid(configFile));
-        }
-        parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
-            if (typeof path !== "string" || path === '') {
-                throw new Error(getPathsIgnoreInvalid(configFile));
-            }
-            config.pathsIgnore.push(path);
-        });
+    const pathsIgnore = parsedYAML['paths-ignore'];
+    if (pathsIgnore && pathsIgnore instanceof Array) {
+        pathsIgnore.forEach(path => {
+            if (typeof path === "string") {
+                config.pathsIgnore.push(path);
+            }
+        });
     }
-    if (PATHS_PROPERTY in parsedYAML) {
-        if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
-            throw new Error(getPathsInvalid(configFile));
-        }
-        parsedYAML[PATHS_PROPERTY].forEach(path => {
-            if (typeof path !== "string" || path === '') {
-                throw new Error(getPathsInvalid(configFile));
-            }
-            config.paths.push(path);
-        });
+    const paths = parsedYAML.paths;
+    if (paths && paths instanceof Array) {
+        paths.forEach(path => {
+            if (typeof path === "string") {
+                config.paths.push(path);
+            }
+        });
     }
     return config;
 }
-function getConfigFolder() {
-    return util.getRequiredEnvParam('RUNNER_TEMP');
-}
-function getConfigFile() {
-    return path.join(getConfigFolder(), 'config');
-}
-exports.getConfigFile = getConfigFile;
 async function saveConfig(config) {
     const configString = JSON.stringify(config);
-    await io.mkdirP(getConfigFolder());
-    fs.writeFileSync(getConfigFile(), configString, 'utf8');
+    await io.mkdirP(configFolder);
+    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
     core.debug('Saved config:');
     core.debug(configString);
 }
 async function loadConfig() {
-    const configFile = getConfigFile();
+    const configFile = path.join(configFolder, 'config');
     if (fs.existsSync(configFile)) {
         const configString = fs.readFileSync(configFile, 'utf8');
         core.debug('Loaded config:');
@@ -248,4 +128,3 @@ async function loadConfig() {
     }
 }
 exports.loadConfig = loadConfig;
-//# sourceMappingURL=config-utils.js.map
File diff suppressed because one or more lines are too long
162  lib/config-utils.test.js (generated)
@@ -1,162 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
-    result["default"] = mod;
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const ava_1 = __importDefault(require("ava"));
-const fs = __importStar(require("fs"));
-const path = __importStar(require("path"));
-const configUtils = __importStar(require("./config-utils"));
-const util = __importStar(require("./util"));
-function setInput(name, value) {
-    // Transformation copied from
-    // https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
-    const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
-    if (value !== undefined) {
-        process.env[envVar] = value;
-    }
-    else {
-        delete process.env[envVar];
-    }
-}
-ava_1.default("load empty config", async (t) => {
-    return await util.withTmpDir(async (tmpDir) => {
-        process.env['RUNNER_TEMP'] = tmpDir;
-        process.env['GITHUB_WORKSPACE'] = tmpDir;
-        setInput('config-file', undefined);
-        const config = await configUtils.loadConfig();
-        t.deepEqual(config, new configUtils.Config());
-    });
-});
-ava_1.default("loading config saves config", async (t) => {
-    return await util.withTmpDir(async (tmpDir) => {
-        process.env['RUNNER_TEMP'] = tmpDir;
-        process.env['GITHUB_WORKSPACE'] = tmpDir;
-        const configFile = configUtils.getConfigFile();
-        // Sanity check the saved config file does not already exist
-        t.false(fs.existsSync(configFile));
-        const config = await configUtils.loadConfig();
-        // The saved config file should now exist
-        t.true(fs.existsSync(configFile));
-        // And the contents should parse correctly to the config that was returned
-        t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
-    });
-});
-ava_1.default("load input outside of workspace", async (t) => {
-    return await util.withTmpDir(async (tmpDir) => {
-        process.env['RUNNER_TEMP'] = tmpDir;
-        process.env['GITHUB_WORKSPACE'] = tmpDir;
-        setInput('config-file', '../input');
-        try {
-            await configUtils.loadConfig();
-            throw new Error('loadConfig did not throw error');
-        }
-        catch (err) {
-            t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
-        }
-    });
-});
-ava_1.default("load non-existent input", async (t) => {
-    return await util.withTmpDir(async (tmpDir) => {
-        process.env['RUNNER_TEMP'] = tmpDir;
-        process.env['GITHUB_WORKSPACE'] = tmpDir;
-        t.false(fs.existsSync(path.join(tmpDir, 'input')));
-        setInput('config-file', 'input');
-        try {
-            await configUtils.loadConfig();
-            throw new Error('loadConfig did not throw error');
-        }
-        catch (err) {
-            t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
-        }
-    });
-});
-ava_1.default("load non-empty input", async (t) => {
-    return await util.withTmpDir(async (tmpDir) => {
-        process.env['RUNNER_TEMP'] = tmpDir;
-        process.env['GITHUB_WORKSPACE'] = tmpDir;
-        // Just create a generic config object with non-default values for all fields
-        const inputFileContents = `
-      name: my config
-      disable-default-queries: true
-      queries:
-        - uses: ./
-        - uses: ./foo
-        - uses: foo/bar@dev
-      paths-ignore:
-        - a
-        - b
-      paths:
-        - c/d`;
-        // And the config we expect it to parse to
-        const expectedConfig = new configUtils.Config();
-        expectedConfig.name = 'my config';
-        expectedConfig.disableDefaultQueries = true;
-        expectedConfig.additionalQueries.push(tmpDir);
-        expectedConfig.additionalQueries.push(path.join(tmpDir, 'foo'));
-        expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
-        expectedConfig.pathsIgnore = ['a', 'b'];
-        expectedConfig.paths = ['c/d'];
-        fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
-        setInput('config-file', 'input');
-        fs.mkdirSync(path.join(tmpDir, 'foo'));
-        const actualConfig = await configUtils.loadConfig();
-        // Should exactly equal the object we constructed earlier
-        t.deepEqual(actualConfig, expectedConfig);
-    });
-});
-function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
-    ava_1.default("load invalid input - " + testName, async (t) => {
-        return await util.withTmpDir(async (tmpDir) => {
-            process.env['RUNNER_TEMP'] = tmpDir;
-            process.env['GITHUB_WORKSPACE'] = tmpDir;
-            const inputFile = path.join(tmpDir, 'input');
-            fs.writeFileSync(inputFile, inputFileContents, 'utf8');
-            setInput('config-file', 'input');
-            try {
-                await configUtils.loadConfig();
-                throw new Error('loadConfig did not throw error');
-            }
-            catch (err) {
-                t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
-            }
-        });
-    });
-}
-doInvalidInputTest('name invalid type', `
-name:
-  - foo: bar`, configUtils.getNameInvalid);
-doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
-doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
-doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
-doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
-doInvalidInputTest('queries uses invalid type', `
-queries:
-  - uses:
-      - hello: world`, configUtils.getQueryUsesInvalid);
-function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
-    // Invalid contents of a "queries.uses" field.
-    // Should fail with the expected error message
-    const inputFileContents = `
-      name: my config
-      queries:
-        - name: foo
-          uses: ` + input;
-    doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
-}
-// Various "uses" fields, and the errors they should produce
-doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
-doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
-doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
-doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
-doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
-doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
-doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
-//# sourceMappingURL=config-utils.test.js.map
File diff suppressed because one or more lines are too long
4  lib/external-queries.js (generated)
@@ -11,9 +11,8 @@ const core = __importStar(require("@actions/core"));
 const exec = __importStar(require("@actions/exec"));
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
-const util = __importStar(require("./util"));
 async function checkoutExternalQueries(config) {
-    const folder = util.getRequiredEnvParam('RUNNER_TEMP');
+    const folder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
     for (const externalQuery of config.externalQueries) {
         core.info('Checking out ' + externalQuery.repository);
         const checkoutLocation = path.join(folder, externalQuery.repository);
@@ -30,4 +29,3 @@ async function checkoutExternalQueries(config) {
     }
 }
 exports.checkoutExternalQueries = checkoutExternalQueries;
-//# sourceMappingURL=external-queries.js.map
@@ -1 +0,0 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}
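In essence, checkoutExternalQueries clones each configured repository into a folder and pins it to the requested commit so query results stay reproducible. A rough standalone equivalent, assuming `git` is on the PATH and using example values (the exact git invocation in the action may differ):

const { execFileSync } = require('child_process');
const path = require('path');

// Clone a repository under `folder` and check out a specific commit.
function checkoutPinned(folder, repository, ref) {
    const checkoutLocation = path.join(folder, repository);
    execFileSync('git', ['clone', 'https://github.com/' + repository, checkoutLocation]);
    // Pin to the exact commit rather than a moving branch head.
    execFileSync('git', ['checkout', ref], { cwd: checkoutLocation });
}

checkoutPinned('/tmp/codeql-queries', 'github/codeql-go', 'df4c6869212341b601005567381944ed90906b6b');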
31 lib/external-queries.test.js generated
@@ -1,31 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const externalQueries = __importStar(require("./external-queries"));
const util = __importStar(require("./util"));
ava_1.default("checkoutExternalQueries", async (t) => {
    let config = new configUtils.Config();
    config.externalQueries = [
        new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
    ];
    await util.withTmpDir(async (tmpDir) => {
        process.env["RUNNER_TEMP"] = tmpDir;
        await externalQueries.checkoutExternalQueries(config);
        // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
        t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
    });
});
//# sourceMappingURL=external-queries.test.js.map
@@ -1 +0,0 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,6CAA+B;AAE/B,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACtC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACrB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAChG,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACjC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IACjF,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"}
110 lib/finalize-db.js generated
@@ -11,49 +11,12 @@ const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const externalQueries = __importStar(require("./external-queries"));
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
/**
 * A list of queries from https://github.com/github/codeql that
 * we don't want to run. Disabling them here is a quicker alternative to
 * disabling them in the code scanning query suites. Queries should also
 * be disabled in the suites, and removed from this list here once the
 * bundle is updated to make those suite changes live.
 *
 * Format is a map from language to an array of path suffixes of .ql files.
 */
const DISABLED_BUILTIN_QUERIES = {
    'csharp': [
        'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
        'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
    ]
};
function queryIsDisabled(language, query) {
    return (DISABLED_BUILTIN_QUERIES[language] || [])
        .some(disabledQuery => query.endsWith(disabledQuery));
}
function getMemoryFlag() {
    let memoryToUseMegaBytes;
    const memoryToUseString = core.getInput("ram");
    if (memoryToUseString) {
        memoryToUseMegaBytes = Number(memoryToUseString);
        if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
            throw new Error("Invalid RAM setting \"" + memoryToUseString + "\" specified.");
        }
    }
    else {
        const totalMemoryBytes = os.totalmem();
        const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
        const systemReservedMemoryMegaBytes = 256;
        memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
    }
    return "--ram=" + Math.floor(memoryToUseMegaBytes);
}
async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
    const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
    if (scannedLanguages) {
@@ -86,50 +49,26 @@ async function finalizeDatabaseCreation(codeqlCmd, databaseFolder) {
        core.endGroup();
    }
}
async function runResolveQueries(codeqlCmd, queries) {
    let output = '';
    const options = {
        listeners: {
            stdout: (data) => {
                output += data.toString();
            }
        }
    };
    await exec.exec(codeqlCmd, [
        'resolve',
        'queries',
        ...queries,
        '--format=bylanguage'
    ], options);
    return JSON.parse(output);
}
async function resolveQueryLanguages(codeqlCmd, config) {
    let res = new Map();
    if (!config.disableDefaultQueries || config.additionalSuites.length !== 0) {
        const suites = [];
        for (const language of await util.getLanguages()) {
            if (!config.disableDefaultQueries) {
                suites.push(language + '-code-scanning.qls');
            }
            for (const additionalSuite of config.additionalSuites) {
                suites.push(language + '-' + additionalSuite + '.qls');
            }
        }
        const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, suites);
        for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
            if (res[language] === undefined) {
                res[language] = [];
        let resolveQueriesOutput = '';
        const options = {
            listeners: {
                stdout: (data) => {
                    resolveQueriesOutput += data.toString();
                }
            }
            res[language].push(...Object.keys(queries));
        };
        await exec.exec(codeqlCmd, [
            'resolve',
            'queries',
            ...config.additionalQueries,
            '--format=bylanguage'
        ], options);
        const resolveQueriesOutputObject = JSON.parse(resolveQueriesOutput);
        for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
            res[language] = Object.keys(queries);
        }
        const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
        const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
@@ -149,26 +88,20 @@ async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
    const queriesPerLanguage = await resolveQueryLanguages(codeqlCmd, config);
    for (let database of fs.readdirSync(databaseFolder)) {
        core.startGroup('Analyzing ' + database);
        const queries = queriesPerLanguage[database] || [];
        if (queries.length === 0) {
            throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
        const queries = [];
        if (!config.disableDefaultQueries) {
            queries.push(database + '-code-scanning.qls');
        }
        // Pass the queries to codeql using a file instead of using the command
        // line to avoid command line length restrictions, particularly on windows.
        const querySuite = path.join(databaseFolder, database + '-queries.qls');
        const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
        fs.writeFileSync(querySuite, querySuiteContents);
        core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
        queries.push(...(queriesPerLanguage[database] || []));
        const sarifFile = path.join(sarifFolder, database + '.sarif');
        await exec.exec(codeqlCmd, [
            'database',
            'analyze',
            getMemoryFlag(),
            path.join(databaseFolder, database),
            '--format=sarif-latest',
            '--output=' + sarifFile,
            '--no-sarif-add-snippets',
            querySuite
            ...queries
        ]);
        core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
        core.endGroup();
@@ -209,4 +142,3 @@ run().catch(e => {
    core.setFailed("analyze action failed: " + e);
    console.log(e);
});
//# sourceMappingURL=finalize-db.js.map
File diff suppressed because one or more lines are too long
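One detail worth noting in runQueries above: the selected queries are written into a `<database>-queries.qls` file rather than passed as individual command-line arguments, to stay clear of command-line length limits (particularly on Windows). A sketch of just that step, under the assumption that `queries` is already the resolved list of .ql paths:

const fs = require('fs');
const path = require('path');

// Write the query list to a suite file; `codeql database analyze` then
// receives one short file path instead of an arbitrarily long argument list.
function writeQuerySuite(databaseFolder, database, queries) {
    const querySuite = path.join(databaseFolder, database + '-queries.qls');
    const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
    fs.writeFileSync(querySuite, querySuiteContents);
    return querySuite;
}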
1 lib/fingerprints.js generated
@@ -245,4 +245,3 @@ function addFingerprints(sarifContents) {
    return JSON.stringify(sarif);
}
exports.addFingerprints = addFingerprints;
//# sourceMappingURL=fingerprints.js.map
File diff suppressed because one or more lines are too long
157 lib/fingerprints.test.js generated
@@ -1,157 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const fingerprints = __importStar(require("./fingerprints"));
function testHash(t, input, expectedHashes) {
    let index = 0;
    let callback = function (lineNumber, hash) {
        t.is(lineNumber, index + 1);
        t.is(hash, expectedHashes[index]);
        index++;
    };
    fingerprints.hash(callback, input);
    t.is(index, input.split(/\r\n|\r|\n/).length);
}
ava_1.default('hash', (t) => {
    // Try empty file
    testHash(t, "", ["c129715d7a2bc9a3:1"]);
    // Try various combinations of newline characters
    testHash(t, " a\nb\n \t\tc\n d", [
        "271789c17abda88f:1",
        "54703d4cd895b18:1",
        "180aee12dab6264:1",
        "a23a3dc5e078b07b:1"
    ]);
    testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
        "8b7cf3e952e7aeb2:1",
        "b1ae1287ec4718d9:1",
        "bff680108adb0fcc:1",
        "c6805c5e1288b612:1",
        "b86d3392aea1be30:1",
        "e6ceba753e1a442:1",
    ]);
    testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    testHash(t, " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    testHash(t, " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    testHash(t, " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n", [
        "e9496ae3ebfced30:1",
        "fb7c023a8b9ccb3f:1",
        "ce8ba1a563dcdaca:1",
        "e20e36e16fcb0cc8:1",
        "b3edc88f2938467e:1",
        "c8e28b0b4002a3a0:1",
        "c129715d7a2bc9a3:1",
    ]);
    // Try repeating line that will generate identical hashes
    testHash(t, "Lorem ipsum dolor sit amet.\n".repeat(10), [
        "a7f2ff13bc495cf2:1",
        "a7f2ff13bc495cf2:2",
        "a7f2ff13bc495cf2:3",
        "a7f2ff13bc495cf2:4",
        "a7f2ff13bc495cf2:5",
        "a7f2ff13bc495cf2:6",
        "a7f2ff1481e87703:1",
        "a9cf91f7bbf1862b:1",
        "55ec222b86bcae53:1",
        "cc97dc7b1d7d8f7b:1",
        "c129715d7a2bc9a3:1"
    ]);
});
function testResolveUriToFile(uri, index, artifactsURIs) {
    const location = { "uri": uri, "index": index };
    const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
    return fingerprints.resolveUriToFile(location, artifacts);
}
ava_1.default('resolveUriToFile', t => {
    // The resolveUriToFile method checks that the file exists and is in the right directory
    // so we need to give it real files to look at. We will use this file as an example.
    // For this to work we require the current working directory to be a parent, but this
    // should generally always be the case so this is fine.
    const cwd = process.cwd();
    const filepath = __filename;
    t.true(filepath.startsWith(cwd + '/'));
    const relativeFilepath = filepath.substring(cwd.length + 1);
    process.env['GITHUB_WORKSPACE'] = cwd;
    // Absolute paths are unmodified
    t.is(testResolveUriToFile(filepath, undefined, []), filepath);
    t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
    // Relative paths are made absolute
    t.is(testResolveUriToFile(relativeFilepath, undefined, []), filepath);
    t.is(testResolveUriToFile('file://' + relativeFilepath, undefined, []), filepath);
    // Absolute paths outside the src root are discarded
    t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
    t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
    // Other schemes are discarded
    t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
    t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
    // Invalid URIs are discarded
    t.is(testResolveUriToFile(1, undefined, []), undefined);
    t.is(testResolveUriToFile(undefined, undefined, []), undefined);
    // Non-existent files are discarded
    t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
    // Index is resolved
    t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
    t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
    // Invalid indexes are discarded
    t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
    t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
});
ava_1.default('addFingerprints', t => {
    // Run an end-to-end test on a test file
    let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
    let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
    // The test files are stored prettified, but addFingerprints outputs condensed JSON
    input = JSON.stringify(JSON.parse(input));
    expected = JSON.stringify(JSON.parse(expected));
    // The URIs in the SARIF files resolve to files in the testdata directory
    process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
    t.deepEqual(fingerprints.addFingerprints(input), expected);
});
ava_1.default('missingRegions', t => {
    // Run an end-to-end test on a test file
    let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
    let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
    // The test files are stored prettified, but addFingerprints outputs condensed JSON
    input = JSON.stringify(JSON.parse(input));
    expected = JSON.stringify(JSON.parse(expected));
    // The URIs in the SARIF files resolve to files in the testdata directory
    process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
    t.deepEqual(fingerprints.addFingerprints(input), expected);
});
//# sourceMappingURL=fingerprints.test.js.map
File diff suppressed because one or more lines are too long
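The expected values in the hash tests above all have the shape `<hash>:<occurrence>`: when several lines produce the same hash, a per-hash counter disambiguates them (hence `a7f2ff13bc495cf2:1` through `:6` for the repeated Lorem ipsum line). The action's real hash is context-sensitive over surrounding lines, which is why only some of the ten repeats collide; the toy below uses a simple per-line hash purely to illustrate the occurrence numbering:

const crypto = require('crypto');

// Illustrative only: hash each line independently (unlike the action's
// context-sensitive hash) and append an occurrence counter.
function fingerprintLines(text) {
    const seen = {};
    return text.split(/\r\n|\r|\n/).map(line => {
        const h = crypto.createHash('sha256').update(line).digest('hex').slice(0, 16);
        seen[h] = (seen[h] || 0) + 1;
        return h + ':' + seen[h];
    });
}

console.log(fingerprintLines('foo\nfoo\nbar'));
// The two identical lines share a hash but get suffixes :1 and :2.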
23 lib/setup-tools.js generated
@@ -37,22 +37,15 @@ exports.CodeQLSetup = CodeQLSetup;
async function setupCodeQL() {
    const version = '1.0.0';
    const codeqlURL = core.getInput('tools', { required: true });
    try {
        let codeqlFolder = toolcache.find('CodeQL', version);
        if (codeqlFolder) {
            core.debug(`CodeQL found in cache ${codeqlFolder}`);
        }
        else {
            const codeqlPath = await toolcache.downloadTool(codeqlURL);
            const codeqlExtracted = await toolcache.extractTar(codeqlPath);
            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
        }
        return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
    let codeqlFolder = toolcache.find('CodeQL', version);
    if (codeqlFolder) {
        core.debug(`CodeQL found in cache ${codeqlFolder}`);
    }
    catch (e) {
        core.error(e);
        throw new Error("Unable to download and extract CodeQL CLI");
    else {
        const codeqlPath = await toolcache.downloadTool(codeqlURL);
        const codeqlExtracted = await toolcache.extractTar(codeqlPath);
        codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
    }
    return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
}
exports.setupCodeQL = setupCodeQL;
//# sourceMappingURL=setup-tools.js.map
@@ -1 +0,0 @@
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAE7B,MAAa,WAAW;IAMpB,YAAY,UAAkB;QAC1B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAC9B,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC7B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACtB;SACJ;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACrC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC7B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACtC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SAC3B;aAAM;YACH,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC/D;IACL,CAAC;CACJ;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC7B,MAAM,OAAO,GAAG,OAAO,CAAC;IACxB,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7D,IAAI;QACA,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACrD,IAAI,YAAY,EAAE;YACd,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACvD;aAAM;YACH,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC;SAC/E;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE7D;IAAC,OAAO,CAAC,EAAE;QACR,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAChE;AACL,CAAC;AAnBD,kCAmBC"}
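setupCodeQL above follows the standard @actions/tool-cache pattern: look for a cached copy first, and only download, extract, and cache on a miss. Reduced to its essentials (the URL, tool name, and version passed in are placeholders for whatever the caller supplies):

const toolcache = require('@actions/tool-cache');
const core = require('@actions/core');

// Return the folder containing the tool, downloading it at most once
// per (name, version) on a given runner.
async function fetchTool(url, name, version) {
    let folder = toolcache.find(name, version);
    if (!folder) {
        const archive = await toolcache.downloadTool(url);
        const extracted = await toolcache.extractTar(archive);
        folder = await toolcache.cacheDir(extracted, name, version);
    }
    core.debug(`${name} available at ${folder}`);
    return folder;
}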
12 lib/setup-tracer.js generated
@@ -100,13 +100,12 @@ function concatTracerConfigs(configs) {
        totalCount += count;
        totalLines.push(...lines.slice(2));
    }
    const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
    const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
    const spec = path.resolve(tempFolder, 'compound-spec');
    const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
    const newLogFilePath = path.resolve(util.workspaceFolder(), 'compound-build-tracer.log');
    const spec = path.resolve(util.workspaceFolder(), 'compound-spec');
    const tempFolder = path.resolve(util.workspaceFolder(), 'compound-temp');
    const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
    if (copyExecutables) {
        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = tempFolder;
        envSize += 1;
    }
    fs.writeFileSync(spec, newSpecContent.join('\n'));
@@ -157,7 +156,7 @@ async function run() {
    // Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
    const codeqlRam = process.env['CODEQL_RAM'] || '6500';
    core.exportVariable('CODEQL_RAM', codeqlRam);
    const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
    const databaseFolder = path.resolve(util.workspaceFolder(), 'codeql_databases');
    await io.mkdirP(databaseFolder);
    let tracedLanguages = {};
    let scannedLanguages = [];
@@ -213,4 +212,3 @@ run().catch(e => {
    core.setFailed("init action failed: " + e);
    console.log(e);
});
//# sourceMappingURL=setup-tracer.js.map
File diff suppressed because one or more lines are too long
2 lib/shared-environment.js generated
@@ -3,7 +3,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_ACTION_CMD = 'CODEQL_ACTION_CMD';
exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
@@ -15,4 +14,3 @@ exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
// Populated when the init action completes successfully
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
//# sourceMappingURL=shared-environment.js.map
@@ -1 +0,0 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,iBAAiB,GAAG,mBAAmB,CAAC;AACxC,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AACpD,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}
1 lib/tracer-env.js generated
@@ -18,4 +18,3 @@ for (let entry of Object.entries(process.env)) {
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
//# sourceMappingURL=tracer-env.js.map
@@ -1 +0,0 @@
{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC3C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACpF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KACpB;CACJ;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}
87 lib/upload-lib.js generated
@@ -13,14 +13,26 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const http = __importStar(require("@actions/http-client"));
const auth = __importStar(require("@actions/http-client/auth"));
const io = __importStar(require("@actions/io"));
const file_url_1 = __importDefault(require("file-url"));
const fs = __importStar(require("fs"));
const jsonschema = __importStar(require("jsonschema"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
const fingerprints = __importStar(require("./fingerprints"));
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
// Construct the location of the sentinel file for detecting multiple uploads.
// The returned location should be writable.
async function getSentinelFilePath() {
    // Use the temp dir instead of placing next to the sarif file because of
    // issues with docker actions. The directory containing the sarif file
    // may not be writable by us.
    const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
    await io.mkdirP(uploadsTmpDir);
    // Hash the absolute path so we'll behave correctly in the unlikely
    // scenario a file is referenced twice with different paths.
    return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
}
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
@@ -46,11 +58,6 @@ exports.combineSarifFiles = combineSarifFiles;
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload) {
    core.info('Uploading results');
    // If in test mode we don't want to upload the results
    const testMode = process.env['TEST_MODE'] === 'true' || false;
    if (testMode) {
        return true;
    }
    const githubToken = core.getInput('token');
    const ph = new auth.BearerCredentialHandler(githubToken);
    const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
@@ -113,63 +120,25 @@ async function upload(input) {
    }
}
exports.upload = upload;
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) {
    let numResults = 0;
    for (const run of JSON.parse(sarif).runs) {
        numResults += run.results.length;
    }
    return numResults;
}
exports.countResultsInSarif = countResultsInSarif;
// Validates that the given file path refers to a valid SARIF file.
// Returns a non-empty list of error messages if the file is invalid,
// otherwise returns the empty list if the file is valid.
function validateSarifFileSchema(sarifFilePath) {
    const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
    const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
    const result = new jsonschema.Validator().validate(sarif, schema);
    if (result.valid) {
        return true;
    }
    else {
        // Set the failure message to the stacks of all the errors.
        // This should be of a manageable size and may even give enough to fix the error.
        const errorMessages = result.errors.map(e => "- " + e.stack);
        core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));
        // Also output the more verbose error messages in groups as these may be very large.
        for (const error of result.errors) {
            core.startGroup("Error details: " + error.stack);
            core.info(JSON.stringify(error, null, 2));
            core.endGroup();
        }
        return false;
    }
}
exports.validateSarifFileSchema = validateSarifFileSchema;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles) {
    core.startGroup("Uploading results");
    core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
    const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
    if (process.env[sentinelEnvVar]) {
        core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
    let succeeded = false;
    // Check if an upload has happened before. If so then abort.
    // This is intended to catch when the finish and upload-sarif actions
    // are used together, and then the upload-sarif action is invoked twice.
    const sentinelFile = await getSentinelFilePath();
    if (fs.existsSync(sentinelFile)) {
        core.info("Aborting as an upload has already happened from this job");
        return false;
    }
    core.exportVariable(sentinelEnvVar, sentinelEnvVar);
    // Validate that the files we were asked to upload are all valid SARIF files
    for (const file of sarifFiles) {
        if (!validateSarifFileSchema(file)) {
            return false;
        }
    }
    const commitOid = await util.getCommitOid();
    const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
    const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
    const ref = util.getRef();
    const analysisKey = await util.getAnalysisKey();
    const ref = util.getRequiredEnvParam('GITHUB_REF'); // it's in the form "refs/heads/master"
    const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
    const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
    core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
    let sarifPayload = combineSarifFiles(sarifFiles);
    sarifPayload = fingerprints.addFingerprints(sarifPayload);
    const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
@@ -188,7 +157,6 @@ async function uploadFiles(sarifFiles) {
    const payload = JSON.stringify({
        "commit_oid": commitOid,
        "ref": ref,
        "analysis_key": analysisKey,
        "analysis_name": analysisName,
        "sarif": zipped_sarif,
        "workflow_run_id": workflowRunID,
@@ -197,13 +165,10 @@ async function uploadFiles(sarifFiles) {
        "started_at": startedAt,
        "tool_names": toolNames,
    });
    // Log some useful debug info about the upload
    core.debug("Raw upload size: " + sarifPayload.length + " bytes");
    core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
    core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
    // Make the upload
    const succeeded = await uploadPayload(payload);
    succeeded = await uploadPayload(payload);
    // Mark that we have made an upload
    fs.writeFileSync(sentinelFile, '');
    core.endGroup();
    return succeeded;
}
//# sourceMappingURL=upload-lib.js.map
File diff suppressed because one or more lines are too long
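The duplicate-upload guard in uploadFiles above swaps an environment-variable sentinel for a marker file under the temp directory. The core of the idea, with the marker written after a successful upload as in the code above (the sentinel path and upload step here are illustrative):

const fs = require('fs');

// Returns false if this job already uploaded; otherwise runs the upload
// and drops a marker file so later steps in the same job skip theirs.
async function uploadOnce(sentinelFile, doUpload) {
    if (fs.existsSync(sentinelFile)) {
        return false;
    }
    const succeeded = await doUpload();
    fs.writeFileSync(sentinelFile, '');
    return succeeded;
}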
25 lib/upload-lib.test.js generated
@@ -1,25 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const uploadLib = __importStar(require("./upload-lib"));
ava_1.default('validateSarifFileSchema - valid', t => {
    const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
    t.true(uploadLib.validateSarifFileSchema(inputFile));
});
ava_1.default('validateSarifFileSchema - invalid', t => {
    const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
    t.false(uploadLib.validateSarifFileSchema(inputFile));
    // validateSarifFileSchema calls core.setFailed which sets the exit code on error
    process.exitCode = 0;
});
//# sourceMappingURL=upload-lib.test.js.map
@@ -1 +0,0 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,wDAA0C;AAE1C,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;IACtD,iFAAiF;IACjF,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}
1 lib/upload-sarif.js generated
@@ -32,4 +32,3 @@ run().catch(e => {
    core.setFailed("codeql/upload-sarif action failed: " + e);
    console.log(e);
});
//# sourceMappingURL=upload-sarif.js.map
@@ -1 +0,0 @@
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,GAAG;IACd,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,EAAE;QAC9F,OAAO;KACV;IAED,IAAI;QACA,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE;YACtD,MAAM,IAAI,CAAC,qBAAqB,CAAC,cAAc,CAAC,CAAC;SACpD;aAAM;YACH,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;SAC3D;KACJ;IAAC,OAAO,KAAK,EAAE;QACZ,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1E,OAAO;KACV;AACL,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACZ,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACnB,CAAC,CAAC,CAAC"}
107 lib/util.js generated
@@ -11,13 +11,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const http = __importStar(require("@actions/http-client"));
const auth = __importStar(require("@actions/http-client/auth"));
const octokit = __importStar(require("@octokit/rest"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const sharedEnv = __importStar(require("./shared-environment"));
/**
@@ -34,6 +31,12 @@ function should_abort(actionName, requireInitActionHasRun) {
        core.setFailed('GITHUB_REF must be set.');
        return true;
    }
    // Should abort if called on a merge commit for a pull request.
    if (ref.startsWith('refs/pull/')) {
        core.warning('The CodeQL ' + actionName + ' action is intended for workflows triggered on `push` events, '
            + 'but the current workflow is running on a pull request. Aborting.');
        return true;
    }
    // If the init action is required, then check that it completed successfully.
    if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
        core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
@@ -42,6 +45,16 @@ function should_abort(actionName, requireInitActionHasRun) {
    return false;
}
exports.should_abort = should_abort;
/**
 * Resolve the path to the workspace folder.
 */
function workspaceFolder() {
    let workspaceFolder = process.env['RUNNER_WORKSPACE'];
    if (!workspaceFolder)
        workspaceFolder = path.resolve('..');
    return workspaceFolder;
}
exports.workspaceFolder = workspaceFolder;
/**
 * Get an environment parameter, but throw an error if it is not set.
 */
@@ -136,82 +149,6 @@ async function getLanguages() {
    return languages;
}
exports.getLanguages = getLanguages;
/**
 * Gets the SHA of the commit that is currently checked out.
 */
async function getCommitOid() {
    let commitOid = '';
    await exec.exec('git', ['rev-parse', 'HEAD'], {
        silent: true,
        listeners: {
            stdout: (data) => { commitOid += data.toString(); },
            stderr: (data) => { process.stderr.write(data); }
        }
    });
    return commitOid.trim();
}
exports.getCommitOid = getCommitOid;
/**
 * Get the path of the currently executing workflow.
 */
async function getWorkflowPath() {
    const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
    const owner = repo_nwo[0];
    const repo = repo_nwo[1];
    const run_id = getRequiredEnvParam('GITHUB_RUN_ID');
    const ok = new octokit.Octokit({
        auth: core.getInput('token'),
        userAgent: "CodeQL Action",
        log: console_log_level_1.default({ level: 'debug' })
    });
    const runsResponse = await ok.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
        owner,
        repo,
        run_id
    });
    const workflowUrl = runsResponse.data.workflow_url;
    const workflowResponse = await ok.request('GET ' + workflowUrl);
    return workflowResponse.data.path;
}
/**
 * Get the analysis key parameter for the current job.
 *
 * This will combine the workflow path and current job name.
 * Computing this the first time requires making requests to
 * the github API, but after that the result will be cached.
 */
async function getAnalysisKey() {
    let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
    if (analysisKey !== undefined) {
        return analysisKey;
    }
    const workflowPath = await getWorkflowPath();
    const jobName = getRequiredEnvParam('GITHUB_JOB');
    analysisKey = workflowPath + ':' + jobName;
    core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
    return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
/**
 * Get the ref currently being analyzed.
 */
function getRef() {
    // Will be in the form "refs/heads/master" on a push event
    // or in the form "refs/pull/N/merge" on a pull_request event
    const ref = getRequiredEnvParam('GITHUB_REF');
    // For pull request refs we want to convert from the 'merge' ref
    // to the 'head' ref, as that is what we want to analyse.
    // There should have been some code earlier in the workflow to do
    // the checkout, but we have no way of verifying that here.
    const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
    if (pull_ref_regex.test(ref)) {
        return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
    }
    else {
        return ref;
    }
}
exports.getRef = getRef;
/**
 * Compose a StatusReport.
 *
@@ -222,7 +159,6 @@ exports.getRef = getRef;
 */
async function createStatusReport(actionName, status, cause, exception) {
    const commitOid = process.env['GITHUB_SHA'] || '';
    const ref = getRef();
    const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
    let workflowRunID = -1;
    if (workflowRunIDStr) {
@@ -239,7 +175,6 @@ async function createStatusReport(actionName, status, cause, exception) {
        job_name: jobName,
        languages: languages,
        commit_oid: commitOid,
        ref: ref,
        action_name: actionName,
        action_oid: "unknown",
        started_at: startedAt,
@@ -345,13 +280,3 @@ function getToolNames(sarifContents) {
    return Object.keys(toolNames);
}
exports.getToolNames = getToolNames;
// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
async function withTmpDir(body) {
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
    const result = await body(tmpDir);
    fs.rmdirSync(tmpDir, { recursive: true });
    return result;
}
exports.withTmpDir = withTmpDir;
//# sourceMappingURL=util.js.map
File diff suppressed because one or more lines are too long
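getRef above normalizes a pull request's `refs/pull/N/merge` ref to the corresponding `refs/pull/N/head` ref, which is what the workflow is expected to have checked out for analysis, and passes all other refs through untouched. The same conversion as a small pure function:

// Convert a pull request merge ref to its head ref; leave other refs alone.
function pullHeadRef(ref) {
    const pullRefRegex = /refs\/pull\/(\d+)\/merge/;
    return pullRefRegex.test(ref) ? ref.replace(pullRefRegex, 'refs/pull/$1/head') : ref;
}

console.log(pullHeadRef('refs/pull/42/merge')); // refs/pull/42/head
console.log(pullHeadRef('refs/heads/master')); // refs/heads/master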
21 lib/util.test.js generated
@@ -1,21 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const util = __importStar(require("./util"));
ava_1.default('getToolNames', t => {
    const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
    const toolNames = util.getToolNames(input);
    t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
//# sourceMappingURL=util.test.js.map
@@ -1 +0,0 @@
{"version":3,"file":"util.test.js","sourceRoot":"","sources":["../src/util.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AAEzB,6CAA+B;AAE/B,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACvB,MAAM,KAAK,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,GAAG,mCAAmC,EAAE,MAAM,CAAC,CAAC;IACvF,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC"}
1 node_modules/.bin/atob generated vendored Symbolic link
@@ -0,0 +1 @@
../atob/bin/atob.js
1 node_modules/.bin/ava generated vendored
@@ -1 +0,0 @@
../ava/cli.js
1 node_modules/.bin/escodegen generated vendored Symbolic link
@@ -0,0 +1 @@
../escodegen/bin/escodegen.js
1 node_modules/.bin/esgenerate generated vendored Symbolic link
@@ -0,0 +1 @@
../escodegen/bin/esgenerate.js
1 node_modules/.bin/esparse generated vendored Symbolic link
@@ -0,0 +1 @@
../esprima/bin/esparse.js
1 node_modules/.bin/esvalidate generated vendored Symbolic link
@@ -0,0 +1 @@
../esprima/bin/esvalidate.js
1 node_modules/.bin/jest generated vendored Symbolic link
@@ -0,0 +1 @@
../jest/bin/jest.js
1 node_modules/.bin/jest-runtime generated vendored Symbolic link
@@ -0,0 +1 @@
../jest-runtime/bin/jest-runtime.js
1 node_modules/.bin/jsesc generated vendored Symbolic link
@@ -0,0 +1 @@
../jsesc/bin/jsesc
1 node_modules/.bin/json5 generated vendored Symbolic link
@@ -0,0 +1 @@
../json5/lib/cli.js
1 node_modules/.bin/parser generated vendored Symbolic link
@@ -0,0 +1 @@
../@babel/parser/bin/babel-parser.js
1 node_modules/.bin/rc generated vendored
@@ -1 +0,0 @@
../rc/cli.js
1 node_modules/.bin/removeNPMAbsolutePaths generated vendored
@@ -1 +0,0 @@
../removeNPMAbsolutePaths/bin/removeNPMAbsolutePaths
1 node_modules/.bin/sane generated vendored Symbolic link
@@ -0,0 +1 @@
../sane/src/cli.js
1 node_modules/.bin/sshpk-conv generated vendored Symbolic link
@@ -0,0 +1 @@
../sshpk/bin/sshpk-conv
1 node_modules/.bin/sshpk-sign generated vendored Symbolic link
@@ -0,0 +1 @@
../sshpk/bin/sshpk-sign
1 node_modules/.bin/sshpk-verify generated vendored Symbolic link
@@ -0,0 +1 @@
../sshpk/bin/sshpk-verify
1 node_modules/.bin/ts-jest generated vendored Symbolic link
@@ -0,0 +1 @@
../ts-jest/cli.js
1 node_modules/.bin/watch generated vendored Symbolic link
@@ -0,0 +1 @@
../@cnakazawa/watch/cli.js
8 node_modules/@actions/http-client/README.md generated vendored
@@ -18,8 +18,6 @@ A lightweight HTTP client optimized for use with actions, TypeScript with generi
- Basic, Bearer and PAT Support out of the box. Extensible handlers for others.
- Redirects supported

Features and releases [here](./RELEASES.md)

## Install

```
@@ -51,11 +49,7 @@ export NODE_DEBUG=http

## Node support

The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node12+.

## Support and Versioning

We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat).
The http-client is built using the latest LTS version of Node 12. We also support the latest LTS for Node 6, 8 and Node 10.

## Contributing
16 node_modules/@actions/http-client/RELEASES.md generated vendored
@@ -1,16 +0,0 @@
## Releases

## 1.0.7
Update NPM dependencies and add 429 to the list of HttpCodes

## 1.0.6
Automatically sends Content-Type and Accept application/json headers for \<verb>Json() helper methods if not set in the client or parameters.

## 1.0.5
Adds \<verb>Json() helper methods for json over http scenarios.

## 1.0.4
Started to add \<verb>Json() helper methods. Do not use this release for that. Use >= 1.0.5 since there was an issue with types.

## 1.0.1 to 1.0.3
Adds proxy support.
7 node_modules/@actions/http-client/auth.js generated vendored
@@ -6,9 +6,7 @@ class BasicCredentialHandler {
        this.password = password;
    }
    prepareRequest(options) {
        options.headers['Authorization'] =
            'Basic ' +
                Buffer.from(this.username + ':' + this.password).toString('base64');
        options.headers['Authorization'] = 'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
@@ -44,8 +42,7 @@ class PersonalAccessTokenCredentialHandler {
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        options.headers['Authorization'] =
            'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
        options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
24 node_modules/@actions/http-client/index.d.ts generated vendored
@@ -1,5 +1,5 @@
/// <reference types="node" />
import http = require('http');
import http = require("http");
import ifm = require('./interfaces');
export declare enum HttpCodes {
    OK = 200,
@@ -23,20 +23,12 @@ export declare enum HttpCodes {
    RequestTimeout = 408,
    Conflict = 409,
    Gone = 410,
    TooManyRequests = 429,
    InternalServerError = 500,
    NotImplemented = 501,
    BadGateway = 502,
    ServiceUnavailable = 503,
    GatewayTimeout = 504
}
export declare enum Headers {
    Accept = "accept",
    ContentType = "content-type"
}
export declare enum MediaTypes {
    ApplicationJson = "application/json"
}
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -47,6 +39,11 @@ export declare class HttpClientResponse implements ifm.IHttpClientResponse {
    message: http.IncomingMessage;
    readBody(): Promise<string>;
}
export interface ITypedResponse<T> {
    statusCode: number;
    result: T | null;
    headers: Object;
}
export declare function isHttps(requestUrl: string): boolean;
export declare class HttpClient {
    userAgent: string | undefined;
@@ -76,10 +73,10 @@ export declare class HttpClient {
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
    postJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
    putJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
    patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
    getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
    postJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
    putJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
    patchJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
@@ -111,7 +108,6 @@ export declare class HttpClient {
    getAgent(serverUrl: string): http.Agent;
    private _prepareRequest;
    private _mergeHeaders;
    private _getExistingOrDefaultHeader;
    private _getAgent;
    private _performExponentialBackoff;
    private static dateTimeDeserializer;
111 node_modules/@actions/http-client/index.js generated vendored
@@ -28,22 +28,12 @@ var HttpCodes;
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -53,18 +43,8 @@ function getProxyUrl(serverUrl) {
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
@@ -156,29 +136,22 @@ class HttpClient {
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    async getJson(requestUrl, additionalHeaders = {}) {
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
    async getJson(requestUrl, additionalHeaders) {
        let res = await this.get(requestUrl, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async postJson(requestUrl, obj, additionalHeaders = {}) {
    async postJson(requestUrl, obj, additionalHeaders) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.post(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async putJson(requestUrl, obj, additionalHeaders = {}) {
    async putJson(requestUrl, obj, additionalHeaders) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.put(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async patchJson(requestUrl, obj, additionalHeaders = {}) {
    async patchJson(requestUrl, obj, additionalHeaders) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.patch(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
@@ -189,22 +162,18 @@ class HttpClient {
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
            throw new Error("Client has already been disposed.");
        }
        let parsedUrl = url.parse(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
            if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
@@ -222,32 +191,21 @@ class HttpClient {
            }
        }
        let redirectsRemaining = this._maxRedirects;
        while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
            this._allowRedirects &&
            redirectsRemaining > 0) {
            const redirectUrl = response.message.headers['location'];
        while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
            && this._allowRedirects
            && redirectsRemaining > 0) {
||||
const redirectUrl = response.message.headers["location"];
|
||||
if (!redirectUrl) {
|
||||
// if there's no location to redirect to, we won't
|
||||
break;
|
||||
}
|
||||
let parsedRedirectUrl = url.parse(redirectUrl);
|
||||
if (parsedUrl.protocol == 'https:' &&
|
||||
parsedUrl.protocol != parsedRedirectUrl.protocol &&
|
||||
!this._allowRedirectDowngrade) {
|
||||
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
|
||||
if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) {
|
||||
throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.");
|
||||
}
|
||||
// we need to finish reading the response before reassigning response
|
||||
// which will leak the open socket.
|
||||
await response.readBody();
|
||||
// strip authorization header if redirected to a different hostname
|
||||
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
|
||||
for (let header in headers) {
|
||||
// header names are case insensitive
|
||||
if (header.toLowerCase() === 'authorization') {
|
||||
delete headers[header];
|
||||
}
|
||||
}
|
||||
}
|
||||
// let's make the request with the new redirectUrl
|
||||
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
|
||||
response = await this.requestRaw(info, data);
|
||||
@@ -298,8 +256,8 @@ class HttpClient {
|
||||
*/
|
||||
requestRawWithCallback(info, data, onResult) {
|
||||
let socket;
|
||||
if (typeof data === 'string') {
|
||||
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
|
||||
if (typeof (data) === 'string') {
|
||||
info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
|
||||
}
|
||||
let callbackCalled = false;
|
||||
let handleResult = (err, res) => {
|
||||
@@ -312,7 +270,7 @@ class HttpClient {
|
||||
let res = new HttpClientResponse(msg);
|
||||
handleResult(null, res);
|
||||
});
|
||||
req.on('socket', sock => {
|
||||
req.on('socket', (sock) => {
|
||||
socket = sock;
|
||||
});
|
||||
// If we ever get disconnected, we want the socket to timeout eventually
|
||||
@@ -327,10 +285,10 @@ class HttpClient {
|
||||
// res should have headers
|
||||
handleResult(err, null);
|
||||
});
|
||||
if (data && typeof data === 'string') {
|
||||
if (data && typeof (data) === 'string') {
|
||||
req.write(data, 'utf8');
|
||||
}
|
||||
if (data && typeof data !== 'string') {
|
||||
if (data && typeof (data) !== 'string') {
|
||||
data.on('close', function () {
|
||||
req.end();
|
||||
});
|
||||
@@ -357,40 +315,29 @@ class HttpClient {
|
||||
const defaultPort = usingSsl ? 443 : 80;
|
||||
info.options = {};
|
||||
info.options.host = info.parsedUrl.hostname;
|
||||
info.options.port = info.parsedUrl.port
|
||||
? parseInt(info.parsedUrl.port)
|
||||
: defaultPort;
|
||||
info.options.path =
|
||||
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
|
||||
info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
|
||||
info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
|
||||
info.options.method = method;
|
||||
info.options.headers = this._mergeHeaders(headers);
|
||||
if (this.userAgent != null) {
|
||||
info.options.headers['user-agent'] = this.userAgent;
|
||||
info.options.headers["user-agent"] = this.userAgent;
|
||||
}
|
||||
info.options.agent = this._getAgent(info.parsedUrl);
|
||||
// gives handlers an opportunity to participate
|
||||
if (this.handlers) {
|
||||
this.handlers.forEach(handler => {
|
||||
this.handlers.forEach((handler) => {
|
||||
handler.prepareRequest(info.options);
|
||||
});
|
||||
}
|
||||
return info;
|
||||
}
|
||||
_mergeHeaders(headers) {
|
||||
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
||||
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
|
||||
if (this.requestOptions && this.requestOptions.headers) {
|
||||
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
|
||||
}
|
||||
return lowercaseKeys(headers || {});
|
||||
}
|
||||
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
|
||||
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
||||
let clientHeader;
|
||||
if (this.requestOptions && this.requestOptions.headers) {
|
||||
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
|
||||
}
|
||||
return additionalHeaders[header] || clientHeader || _default;
|
||||
}
|
||||
_getAgent(parsedUrl) {
|
||||
let agent;
|
||||
let proxyUrl = pm.getProxyUrl(parsedUrl);
|
||||
@@ -422,7 +369,7 @@ class HttpClient {
|
||||
proxyAuth: proxyUrl.auth,
|
||||
host: proxyUrl.hostname,
|
||||
port: proxyUrl.port
|
||||
}
|
||||
},
|
||||
};
|
||||
let tunnelAgent;
|
||||
const overHttps = proxyUrl.protocol === 'https:';
|
||||
@@ -449,9 +396,7 @@ class HttpClient {
|
||||
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
|
||||
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
|
||||
// we have to cast it to any and change it directly
|
||||
agent.options = Object.assign(agent.options || {}, {
|
||||
rejectUnauthorized: false
|
||||
});
|
||||
agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
|
||||
}
|
||||
return agent;
|
||||
}
|
||||
@@ -512,7 +457,7 @@ class HttpClient {
|
||||
msg = contents;
|
||||
}
|
||||
else {
|
||||
msg = 'Failed request: (' + statusCode + ')';
|
||||
msg = "Failed request: (" + statusCode + ")";
|
||||
}
|
||||
let err = new Error(msg);
|
||||
// attach statusCode and body obj (if available) to the error object
|
||||
|
||||
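The hunks above show the 1.0.8 JSON helpers defaulting the `Accept` and `Content-Type` headers to `application/json`, and `request` retrying only the idempotent verbs in `RetryableHttpVerbs`. A minimal usage sketch under that surface (the user-agent string and URL are illustrative, not taken from this repository):

```js
const httpm = require('@actions/http-client');

async function main() {
  // allowRetries/maxRetries feed the retry loop in request(); only
  // OPTIONS/GET/DELETE/HEAD requests are ever retried.
  const client = new httpm.HttpClient('sample-agent', [], {
    allowRetries: true,
    maxRetries: 2
  });
  // getJson resolves to { statusCode, result, headers }; a 404 yields
  // result === null, while other 4xx/5xx responses reject the promise.
  const res = await client.getJson('https://api.github.com/rate_limit');
  console.log(res.statusCode, res.result);
}

main().catch(err => console.error(err.statusCode, err.message));
```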
9 node_modules/@actions/http-client/interfaces.d.ts generated vendored
@@ -1,6 +1,6 @@
/// <reference types="node" />
import http = require('http');
import url = require('url');
import http = require("http");
import url = require("url");
export interface IHeaders {
    [key: string]: any;
}
@@ -43,8 +43,3 @@ export interface IRequestOptions {
    allowRetries?: boolean;
    maxRetries?: number;
}
export interface ITypedResponse<T> {
    statusCode: number;
    result: T | null;
    headers: Object;
}

1 node_modules/@actions/http-client/interfaces.js generated vendored
@@ -1,2 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
;

39 node_modules/@actions/http-client/node_modules/tunnel/package.json generated vendored
@@ -1,7 +1,25 @@
{
  "name": "tunnel",
  "version": "0.0.6",
  "author": {
    "name": "Koichi Kobayashi",
    "email": "koichik@improvement.jp"
  },
  "bugs": {
    "url": "https://github.com/koichik/node-tunnel/issues"
  },
  "bundleDependencies": false,
  "deprecated": false,
  "description": "Node HTTP/HTTPS Agents for tunneling proxies",
  "devDependencies": {
    "mocha": "^5.2.0",
    "should": "^13.2.3"
  },
  "directories": {
    "lib": "./lib"
  },
  "engines": {
    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
  },
  "homepage": "https://github.com/koichik/node-tunnel/",
  "keywords": [
    "http",
    "https",
@@ -9,26 +27,15 @@
    "proxy",
    "tunnel"
  ],
  "homepage": "https://github.com/koichik/node-tunnel/",
  "bugs": "https://github.com/koichik/node-tunnel/issues",
  "license": "MIT",
  "author": "Koichi Kobayashi <koichik@improvement.jp>",
  "main": "./index.js",
  "directories": {
    "lib": "./lib"
  },
  "name": "tunnel",
  "repository": {
    "type": "git",
    "url": "https://github.com/koichik/node-tunnel.git"
    "url": "git+https://github.com/koichik/node-tunnel.git"
  },
  "scripts": {
    "test": "mocha"
  },
  "devDependencies": {
    "mocha": "^5.2.0",
    "should": "^13.2.3"
  },
  "engines": {
    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
  }
  "version": "0.0.6"
}

56 node_modules/@actions/http-client/package.json generated vendored
@@ -1,39 +1,39 @@
{
  "name": "@actions/http-client",
  "version": "1.0.8",
  "author": {
    "name": "GitHub, Inc."
  },
  "bugs": {
    "url": "https://github.com/actions/http-client/issues"
  },
  "bundleDependencies": false,
  "dependencies": {
    "tunnel": "0.0.6"
  },
  "deprecated": false,
  "description": "Actions Http Client",
  "main": "index.js",
  "scripts": {
    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
    "test": "jest",
    "format": "prettier --write *.ts && prettier --write **/*.ts",
    "format-check": "prettier --check *.ts && prettier --check **/*.ts",
    "audit-check": "npm audit --audit-level=moderate"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/http-client.git"
  "devDependencies": {
    "@types/jest": "^24.0.25",
    "@types/node": "^12.12.24",
    "jest": "^24.9.0",
    "proxy": "^1.0.1",
    "ts-jest": "^24.3.0",
    "typescript": "^3.7.4"
  },
  "homepage": "https://github.com/actions/http-client#readme",
  "keywords": [
    "Actions",
    "Http"
  ],
  "author": "GitHub, Inc.",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/actions/http-client/issues"
  "main": "index.js",
  "name": "@actions/http-client",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/http-client.git"
  },
  "homepage": "https://github.com/actions/http-client#readme",
  "devDependencies": {
    "@types/jest": "^25.1.4",
    "@types/node": "^12.12.31",
    "jest": "^25.1.0",
    "prettier": "^2.0.4",
    "proxy": "^1.0.1",
    "ts-jest": "^25.2.1",
    "typescript": "^3.8.3"
  "scripts": {
    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
    "test": "jest"
  },
  "dependencies": {
    "tunnel": "0.0.6"
  }
  "version": "1.0.4"
}

13 node_modules/@actions/http-client/proxy.js generated vendored
@@ -9,10 +9,12 @@ function getProxyUrl(reqUrl) {
    }
    let proxyVar;
    if (usingSsl) {
        proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
        proxyVar = process.env["https_proxy"] ||
            process.env["HTTPS_PROXY"];
    }
    else {
        proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
        proxyVar = process.env["http_proxy"] ||
            process.env["HTTP_PROXY"];
    }
    if (proxyVar) {
        proxyUrl = url.parse(proxyVar);
@@ -24,7 +26,7 @@ function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || '';
    if (!noProxy) {
        return false;
    }
@@ -45,10 +47,7 @@ function checkBypass(reqUrl) {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Compare request host against noproxy
    for (let upperNoProxyItem of noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)) {
    for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) {
        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
            return true;
        }

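For context on the hunks above: `getProxyUrl` picks the proxy from whichever of `https_proxy`/`http_proxy` matches the request scheme, and `checkBypass` compares the request host (with and without its default port) case-insensitively against the comma-separated `no_proxy` list. A small sketch of that behavior, assuming the module's exported `getProxyUrl`/`checkBypass` functions; the hostnames and port here are made up:

```js
const url = require('url');
const pm = require('@actions/http-client/proxy');

process.env['https_proxy'] = 'https://proxy.example.test:8443';
process.env['no_proxy'] = 'internal.example.test';

// Host not in no_proxy: the https_proxy value is used for https requests.
const proxied = pm.getProxyUrl(url.parse('https://api.github.com'));
console.log(proxied && proxied.href); //=> 'https://proxy.example.test:8443/'

// Host listed in no_proxy: bypassed, so no proxy URL is returned.
console.log(pm.checkBypass(url.parse('https://internal.example.test'))); //=> true
console.log(pm.getProxyUrl(url.parse('https://internal.example.test'))); //=> undefined
```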
63 node_modules/@ava/typescript/README.md generated vendored
@@ -1,63 +0,0 @@
# @ava/typescript

Adds rudimentary [TypeScript](https://www.typescriptlang.org/) support to [AVA](https://avajs.dev).

This is designed to work for projects that precompile TypeScript. It allows AVA to load the compiled JavaScript, while configuring AVA to treat the TypeScript files as test files.

In other words, say you have a test file at `src/test.ts`. You've configured TypeScript to output to `build/`. Using `@ava/typescript` you can run the test using `npx ava src/test.ts`.

## Enabling TypeScript support

Add this package to your project:

```console
npm install --save-dev @ava/typescript
```

Then, enable TypeScript support either in `package.json` or `ava.config.*`:

**`package.json`:**

```json
{
	"ava": {
		"typescript": {
			"rewritePaths": {
				"src/": "build/"
			}
		}
	}
}
```

Both keys and values of the `rewritePaths` object must end with a `/`. Paths are relative to your project directory.

Output files are expected to have the `.js` extension.

AVA searches your entire project for `*.js`, `*.cjs`, `*.mjs` and `*.ts` files (or other extensions you've configured). It will ignore such files found in the `rewritePaths` targets (e.g. `build/`). If you use more specific paths, for instance `build/main/`, you may need to change AVA's `files` configuration to ignore other directories.

## Add additional extensions

You can configure AVA to recognize additional file extensions. To add (partial†) JSX support:

**`package.json`:**

```json
{
	"ava": {
		"typescript": {
			"extensions": [
				"ts",
				"tsx"
			],
			"rewritePaths": {
				"src/": "build/"
			}
		}
	}
}
```

See also AVA's [`extensions` option](https://github.com/avajs/ava/blob/master/docs/06-configuration.md#options).

† Note that the [*preserve* mode for JSX](https://www.typescriptlang.org/docs/handbook/jsx.html) is not (yet) supported.

136 node_modules/@ava/typescript/index.js generated vendored
@@ -1,136 +0,0 @@
'use strict';
const path = require('path');

const escapeStringRegexp = require('escape-string-regexp');

const pkg = require('./package.json');

function isPlainObject(x) {
  return x !== null && typeof x === 'object' && Reflect.getPrototypeOf(x) === Object.prototype;
}

function isValidExtensions(extensions) {
  return Array.isArray(extensions) &&
    extensions.length > 0 &&
    extensions.every(ext => typeof ext === 'string' && ext !== '') &&
    new Set(extensions).size === extensions.length;
}

function isValidRewritePaths(rewritePaths) {
  if (!isPlainObject(rewritePaths)) {
    return false;
  }

  return Object.entries(rewritePaths).every(([from, to]) => {
    return from.endsWith('/') && typeof to === 'string' && to.endsWith('/');
  });
}

module.exports = ({negotiateProtocol}) => {
  const protocol = negotiateProtocol(['ava-3.2', 'ava-3'], {version: pkg.version});
  if (protocol === null) {
    return;
  }

  return {
    main({config}) {
      let valid = false;
      if (isPlainObject(config)) {
        const keys = Object.keys(config);
        if (keys.every(key => key === 'extensions' || key === 'rewritePaths')) {
          valid =
            (config.extensions === undefined || isValidExtensions(config.extensions)) &&
            isValidRewritePaths(config.rewritePaths);
        }
      }

      if (!valid) {
        throw new Error(`Unexpected Typescript configuration for AVA. See https://github.com/avajs/typescript/blob/v${pkg.version}/README.md for allowed values.`);
      }

      const {
        extensions = ['ts'],
        rewritePaths: relativeRewritePaths
      } = config;

      const rewritePaths = Object.entries(relativeRewritePaths).map(([from, to]) => [
        path.join(protocol.projectDir, from),
        path.join(protocol.projectDir, to)
      ]);
      const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);

      return {
        async compile() {
          return {
            extensions: extensions.slice(),
            rewritePaths: rewritePaths.slice()
          };
        },

        get extensions() {
          return extensions.slice();
        },

        ignoreChange(filePath) {
          if (!testFileExtension.test(filePath)) {
            return false;
          }

          return rewritePaths.some(([from]) => filePath.startsWith(from));
        },

        resolveTestFile(testfile) {
          if (!testFileExtension.test(testfile)) {
            return testfile;
          }

          const rewrite = rewritePaths.find(([from]) => testfile.startsWith(from));
          if (rewrite === undefined) {
            return testfile;
          }

          const [from, to] = rewrite;
          // TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
          return `${to}${testfile.slice(from.length)}`.replace(testFileExtension, '.js');
        },

        updateGlobs({filePatterns, ignoredByWatcherPatterns}) {
          return {
            filePatterns: [
              ...filePatterns,
              '!**/*.d.ts',
              ...Object.values(relativeRewritePaths).map(to => `!${to}**`)
            ],
            ignoredByWatcherPatterns: [
              ...ignoredByWatcherPatterns,
              ...Object.values(relativeRewritePaths).map(to => `${to}**/*.js.map`)
            ]
          };
        }
      };
    },

    worker({extensionsToLoadAsModules, state: {extensions, rewritePaths}}) {
      const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);

      return {
        canLoad(ref) {
          return testFileExtension.test(ref) && rewritePaths.some(([from]) => ref.startsWith(from));
        },

        async load(ref, {requireFn}) {
          for (const extension of extensionsToLoadAsModules) {
            if (ref.endsWith(`.${extension}`)) {
              throw new Error('@ava/typescript cannot yet load ESM files');
            }
          }

          const [from, to] = rewritePaths.find(([from]) => ref.startsWith(from));
          // TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
          const rewritten = `${to}${ref.slice(from.length)}`.replace(testFileExtension, '.js');
          return requireFn(rewritten);
        }
      };
    }
  };
};

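The deleted `resolveTestFile`/`load` logic above boils down to one rewrite rule: if a file matches a configured extension and lives under a `rewritePaths` source prefix, swap the prefix for the output prefix and the extension for `.js`. A self-contained restatement of that rule, not the plugin API; the paths are examples, and real code should regex-escape the extensions as the module does with escape-string-regexp:

```js
function rewriteTestPath(testFile, rewritePaths, extensions = ['ts']) {
  // Matches e.g. `.ts` at the end of the path.
  const testFileExtension = new RegExp(`\\.(${extensions.join('|')})$`);
  if (!testFileExtension.test(testFile)) return testFile;

  const rewrite = Object.entries(rewritePaths).find(([from]) => testFile.startsWith(from));
  if (!rewrite) return testFile;

  // Swap the source prefix for the build prefix, then the extension for .js.
  const [from, to] = rewrite;
  return `${to}${testFile.slice(from.length)}`.replace(testFileExtension, '.js');
}

console.log(rewriteTestPath('src/test.ts', { 'src/': 'build/' }));  //=> 'build/test.js'
console.log(rewriteTestPath('src/helpers.js', { 'src/': 'build/' })); //=> 'src/helpers.js'
```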
18 node_modules/@ava/typescript/node_modules/escape-string-regexp/index.d.ts generated vendored
@@ -1,18 +0,0 @@
/**
Escape RegExp special characters.

You can also use this to escape a string that is inserted into the middle of a regex, for example, into a character class.

@example
```
import escapeStringRegexp = require('escape-string-regexp');

const escapedString = escapeStringRegexp('How much $ for a 🦄?');
//=> 'How much \\$ for a 🦄\\?'

new RegExp(escapedString);
```
*/
declare const escapeStringRegexp: (string: string) => string;

export = escapeStringRegexp;

11 node_modules/@ava/typescript/node_modules/escape-string-regexp/index.js generated vendored
@@ -1,11 +0,0 @@
'use strict';

const matchOperatorsRegex = /[|\\{}()[\]^$+*?.-]/g;

module.exports = string => {
  if (typeof string !== 'string') {
    throw new TypeError('Expected a string');
  }

  return string.replace(matchOperatorsRegex, '\\$&');
};

43 node_modules/@ava/typescript/node_modules/escape-string-regexp/package.json generated vendored
@@ -1,43 +0,0 @@
{
  "name": "escape-string-regexp",
  "version": "2.0.0",
  "description": "Escape RegExp special characters",
  "license": "MIT",
  "repository": "sindresorhus/escape-string-regexp",
  "author": {
    "name": "Sindre Sorhus",
    "email": "sindresorhus@gmail.com",
    "url": "sindresorhus.com"
  },
  "maintainers": [
    "Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)",
    "Joshua Boy Nicolai Appelman <joshua@jbna.nl> (jbna.nl)"
  ],
  "engines": {
    "node": ">=8"
  },
  "scripts": {
    "test": "xo && ava && tsd"
  },
  "files": [
    "index.js",
    "index.d.ts"
  ],
  "keywords": [
    "escape",
    "regex",
    "regexp",
    "re",
    "regular",
    "expression",
    "string",
    "str",
    "special",
    "characters"
  ],
  "devDependencies": {
    "ava": "^1.4.1",
    "tsd": "^0.7.2",
    "xo": "^0.24.0"
  }
}

29 node_modules/@ava/typescript/node_modules/escape-string-regexp/readme.md generated vendored
@@ -1,29 +0,0 @@
# escape-string-regexp [](https://travis-ci.org/sindresorhus/escape-string-regexp)

> Escape RegExp special characters

## Install

```
$ npm install escape-string-regexp
```

## Usage

```js
const escapeStringRegexp = require('escape-string-regexp');

const escapedString = escapeStringRegexp('How much $ for a 🦄?');
//=> 'How much \\$ for a 🦄\\?'

new RegExp(escapedString);
```

You can also use this to escape a string that is inserted into the middle of a regex, for example, into a character class.

## License

MIT © [Sindre Sorhus](https://sindresorhus.com)

42 node_modules/@ava/typescript/package.json generated vendored
@@ -1,42 +0,0 @@
{
  "name": "@ava/typescript",
  "version": "1.1.1",
  "description": "TypeScript provider for AVA",
  "engines": {
    "node": ">=10.18.0 <11 || >=12.14.0 <13 || >=13.5.0"
  },
  "files": [
    "index.js"
  ],
  "author": "Mark Wubben (https://novemberborn.net)",
  "repository": "avajs/typescript",
  "license": "MIT",
  "keywords": [
    "ava",
    "typescript"
  ],
  "scripts": {
    "test": "xo && nyc ava"
  },
  "dependencies": {
    "escape-string-regexp": "^2.0.0"
  },
  "devDependencies": {
    "ava": "^3.0.0",
    "execa": "^4.0.0",
    "nyc": "^15.0.0",
    "xo": "^0.25.3"
  },
  "nyc": {
    "reporter": [
      "html",
      "lcov",
      "text"
    ]
  },
  "xo": {
    "rules": {
      "import/order": "off"
    }
  }
}

22 node_modules/@babel/core/LICENSE generated vendored Normal file
@@ -0,0 +1,22 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

19 node_modules/@babel/core/README.md generated vendored Normal file
@@ -0,0 +1,19 @@
# @babel/core

> Babel compiler core.

See our website [@babel/core](https://babeljs.io/docs/en/next/babel-core.html) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20core%22+is%3Aopen) associated with this package.

## Install

Using npm:

```sh
npm install --save-dev @babel/core
```

or using yarn:

```sh
yarn add @babel/core --dev
```

198 node_modules/@babel/core/lib/config/caching.js generated vendored Normal file
@@ -0,0 +1,198 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.makeStrongCache = makeStrongCache;
exports.makeWeakCache = makeWeakCache;
exports.assertSimpleType = assertSimpleType;

function makeStrongCache(handler) {
  return makeCachedFunction(new Map(), handler);
}

function makeWeakCache(handler) {
  return makeCachedFunction(new WeakMap(), handler);
}

function makeCachedFunction(callCache, handler) {
  return function cachedFunction(arg, data) {
    let cachedValue = callCache.get(arg);

    if (cachedValue) {
      for (const {
        value,
        valid
      } of cachedValue) {
        if (valid(data)) return value;
      }
    }

    const cache = new CacheConfigurator(data);
    const value = handler(arg, cache);
    if (!cache.configured()) cache.forever();
    cache.deactivate();

    switch (cache.mode()) {
      case "forever":
        cachedValue = [{
          value,
          valid: () => true
        }];
        callCache.set(arg, cachedValue);
        break;

      case "invalidate":
        cachedValue = [{
          value,
          valid: cache.validator()
        }];
        callCache.set(arg, cachedValue);
        break;

      case "valid":
        if (cachedValue) {
          cachedValue.push({
            value,
            valid: cache.validator()
          });
        } else {
          cachedValue = [{
            value,
            valid: cache.validator()
          }];
          callCache.set(arg, cachedValue);
        }

    }

    return value;
  };
}

class CacheConfigurator {
  constructor(data) {
    this._active = true;
    this._never = false;
    this._forever = false;
    this._invalidate = false;
    this._configured = false;
    this._pairs = [];
    this._data = data;
  }

  simple() {
    return makeSimpleConfigurator(this);
  }

  mode() {
    if (this._never) return "never";
    if (this._forever) return "forever";
    if (this._invalidate) return "invalidate";
    return "valid";
  }

  forever() {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }

    if (this._never) {
      throw new Error("Caching has already been configured with .never()");
    }

    this._forever = true;
    this._configured = true;
  }

  never() {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }

    if (this._forever) {
      throw new Error("Caching has already been configured with .forever()");
    }

    this._never = true;
    this._configured = true;
  }

  using(handler) {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }

    if (this._never || this._forever) {
      throw new Error("Caching has already been configured with .never or .forever()");
    }

    this._configured = true;
    const key = handler(this._data);

    this._pairs.push([key, handler]);

    return key;
  }

  invalidate(handler) {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }

    if (this._never || this._forever) {
      throw new Error("Caching has already been configured with .never or .forever()");
    }

    this._invalidate = true;
    this._configured = true;
    const key = handler(this._data);

    this._pairs.push([key, handler]);

    return key;
  }

  validator() {
    const pairs = this._pairs;
    return data => pairs.every(([key, fn]) => key === fn(data));
  }

  deactivate() {
    this._active = false;
  }

  configured() {
    return this._configured;
  }

}

function makeSimpleConfigurator(cache) {
  function cacheFn(val) {
    if (typeof val === "boolean") {
      if (val) cache.forever();else cache.never();
      return;
    }

    return cache.using(() => assertSimpleType(val()));
  }

  cacheFn.forever = () => cache.forever();

  cacheFn.never = () => cache.never();

  cacheFn.using = cb => cache.using(() => assertSimpleType(cb()));

  cacheFn.invalidate = cb => cache.invalidate(() => assertSimpleType(cb()));

  return cacheFn;
}

function assertSimpleType(value) {
  if (value != null && typeof value !== "string" && typeof value !== "boolean" && typeof value !== "number") {
    throw new Error("Cache keys must be either string, boolean, number, null, or undefined.");
  }

  return value;
}

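The cached function produced by `makeStrongCache`/`makeWeakCache` above takes a second `data` argument; validators registered through `cache.using` decide whether an earlier result is still valid for that data. A sketch of the contract under those semantics (this is a Babel-internal module, so the deep require path and the `envName` field are illustrative assumptions):

```js
const { makeStrongCache } = require('@babel/core/lib/config/caching');

const describe = makeStrongCache((name, cache) => {
  // Re-run the handler whenever data.envName differs from the cached key.
  const envName = cache.using(data => data.envName);
  return { name, envName, builtAt: Date.now() };
});

const a = describe('opts', { envName: 'development' });
const b = describe('opts', { envName: 'development' });
console.log(a === b); //=> true (the validator saw the same envName)

const c = describe('opts', { envName: 'production' });
console.log(a === c); //=> false (cache miss, the handler ran again)
```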
439 node_modules/@babel/core/lib/config/config-chain.js generated vendored Normal file
@@ -0,0 +1,439 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.buildPresetChain = buildPresetChain;
exports.buildRootChain = buildRootChain;
exports.buildPresetChainWalker = void 0;

function _path() {
  const data = _interopRequireDefault(require("path"));

  _path = function () {
    return data;
  };

  return data;
}

function _debug() {
  const data = _interopRequireDefault(require("debug"));

  _debug = function () {
    return data;
  };

  return data;
}

var _options = require("./validation/options");

var _patternToRegex = _interopRequireDefault(require("./pattern-to-regex"));

var _files = require("./files");

var _caching = require("./caching");

var _configDescriptors = require("./config-descriptors");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const debug = (0, _debug().default)("babel:config:config-chain");

function buildPresetChain(arg, context) {
  const chain = buildPresetChainWalker(arg, context);
  if (!chain) return null;
  return {
    plugins: dedupDescriptors(chain.plugins),
    presets: dedupDescriptors(chain.presets),
    options: chain.options.map(o => normalizeOptions(o))
  };
}

const buildPresetChainWalker = makeChainWalker({
  init: arg => arg,
  root: preset => loadPresetDescriptors(preset),
  env: (preset, envName) => loadPresetEnvDescriptors(preset)(envName),
  overrides: (preset, index) => loadPresetOverridesDescriptors(preset)(index),
  overridesEnv: (preset, index, envName) => loadPresetOverridesEnvDescriptors(preset)(index)(envName)
});
exports.buildPresetChainWalker = buildPresetChainWalker;
const loadPresetDescriptors = (0, _caching.makeWeakCache)(preset => buildRootDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors));
const loadPresetEnvDescriptors = (0, _caching.makeWeakCache)(preset => (0, _caching.makeStrongCache)(envName => buildEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, envName)));
const loadPresetOverridesDescriptors = (0, _caching.makeWeakCache)(preset => (0, _caching.makeStrongCache)(index => buildOverrideDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index)));
const loadPresetOverridesEnvDescriptors = (0, _caching.makeWeakCache)(preset => (0, _caching.makeStrongCache)(index => (0, _caching.makeStrongCache)(envName => buildOverrideEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index, envName))));

function buildRootChain(opts, context) {
  const programmaticChain = loadProgrammaticChain({
    options: opts,
    dirname: context.cwd
  }, context);
  if (!programmaticChain) return null;
  let configFile;

  if (typeof opts.configFile === "string") {
    configFile = (0, _files.loadConfig)(opts.configFile, context.cwd, context.envName, context.caller);
  } else if (opts.configFile !== false) {
    configFile = (0, _files.findRootConfig)(context.root, context.envName, context.caller);
  }

  let {
    babelrc,
    babelrcRoots
  } = opts;
  let babelrcRootsDirectory = context.cwd;
  const configFileChain = emptyChain();

  if (configFile) {
    const validatedFile = validateConfigFile(configFile);
    const result = loadFileChain(validatedFile, context);
    if (!result) return null;

    if (babelrc === undefined) {
      babelrc = validatedFile.options.babelrc;
    }

    if (babelrcRoots === undefined) {
      babelrcRootsDirectory = validatedFile.dirname;
      babelrcRoots = validatedFile.options.babelrcRoots;
    }

    mergeChain(configFileChain, result);
  }

  const pkgData = typeof context.filename === "string" ? (0, _files.findPackageData)(context.filename) : null;
  let ignoreFile, babelrcFile;
  const fileChain = emptyChain();

  if ((babelrc === true || babelrc === undefined) && pkgData && babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory)) {
    ({
      ignore: ignoreFile,
      config: babelrcFile
    } = (0, _files.findRelativeConfig)(pkgData, context.envName, context.caller));

    if (ignoreFile && shouldIgnore(context, ignoreFile.ignore, null, ignoreFile.dirname)) {
      return null;
    }

    if (babelrcFile) {
      const result = loadFileChain(validateBabelrcFile(babelrcFile), context);
      if (!result) return null;
      mergeChain(fileChain, result);
    }
  }

  const chain = mergeChain(mergeChain(mergeChain(emptyChain(), configFileChain), fileChain), programmaticChain);
  return {
    plugins: dedupDescriptors(chain.plugins),
    presets: dedupDescriptors(chain.presets),
    options: chain.options.map(o => normalizeOptions(o)),
    ignore: ignoreFile || undefined,
    babelrc: babelrcFile || undefined,
    config: configFile || undefined
  };
}

function babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory) {
  if (typeof babelrcRoots === "boolean") return babelrcRoots;
  const absoluteRoot = context.root;

  if (babelrcRoots === undefined) {
    return pkgData.directories.indexOf(absoluteRoot) !== -1;
  }

  let babelrcPatterns = babelrcRoots;
  if (!Array.isArray(babelrcPatterns)) babelrcPatterns = [babelrcPatterns];
  babelrcPatterns = babelrcPatterns.map(pat => {
    return typeof pat === "string" ? _path().default.resolve(babelrcRootsDirectory, pat) : pat;
  });

  if (babelrcPatterns.length === 1 && babelrcPatterns[0] === absoluteRoot) {
    return pkgData.directories.indexOf(absoluteRoot) !== -1;
  }

  return babelrcPatterns.some(pat => {
    if (typeof pat === "string") {
      pat = (0, _patternToRegex.default)(pat, babelrcRootsDirectory);
    }

    return pkgData.directories.some(directory => {
      return matchPattern(pat, babelrcRootsDirectory, directory, context);
    });
  });
}

const validateConfigFile = (0, _caching.makeWeakCache)(file => ({
  filepath: file.filepath,
  dirname: file.dirname,
  options: (0, _options.validate)("configfile", file.options)
}));
const validateBabelrcFile = (0, _caching.makeWeakCache)(file => ({
  filepath: file.filepath,
  dirname: file.dirname,
  options: (0, _options.validate)("babelrcfile", file.options)
}));
const validateExtendFile = (0, _caching.makeWeakCache)(file => ({
  filepath: file.filepath,
  dirname: file.dirname,
  options: (0, _options.validate)("extendsfile", file.options)
}));
const loadProgrammaticChain = makeChainWalker({
  root: input => buildRootDescriptors(input, "base", _configDescriptors.createCachedDescriptors),
  env: (input, envName) => buildEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, envName),
  overrides: (input, index) => buildOverrideDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index),
  overridesEnv: (input, index, envName) => buildOverrideEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index, envName)
});
const loadFileChain = makeChainWalker({
  root: file => loadFileDescriptors(file),
  env: (file, envName) => loadFileEnvDescriptors(file)(envName),
  overrides: (file, index) => loadFileOverridesDescriptors(file)(index),
  overridesEnv: (file, index, envName) => loadFileOverridesEnvDescriptors(file)(index)(envName)
});
const loadFileDescriptors = (0, _caching.makeWeakCache)(file => buildRootDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors));
const loadFileEnvDescriptors = (0, _caching.makeWeakCache)(file => (0, _caching.makeStrongCache)(envName => buildEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, envName)));
const loadFileOverridesDescriptors = (0, _caching.makeWeakCache)(file => (0, _caching.makeStrongCache)(index => buildOverrideDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index)));
const loadFileOverridesEnvDescriptors = (0, _caching.makeWeakCache)(file => (0, _caching.makeStrongCache)(index => (0, _caching.makeStrongCache)(envName => buildOverrideEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index, envName))));

function buildRootDescriptors({
  dirname,
  options
}, alias, descriptors) {
  return descriptors(dirname, options, alias);
}

function buildEnvDescriptors({
  dirname,
  options
}, alias, descriptors, envName) {
  const opts = options.env && options.env[envName];
  return opts ? descriptors(dirname, opts, `${alias}.env["${envName}"]`) : null;
}

function buildOverrideDescriptors({
  dirname,
  options
}, alias, descriptors, index) {
  const opts = options.overrides && options.overrides[index];
  if (!opts) throw new Error("Assertion failure - missing override");
  return descriptors(dirname, opts, `${alias}.overrides[${index}]`);
}

function buildOverrideEnvDescriptors({
  dirname,
  options
}, alias, descriptors, index, envName) {
  const override = options.overrides && options.overrides[index];
  if (!override) throw new Error("Assertion failure - missing override");
  const opts = override.env && override.env[envName];
  return opts ? descriptors(dirname, opts, `${alias}.overrides[${index}].env["${envName}"]`) : null;
}

function makeChainWalker({
  root,
  env,
  overrides,
  overridesEnv
}) {
  return (input, context, files = new Set()) => {
    const {
      dirname
    } = input;
    const flattenedConfigs = [];
    const rootOpts = root(input);

    if (configIsApplicable(rootOpts, dirname, context)) {
      flattenedConfigs.push(rootOpts);
      const envOpts = env(input, context.envName);

      if (envOpts && configIsApplicable(envOpts, dirname, context)) {
        flattenedConfigs.push(envOpts);
      }

      (rootOpts.options.overrides || []).forEach((_, index) => {
        const overrideOps = overrides(input, index);

        if (configIsApplicable(overrideOps, dirname, context)) {
          flattenedConfigs.push(overrideOps);
          const overrideEnvOpts = overridesEnv(input, index, context.envName);

          if (overrideEnvOpts && configIsApplicable(overrideEnvOpts, dirname, context)) {
            flattenedConfigs.push(overrideEnvOpts);
          }
        }
      });
    }

    if (flattenedConfigs.some(({
      options: {
        ignore,
        only
      }
    }) => shouldIgnore(context, ignore, only, dirname))) {
      return null;
    }

    const chain = emptyChain();

    for (const op of flattenedConfigs) {
      if (!mergeExtendsChain(chain, op.options, dirname, context, files)) {
        return null;
      }

      mergeChainOpts(chain, op);
    }

    return chain;
  };
}

function mergeExtendsChain(chain, opts, dirname, context, files) {
  if (opts.extends === undefined) return true;
  const file = (0, _files.loadConfig)(opts.extends, dirname, context.envName, context.caller);

  if (files.has(file)) {
    throw new Error(`Configuration cycle detected loading ${file.filepath}.\n` + `File already loaded following the config chain:\n` + Array.from(files, file => ` - ${file.filepath}`).join("\n"));
  }

  files.add(file);
  const fileChain = loadFileChain(validateExtendFile(file), context, files);
  files.delete(file);
  if (!fileChain) return false;
  mergeChain(chain, fileChain);
  return true;
}

function mergeChain(target, source) {
  target.options.push(...source.options);
  target.plugins.push(...source.plugins);
  target.presets.push(...source.presets);
  return target;
}

function mergeChainOpts(target, {
  options,
  plugins,
  presets
}) {
  target.options.push(options);
  target.plugins.push(...plugins());
  target.presets.push(...presets());
  return target;
}

function emptyChain() {
  return {
    options: [],
    presets: [],
    plugins: []
  };
}

function normalizeOptions(opts) {
  const options = Object.assign({}, opts);
  delete options.extends;
  delete options.env;
  delete options.overrides;
  delete options.plugins;
  delete options.presets;
  delete options.passPerPreset;
  delete options.ignore;
  delete options.only;
  delete options.test;
  delete options.include;
  delete options.exclude;

  if (Object.prototype.hasOwnProperty.call(options, "sourceMap")) {
    options.sourceMaps = options.sourceMap;
    delete options.sourceMap;
  }

  return options;
}

function dedupDescriptors(items) {
  const map = new Map();
  const descriptors = [];

  for (const item of items) {
    if (typeof item.value === "function") {
      const fnKey = item.value;
      let nameMap = map.get(fnKey);

      if (!nameMap) {
        nameMap = new Map();
        map.set(fnKey, nameMap);
      }

      let desc = nameMap.get(item.name);

      if (!desc) {
        desc = {
          value: item
        };
        descriptors.push(desc);
        if (!item.ownPass) nameMap.set(item.name, desc);
      } else {
        desc.value = item;
      }
    } else {
      descriptors.push({
        value: item
      });
    }
  }

  return descriptors.reduce((acc, desc) => {
    acc.push(desc.value);
    return acc;
  }, []);
}

function configIsApplicable({
  options
}, dirname, context) {
  return (options.test === undefined || configFieldIsApplicable(context, options.test, dirname)) && (options.include === undefined || configFieldIsApplicable(context, options.include, dirname)) && (options.exclude === undefined || !configFieldIsApplicable(context, options.exclude, dirname));
}

function configFieldIsApplicable(context, test, dirname) {
  const patterns = Array.isArray(test) ? test : [test];
  return matchesPatterns(context, patterns, dirname);
}

function shouldIgnore(context, ignore, only, dirname) {
  if (ignore && matchesPatterns(context, ignore, dirname)) {
    debug("Ignored %o because it matched one of %O from %o", context.filename, ignore, dirname);
    return true;
  }

  if (only && !matchesPatterns(context, only, dirname)) {
    debug("Ignored %o because it failed to match one of %O from %o", context.filename, only, dirname);
    return true;
  }

  return false;
}

function matchesPatterns(context, patterns, dirname) {
  return patterns.some(pattern => matchPattern(pattern, dirname, context.filename, context));
}

function matchPattern(pattern, dirname, pathToTest, context) {
  if (typeof pattern === "function") {
    return !!pattern(pathToTest, {
      dirname,
      envName: context.envName,
      caller: context.caller
    });
  }

  if (typeof pathToTest !== "string") {
    throw new Error(`Configuration contains string/RegExp pattern, but no filename was passed to Babel`);
  }

  if (typeof pattern === "string") {
    pattern = (0, _patternToRegex.default)(pattern, dirname);
  }

  return pattern.test(pathToTest);
}

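The walker above flattens every applicable config source into a chain and merges them in order, so options from later sources sit after earlier ones when the chain is reduced. A standalone restatement of the merge step (it mirrors `emptyChain`/`mergeChain` in this file, but the option objects and plugin names are invented for the example):

```js
function emptyChain() {
  return { options: [], presets: [], plugins: [] };
}

function mergeChain(target, source) {
  target.options.push(...source.options);
  target.plugins.push(...source.plugins);
  target.presets.push(...source.presets);
  return target;
}

const configFileChain = { options: [{ sourceMaps: false }], plugins: ['from-config-file'], presets: [] };
const programmaticChain = { options: [{ sourceMaps: true }], plugins: ['from-api'], presets: [] };

// Same shape as buildRootChain: config file first, programmatic options last.
const chain = mergeChain(mergeChain(emptyChain(), configFileChain), programmaticChain);
console.log(chain.options); //=> [ { sourceMaps: false }, { sourceMaps: true } ]
console.log(chain.plugins); //=> [ 'from-config-file', 'from-api' ]
```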
211
node_modules/@babel/core/lib/config/config-descriptors.js
generated
vendored
Normal file
211
node_modules/@babel/core/lib/config/config-descriptors.js
generated
vendored
Normal file
@@ -0,0 +1,211 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.createCachedDescriptors = createCachedDescriptors;
|
||||
exports.createUncachedDescriptors = createUncachedDescriptors;
|
||||
exports.createDescriptor = createDescriptor;
|
||||
|
||||
var _files = require("./files");
|
||||
|
||||
var _item = require("./item");
|
||||
|
||||
var _caching = require("./caching");
|
||||
|
||||
function isEqualDescriptor(a, b) {
|
||||
return a.name === b.name && a.value === b.value && a.options === b.options && a.dirname === b.dirname && a.alias === b.alias && a.ownPass === b.ownPass && (a.file && a.file.request) === (b.file && b.file.request) && (a.file && a.file.resolved) === (b.file && b.file.resolved);
|
||||
}
|
||||
|
||||
function createCachedDescriptors(dirname, options, alias) {
|
||||
const {
|
||||
plugins,
|
||||
presets,
|
||||
passPerPreset
|
||||
} = options;
|
||||
return {
|
||||
options,
|
||||
plugins: plugins ? () => createCachedPluginDescriptors(plugins, dirname)(alias) : () => [],
|
||||
presets: presets ? () => createCachedPresetDescriptors(presets, dirname)(alias)(!!passPerPreset) : () => []
|
||||
};
|
||||
}
|
||||
|
||||
function createUncachedDescriptors(dirname, options, alias) {
|
||||
let plugins;
|
||||
let presets;
|
||||
return {
|
||||
options,
|
||||
    plugins: () => {
      if (!plugins) {
        plugins = createPluginDescriptors(options.plugins || [], dirname, alias);
      }

      return plugins;
    },
    presets: () => {
      if (!presets) {
        presets = createPresetDescriptors(options.presets || [], dirname, alias, !!options.passPerPreset);
      }

      return presets;
    }
  };
}

const PRESET_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPresetDescriptors = (0, _caching.makeWeakCache)((items, cache) => {
  const dirname = cache.using(dir => dir);
  return (0, _caching.makeStrongCache)(alias => (0, _caching.makeStrongCache)(passPerPreset => createPresetDescriptors(items, dirname, alias, passPerPreset).map(desc => loadCachedDescriptor(PRESET_DESCRIPTOR_CACHE, desc))));
});
const PLUGIN_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPluginDescriptors = (0, _caching.makeWeakCache)((items, cache) => {
  const dirname = cache.using(dir => dir);
  return (0, _caching.makeStrongCache)(alias => createPluginDescriptors(items, dirname, alias).map(desc => loadCachedDescriptor(PLUGIN_DESCRIPTOR_CACHE, desc)));
});
const DEFAULT_OPTIONS = {};

function loadCachedDescriptor(cache, desc) {
  const {
    value,
    options = DEFAULT_OPTIONS
  } = desc;
  if (options === false) return desc;
  let cacheByOptions = cache.get(value);

  if (!cacheByOptions) {
    cacheByOptions = new WeakMap();
    cache.set(value, cacheByOptions);
  }

  let possibilities = cacheByOptions.get(options);

  if (!possibilities) {
    possibilities = [];
    cacheByOptions.set(options, possibilities);
  }

  if (possibilities.indexOf(desc) === -1) {
    const matches = possibilities.filter(possibility => isEqualDescriptor(possibility, desc));

    if (matches.length > 0) {
      return matches[0];
    }

    possibilities.push(desc);
  }

  return desc;
}

function createPresetDescriptors(items, dirname, alias, passPerPreset) {
  return createDescriptors("preset", items, dirname, alias, passPerPreset);
}

function createPluginDescriptors(items, dirname, alias) {
  return createDescriptors("plugin", items, dirname, alias);
}

function createDescriptors(type, items, dirname, alias, ownPass) {
  const descriptors = items.map((item, index) => createDescriptor(item, dirname, {
    type,
    alias: `${alias}$${index}`,
    ownPass: !!ownPass
  }));
  assertNoDuplicates(descriptors);
  return descriptors;
}

function createDescriptor(pair, dirname, {
  type,
  alias,
  ownPass
}) {
  const desc = (0, _item.getItemDescriptor)(pair);

  if (desc) {
    return desc;
  }

  let name;
  let options;
  let value = pair;

  if (Array.isArray(value)) {
    if (value.length === 3) {
      [value, options, name] = value;
    } else {
      [value, options] = value;
    }
  }

  let file = undefined;
  let filepath = null;

  if (typeof value === "string") {
    if (typeof type !== "string") {
      throw new Error("To resolve a string-based item, the type of item must be given");
    }

    const resolver = type === "plugin" ? _files.loadPlugin : _files.loadPreset;
    const request = value;
    ({
      filepath,
      value
    } = resolver(value, dirname));
    file = {
      request,
      resolved: filepath
    };
  }

  if (!value) {
    throw new Error(`Unexpected falsy value: ${String(value)}`);
  }

  if (typeof value === "object" && value.__esModule) {
    if (value.default) {
      value = value.default;
    } else {
      throw new Error("Must export a default export when using ES6 modules.");
    }
  }

  if (typeof value !== "object" && typeof value !== "function") {
    throw new Error(`Unsupported format: ${typeof value}. Expected an object or a function.`);
  }

  if (filepath !== null && typeof value === "object" && value) {
    throw new Error(`Plugin/Preset files are not allowed to export objects, only functions. In ${filepath}`);
  }

  return {
    name,
    alias: filepath || alias,
    value,
    options,
    dirname,
    ownPass,
    file
  };
}

function assertNoDuplicates(items) {
  const map = new Map();

  for (const item of items) {
    if (typeof item.value !== "function") continue;
    let nameMap = map.get(item.value);

    if (!nameMap) {
      nameMap = new Set();
      map.set(item.value, nameMap);
    }

    if (nameMap.has(item.name)) {
      const conflicts = items.filter(i => i.value === item.value);
      throw new Error([`Duplicate plugin/preset detected.`, `If you'd like to use two separate instances of a plugin,`, `they need separate names, e.g.`, ``, `  plugins: [`, `    ['some-plugin', {}],`, `    ['some-plugin', {}, 'some unique name'],`, `  ]`, ``, `Duplicates detected are:`, `${JSON.stringify(conflicts, null, 2)}`].join("\n"));
    }

    nameMap.add(item.name);
  }
}
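The duplicate-plugin error thrown by assertNoDuplicates above suggests the workaround directly. As a hedged sketch (plugin name hypothetical), a config can register two instances of the same plugin by giving one a unique name in the third tuple slot that createDescriptor destructures:

// .babelrc sketch: items follow the [value, options, name] shape handled by
// createDescriptor; distinct names satisfy assertNoDuplicates for two
// instances of one plugin.
{
  "plugins": [
    ["some-plugin", { "flag": true }],
    ["some-plugin", { "flag": false }, "some-plugin-second-instance"]
  ]
}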
312 node_modules/@babel/core/lib/config/files/configuration.js generated vendored Normal file
@@ -0,0 +1,312 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.findConfigUpwards = findConfigUpwards;
exports.findRelativeConfig = findRelativeConfig;
exports.findRootConfig = findRootConfig;
exports.loadConfig = loadConfig;

function _debug() {
  const data = _interopRequireDefault(require("debug"));

  _debug = function () {
    return data;
  };

  return data;
}

function _path() {
  const data = _interopRequireDefault(require("path"));

  _path = function () {
    return data;
  };

  return data;
}

function _fs() {
  const data = _interopRequireDefault(require("fs"));

  _fs = function () {
    return data;
  };

  return data;
}

function _json() {
  const data = _interopRequireDefault(require("json5"));

  _json = function () {
    return data;
  };

  return data;
}

function _resolve() {
  const data = _interopRequireDefault(require("resolve"));

  _resolve = function () {
    return data;
  };

  return data;
}

var _caching = require("../caching");

var _configApi = _interopRequireDefault(require("../helpers/config-api"));

var _utils = require("./utils");

var _patternToRegex = _interopRequireDefault(require("../pattern-to-regex"));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const debug = (0, _debug().default)("babel:config:loading:files:configuration");
const ROOT_CONFIG_FILENAMES = ["babel.config.js", "babel.config.cjs", "babel.config.json"];
const RELATIVE_CONFIG_FILENAMES = [".babelrc", ".babelrc.js", ".babelrc.cjs"];
const BABELIGNORE_FILENAME = ".babelignore";

function findConfigUpwards(rootDir) {
  let dirname = rootDir;

  while (true) {
    const configFileFound = ROOT_CONFIG_FILENAMES.some(filename => _fs().default.existsSync(_path().default.join(dirname, filename)));
    if (configFileFound) return dirname;

    const nextDir = _path().default.dirname(dirname);

    if (dirname === nextDir) break;
    dirname = nextDir;
  }

  return null;
}

function findRelativeConfig(packageData, envName, caller) {
  let config = null;
  let ignore = null;

  const dirname = _path().default.dirname(packageData.filepath);

  for (const loc of packageData.directories) {
    if (!config) {
      config = loadOneConfig(RELATIVE_CONFIG_FILENAMES, loc, envName, caller, packageData.pkg && packageData.pkg.dirname === loc ? packageToBabelConfig(packageData.pkg) : null);
    }

    if (!ignore) {
      const ignoreLoc = _path().default.join(loc, BABELIGNORE_FILENAME);

      ignore = readIgnoreConfig(ignoreLoc);

      if (ignore) {
        debug("Found ignore %o from %o.", ignore.filepath, dirname);
      }
    }
  }

  return {
    config,
    ignore
  };
}

function findRootConfig(dirname, envName, caller) {
  return loadOneConfig(ROOT_CONFIG_FILENAMES, dirname, envName, caller);
}

function loadOneConfig(names, dirname, envName, caller, previousConfig = null) {
  const config = names.reduce((previousConfig, name) => {
    const filepath = _path().default.resolve(dirname, name);

    const config = readConfig(filepath, envName, caller);

    if (config && previousConfig) {
      throw new Error(`Multiple configuration files found. Please remove one:\n` + ` - ${_path().default.basename(previousConfig.filepath)}\n` + ` - ${name}\n` + `from ${dirname}`);
    }

    return config || previousConfig;
  }, previousConfig);

  if (config) {
    debug("Found configuration %o from %o.", config.filepath, dirname);
  }

  return config;
}

function loadConfig(name, dirname, envName, caller) {
  const filepath = _resolve().default.sync(name, {
    basedir: dirname
  });

  const conf = readConfig(filepath, envName, caller);

  if (!conf) {
    throw new Error(`Config file ${filepath} contains no configuration data`);
  }

  debug("Loaded config %o from %o.", name, dirname);
  return conf;
}

function readConfig(filepath, envName, caller) {
  const ext = _path().default.extname(filepath);

  return ext === ".js" || ext === ".cjs" ? readConfigJS(filepath, {
    envName,
    caller
  }) : readConfigJSON5(filepath);
}

const LOADING_CONFIGS = new Set();
const readConfigJS = (0, _caching.makeStrongCache)((filepath, cache) => {
  if (!_fs().default.existsSync(filepath)) {
    cache.forever();
    return null;
  }

  if (LOADING_CONFIGS.has(filepath)) {
    cache.never();
    debug("Auto-ignoring usage of config %o.", filepath);
    return {
      filepath,
      dirname: _path().default.dirname(filepath),
      options: {}
    };
  }

  let options;

  try {
    LOADING_CONFIGS.add(filepath);

    const configModule = require(filepath);

    options = configModule && configModule.__esModule ? configModule.default || undefined : configModule;
  } catch (err) {
    err.message = `${filepath}: Error while loading config - ${err.message}`;
    throw err;
  } finally {
    LOADING_CONFIGS.delete(filepath);
  }

  if (typeof options === "function") {
    options = options((0, _configApi.default)(cache));
    if (!cache.configured()) throwConfigError();
  }

  if (!options || typeof options !== "object" || Array.isArray(options)) {
    throw new Error(`${filepath}: Configuration should be an exported JavaScript object.`);
  }

  if (typeof options.then === "function") {
    throw new Error(`You appear to be using an async configuration, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously return your config.`);
  }

  return {
    filepath,
    dirname: _path().default.dirname(filepath),
    options
  };
});
const packageToBabelConfig = (0, _caching.makeWeakCache)(file => {
  const babel = file.options["babel"];
  if (typeof babel === "undefined") return null;

  if (typeof babel !== "object" || Array.isArray(babel) || babel === null) {
    throw new Error(`${file.filepath}: .babel property must be an object`);
  }

  return {
    filepath: file.filepath,
    dirname: file.dirname,
    options: babel
  };
});
const readConfigJSON5 = (0, _utils.makeStaticFileCache)((filepath, content) => {
  let options;

  try {
    options = _json().default.parse(content);
  } catch (err) {
    err.message = `${filepath}: Error while parsing config - ${err.message}`;
    throw err;
  }

  if (!options) throw new Error(`${filepath}: No config detected`);

  if (typeof options !== "object") {
    throw new Error(`${filepath}: Config returned typeof ${typeof options}`);
  }

  if (Array.isArray(options)) {
    throw new Error(`${filepath}: Expected config object but found array`);
  }

  return {
    filepath,
    dirname: _path().default.dirname(filepath),
    options
  };
});
const readIgnoreConfig = (0, _utils.makeStaticFileCache)((filepath, content) => {
  const ignoreDir = _path().default.dirname(filepath);

  const ignorePatterns = content.split("\n").map(line => line.replace(/#(.*?)$/, "").trim()).filter(line => !!line);

  for (const pattern of ignorePatterns) {
    if (pattern[0] === "!") {
      throw new Error(`Negation of file paths is not supported.`);
    }
  }

  return {
    filepath,
    dirname: _path().default.dirname(filepath),
    ignore: ignorePatterns.map(pattern => (0, _patternToRegex.default)(pattern, ignoreDir))
  };
});

function throwConfigError() {
  throw new Error(`\
Caching was left unconfigured. Babel's plugins, presets, and .babelrc.js files can be configured
for various types of caching, using the first param of their handler functions:

module.exports = function(api) {
  // The API exposes the following:

  // Cache the returned value forever and don't call this function again.
  api.cache(true);

  // Don't cache at all. Not recommended because it will be very slow.
  api.cache(false);

  // Cached based on the value of some function. If this function returns a value different from
  // a previously-encountered value, the plugins will re-evaluate.
  var env = api.cache(() => process.env.NODE_ENV);

  // If testing for a specific env, we recommend specifics to avoid instantiating a plugin for
  // any possible NODE_ENV value that might come up during plugin execution.
  var isProd = api.cache(() => process.env.NODE_ENV === "production");

  // .cache(fn) will perform a linear search though instances to find the matching plugin based
  // based on previous instantiated plugins. If you want to recreate the plugin and discard the
  // previous instance whenever something changes, you may use:
  var isProd = api.cache.invalidate(() => process.env.NODE_ENV === "production");

  // Note, we also expose the following more-verbose versions of the above examples:
  api.cache.forever(); // api.cache(true)
  api.cache.never();   // api.cache(false)
  api.cache.using(fn); // api.cache(fn)

  // Return the value that will be cached.
  return { };
};`);
}
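For reference, a minimal function-style babel.config.js that passes the cache.configured() check in readConfigJS above. This is a sketch assuming the standard config API described in the error text; the preset name is hypothetical:

// Calling one of the api.cache methods marks the cache as configured,
// so throwConfigError() above is never reached.
module.exports = function (api) {
  const env = api.cache.using(() => process.env.NODE_ENV);
  return {
    presets: env === "production" ? ["some-preset"] : []
  };
};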
59 node_modules/@babel/core/lib/config/files/index-browser.js generated vendored Normal file
@@ -0,0 +1,59 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.findConfigUpwards = findConfigUpwards;
exports.findPackageData = findPackageData;
exports.findRelativeConfig = findRelativeConfig;
exports.findRootConfig = findRootConfig;
exports.loadConfig = loadConfig;
exports.resolvePlugin = resolvePlugin;
exports.resolvePreset = resolvePreset;
exports.loadPlugin = loadPlugin;
exports.loadPreset = loadPreset;

function findConfigUpwards(rootDir) {
  return null;
}

function findPackageData(filepath) {
  return {
    filepath,
    directories: [],
    pkg: null,
    isPackage: false
  };
}

function findRelativeConfig(pkgData, envName, caller) {
  return {
    pkg: null,
    config: null,
    ignore: null
  };
}

function findRootConfig(dirname, envName, caller) {
  return null;
}

function loadConfig(name, dirname, envName, caller) {
  throw new Error(`Cannot load ${name} relative to ${dirname} in a browser`);
}

function resolvePlugin(name, dirname) {
  return null;
}

function resolvePreset(name, dirname) {
  return null;
}

function loadPlugin(name, dirname) {
  throw new Error(`Cannot load plugin ${name} relative to ${dirname} in a browser`);
}

function loadPreset(name, dirname) {
  throw new Error(`Cannot load preset ${name} relative to ${dirname} in a browser`);
}
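A behavior sketch of the stubs above (that browser bundles alias ./files/index.js to this file is an assumption about the package setup; paths are hypothetical): config discovery is a silent no-op, while anything needing the filesystem throws.

const files = require("@babel/core/lib/config/files/index-browser");

files.findRootConfig("/app", "development"); // null, no config lookup
files.findPackageData("/app/src/main.js");   // { directories: [], pkg: null, ... }
files.loadPlugin("some-plugin", "/app");     // throws: cannot load in a browser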
67 node_modules/@babel/core/lib/config/files/index.js generated vendored Normal file
@@ -0,0 +1,67 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
Object.defineProperty(exports, "findPackageData", {
  enumerable: true,
  get: function () {
    return _package.findPackageData;
  }
});
Object.defineProperty(exports, "findConfigUpwards", {
  enumerable: true,
  get: function () {
    return _configuration.findConfigUpwards;
  }
});
Object.defineProperty(exports, "findRelativeConfig", {
  enumerable: true,
  get: function () {
    return _configuration.findRelativeConfig;
  }
});
Object.defineProperty(exports, "findRootConfig", {
  enumerable: true,
  get: function () {
    return _configuration.findRootConfig;
  }
});
Object.defineProperty(exports, "loadConfig", {
  enumerable: true,
  get: function () {
    return _configuration.loadConfig;
  }
});
Object.defineProperty(exports, "resolvePlugin", {
  enumerable: true,
  get: function () {
    return _plugins.resolvePlugin;
  }
});
Object.defineProperty(exports, "resolvePreset", {
  enumerable: true,
  get: function () {
    return _plugins.resolvePreset;
  }
});
Object.defineProperty(exports, "loadPlugin", {
  enumerable: true,
  get: function () {
    return _plugins.loadPlugin;
  }
});
Object.defineProperty(exports, "loadPreset", {
  enumerable: true,
  get: function () {
    return _plugins.loadPreset;
  }
});

var _package = require("./package");

var _configuration = require("./configuration");

var _plugins = require("./plugins");

({});
76 node_modules/@babel/core/lib/config/files/package.js generated vendored Normal file
@@ -0,0 +1,76 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.findPackageData = findPackageData;

function _path() {
  const data = _interopRequireDefault(require("path"));

  _path = function () {
    return data;
  };

  return data;
}

var _utils = require("./utils");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const PACKAGE_FILENAME = "package.json";

function findPackageData(filepath) {
  let pkg = null;
  const directories = [];
  let isPackage = true;

  let dirname = _path().default.dirname(filepath);

  while (!pkg && _path().default.basename(dirname) !== "node_modules") {
    directories.push(dirname);
    pkg = readConfigPackage(_path().default.join(dirname, PACKAGE_FILENAME));

    const nextLoc = _path().default.dirname(dirname);

    if (dirname === nextLoc) {
      isPackage = false;
      break;
    }

    dirname = nextLoc;
  }

  return {
    filepath,
    directories,
    pkg,
    isPackage
  };
}

const readConfigPackage = (0, _utils.makeStaticFileCache)((filepath, content) => {
  let options;

  try {
    options = JSON.parse(content);
  } catch (err) {
    err.message = `${filepath}: Error while parsing JSON - ${err.message}`;
    throw err;
  }

  if (typeof options !== "object") {
    throw new Error(`${filepath}: Config returned typeof ${typeof options}`);
  }

  if (Array.isArray(options)) {
    throw new Error(`${filepath}: Expected config object but found array`);
  }

  return {
    filepath,
    dirname: _path().default.dirname(filepath),
    options
  };
});
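findPackageData above feeds packageToBabelConfig in configuration.js, which lifts a "babel" key out of package.json. A sketch of that shape (package name hypothetical):

// package.json: the "babel" field must be an object, or configuration.js
// throws ".babel property must be an object".
{
  "name": "some-package",
  "version": "1.0.0",
  "babel": {
    "presets": ["@babel/preset-env"]
  }
}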
169 node_modules/@babel/core/lib/config/files/plugins.js generated vendored Normal file
@@ -0,0 +1,169 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.resolvePlugin = resolvePlugin;
exports.resolvePreset = resolvePreset;
exports.loadPlugin = loadPlugin;
exports.loadPreset = loadPreset;

function _debug() {
  const data = _interopRequireDefault(require("debug"));

  _debug = function () {
    return data;
  };

  return data;
}

function _resolve() {
  const data = _interopRequireDefault(require("resolve"));

  _resolve = function () {
    return data;
  };

  return data;
}

function _path() {
  const data = _interopRequireDefault(require("path"));

  _path = function () {
    return data;
  };

  return data;
}

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const debug = (0, _debug().default)("babel:config:loading:files:plugins");
const EXACT_RE = /^module:/;
const BABEL_PLUGIN_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-plugin-)/;
const BABEL_PRESET_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-preset-)/;
const BABEL_PLUGIN_ORG_RE = /^(@babel\/)(?!plugin-|[^/]+\/)/;
const BABEL_PRESET_ORG_RE = /^(@babel\/)(?!preset-|[^/]+\/)/;
const OTHER_PLUGIN_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-plugin(?:-|\/|$)|[^/]+\/)/;
const OTHER_PRESET_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-preset(?:-|\/|$)|[^/]+\/)/;
const OTHER_ORG_DEFAULT_RE = /^(@(?!babel$)[^/]+)$/;

function resolvePlugin(name, dirname) {
  return resolveStandardizedName("plugin", name, dirname);
}

function resolvePreset(name, dirname) {
  return resolveStandardizedName("preset", name, dirname);
}

function loadPlugin(name, dirname) {
  const filepath = resolvePlugin(name, dirname);

  if (!filepath) {
    throw new Error(`Plugin ${name} not found relative to ${dirname}`);
  }

  const value = requireModule("plugin", filepath);
  debug("Loaded plugin %o from %o.", name, dirname);
  return {
    filepath,
    value
  };
}

function loadPreset(name, dirname) {
  const filepath = resolvePreset(name, dirname);

  if (!filepath) {
    throw new Error(`Preset ${name} not found relative to ${dirname}`);
  }

  const value = requireModule("preset", filepath);
  debug("Loaded preset %o from %o.", name, dirname);
  return {
    filepath,
    value
  };
}

function standardizeName(type, name) {
  if (_path().default.isAbsolute(name)) return name;
  const isPreset = type === "preset";
  return name.replace(isPreset ? BABEL_PRESET_PREFIX_RE : BABEL_PLUGIN_PREFIX_RE, `babel-${type}-`).replace(isPreset ? BABEL_PRESET_ORG_RE : BABEL_PLUGIN_ORG_RE, `$1${type}-`).replace(isPreset ? OTHER_PRESET_ORG_RE : OTHER_PLUGIN_ORG_RE, `$1babel-${type}-`).replace(OTHER_ORG_DEFAULT_RE, `$1/babel-${type}`).replace(EXACT_RE, "");
}

function resolveStandardizedName(type, name, dirname = process.cwd()) {
  const standardizedName = standardizeName(type, name);

  try {
    return _resolve().default.sync(standardizedName, {
      basedir: dirname
    });
  } catch (e) {
    if (e.code !== "MODULE_NOT_FOUND") throw e;

    if (standardizedName !== name) {
      let resolvedOriginal = false;

      try {
        _resolve().default.sync(name, {
          basedir: dirname
        });

        resolvedOriginal = true;
      } catch (e2) {}

      if (resolvedOriginal) {
        e.message += `\n- If you want to resolve "${name}", use "module:${name}"`;
      }
    }

    let resolvedBabel = false;

    try {
      _resolve().default.sync(standardizeName(type, "@babel/" + name), {
        basedir: dirname
      });

      resolvedBabel = true;
    } catch (e2) {}

    if (resolvedBabel) {
      e.message += `\n- Did you mean "@babel/${name}"?`;
    }

    let resolvedOppositeType = false;
    const oppositeType = type === "preset" ? "plugin" : "preset";

    try {
      _resolve().default.sync(standardizeName(oppositeType, name), {
        basedir: dirname
      });

      resolvedOppositeType = true;
    } catch (e2) {}

    if (resolvedOppositeType) {
      e.message += `\n- Did you accidentally pass a ${oppositeType} as a ${type}?`;
    }

    throw e;
  }
}

const LOADING_MODULES = new Set();

function requireModule(type, name) {
  if (LOADING_MODULES.has(name)) {
    throw new Error(`Reentrant ${type} detected trying to load "${name}". This module is not ignored ` + "and is trying to load itself while compiling itself, leading to a dependency cycle. " + 'We recommend adding it to your "ignore" list in your babelrc, or to a .babelignore.');
  }

  try {
    LOADING_MODULES.add(name);
    return require(name);
  } finally {
    LOADING_MODULES.delete(name);
  }
}
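The prefix and org regexes above drive Babel's shorthand-name expansion. The following sketch spells out the mappings they appear to produce (package names hypothetical, dir is any base directory):

// Expansions performed by standardizeName before resolution:
resolvePlugin("transform-foo", dir);          // tries "babel-plugin-transform-foo"
resolvePlugin("@babel/transform-foo", dir);   // tries "@babel/plugin-transform-foo"
resolvePreset("env", dir);                    // tries "babel-preset-env"
resolvePreset("@some-org/env", dir);          // tries "@some-org/babel-preset-env"
resolvePlugin("module:my-local-plugin", dir); // tries "my-local-plugin" verbatim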
0 node_modules/@babel/core/lib/config/files/types.js generated vendored Normal file
41 node_modules/@babel/core/lib/config/files/utils.js generated vendored Normal file
@@ -0,0 +1,41 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.makeStaticFileCache = makeStaticFileCache;

function _fs() {
  const data = _interopRequireDefault(require("fs"));

  _fs = function () {
    return data;
  };

  return data;
}

var _caching = require("../caching");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function makeStaticFileCache(fn) {
  return (0, _caching.makeStrongCache)((filepath, cache) => {
    if (cache.invalidate(() => fileMtime(filepath)) === null) {
      cache.forever();
      return null;
    }

    return fn(filepath, _fs().default.readFileSync(filepath, "utf8"));
  });
}

function fileMtime(filepath) {
  try {
    return +_fs().default.statSync(filepath).mtime;
  } catch (e) {
    if (e.code !== "ENOENT" && e.code !== "ENOTDIR") throw e;
  }

  return null;
}
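makeStaticFileCache above keys re-parsing on the file's mtime: an unchanged mtime serves the cached result, and a missing file caches null forever. A usage sketch under those assumptions (reader name and path hypothetical):

// Re-parses only when the mtime reported by fileMtime changes.
const readLines = makeStaticFileCache((filepath, content) =>
  content.split("\n").filter(Boolean)
);

const first = readLines("/tmp/example.txt");  // reads and parses the file
const second = readLines("/tmp/example.txt"); // cached until the file changes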
Some files were not shown because too many files have changed in this diff.