Mirror of https://github.com/github/codeql-action.git (synced 2025-12-08 16:58:06 +08:00)

Compare commits: examples-t ... test-paths (80 commits)
Commit SHA1s in this comparison:

e4011f4497 6de3e1cde4 c9d0312cb7 0cdf645694 d00417a341 7928587bdf 87ecd0d0cc 7e2e297e07
b97097aaed 8a8a49d3c5 fcb696ec59 c2d2dfdcdd 042ab541fd 19faafba94 476c8a44ba f9ef310b75
6bd7f17e0e 582fd14a81 8425341ae0 1f2cca021a fa9e0ac2a6 a9de5b50d7 af4edf6546 5a97f7e980
d4fb7fc762 00ebedc522 840dc5ee9a c1add46efa e35c90f53d 6db8182349 202704856d 5ea736059a
b4610ac367 a0d60d5d9e f18fffbea8 655c4497ce d7a2025f2d 22501fd7c8 07e22b1f4a 3c2191ffdd
28abced8ca 50dcaaf00d 30f7117e6a 28a878efc3 d518039a6b 855f965205 2909e97a32 4997c3ff4d
0bd4da3a6c 98ad2fc49d 3ca3147cd4 96da037d49 da1e237d1e 054f867322 1e600686e7 cd1625a162
8788e5aa59 8fb9090674 10a2fd615f 8b71cf3e5f ae301902e1 ddee374101 080dc8c3f0 6d1f969b1c
ff40939f66 7b32c3c950 90c07ef21d 852b9186d6 63f52e71c0 3a883af8a6 886b7d3e6e 4e12efc7c3
5c5f422edb 97ef91227e 25e5256866 5ec6b7524f b366432cb3 fa0a733046 0e6df42024 58c1abf92e
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 5 lines)

```yaml
blank_issues_enabled: true
contact_links:
  - name: Contact GitHub Support
    url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
    about: Contact Support about code scanning
```
.github/codeql/codeql-config.yml (vendored, 7 lines changed)

```diff
@@ -2,5 +2,12 @@ name: "CodeQL config"
 queries:
   - name: Run custom queries
     uses: ./queries
+  # Run all extra query suites, both because we want to
+  # and because it'll act as extra testing. This is why
+  # we include both even though one is a superset of the
+  # other, because we're testing the parsing logic and
+  # that the suites exist in the codeql bundle.
+  - uses: security-extended
+  - uses: security-and-quality
 paths-ignore:
   - tests
```
.github/update-release-branch.py (vendored, new file, 178 lines)

```python
import datetime
from github import Github
import random
import requests
import subprocess
import sys

# The branch being merged from.
# This is the one that contains day-to-day development work.
MASTER_BRANCH = 'master'
# The branch being merged into.
# This is the release branch that users reference.
LATEST_RELEASE_BRANCH = 'v1'
# Name of the remote
ORIGIN = 'origin'

# Runs git with the given args and returns the stdout.
# Raises an error if git does not exit successfully.
def run_git(*args):
  cmd = ['git', *args]
  p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  if (p.returncode != 0):
    raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr: ' + p.stderr.decode('ascii'))
  return p.stdout.decode('ascii')

# Returns true if the given branch exists on the origin remote
def branch_exists_on_remote(branch_name):
  return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''

# Opens a PR from the given branch to the release branch
def open_pr(repo, all_commits, short_master_sha, branch_name):
  # Sort the commits into the pull requests that introduced them,
  # and any commits that don't have a pull request
  pull_requests = []
  commits_without_pull_requests = []
  for commit in all_commits:
    pr = get_pr_for_commit(repo, commit)

    if pr is None:
      commits_without_pull_requests.append(commit)
    elif not any(p for p in pull_requests if p.number == pr.number):
      pull_requests.append(pr)

  print('Found ' + str(len(pull_requests)) + ' pull requests')
  print('Found ' + str(len(commits_without_pull_requests)) + ' commits not in a pull request')

  # Sort PRs and commits by age
  pull_requests = sorted(pull_requests, key=lambda pr: pr.number)
  commits_without_pull_requests = sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)

  # Start constructing the body text
  body = 'Merging ' + short_master_sha + ' into ' + LATEST_RELEASE_BRANCH

  conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
  body += '\n\nConductor for this PR is @' + conductor

  # List all PRs merged
  if len(pull_requests) > 0:
    body += '\n\nContains the following pull requests:'
    for pr in pull_requests:
      merger = get_merger_of_pr(repo, pr)
      body += '\n- #' + str(pr.number)
      body += ' - ' + pr.title
      body += ' (@' + merger + ')'

  # List all commits not part of a PR
  if len(commits_without_pull_requests) > 0:
    body += '\n\nContains the following commits not from a pull request:'
    for commit in commits_without_pull_requests:
      body += '\n- ' + commit.sha
      body += ' - ' + get_truncated_commit_message(commit)
      body += ' (@' + commit.author.login + ')'

  title = 'Merge ' + MASTER_BRANCH + ' into ' + LATEST_RELEASE_BRANCH

  # Create the pull request
  pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
  print('Created PR #' + str(pr.number))

  # Assign the conductor
  pr.add_to_assignees(conductor)
  print('Assigned PR to ' + conductor)

# Gets the person who should be in charge of the mergeback PR
def get_conductor(repo, pull_requests, other_commits):
  # If there are any PRs then use whoever merged the last one
  if len(pull_requests) > 0:
    return get_merger_of_pr(repo, pull_requests[-1])

  # Otherwise take the author of the latest commit
  return other_commits[-1].author.login

# Gets a list of the SHAs of all commits that have happened on master
# since the release branched off.
# This will not include any commits that exist on the release branch
# that aren't on master.
def get_commit_difference(repo):
  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MASTER_BRANCH).strip().split('\n')

  # Convert to full-fledged commit objects
  commits = [repo.get_commit(c) for c in commits]

  # Filter out merge commits for PRs
  return list(filter(lambda c: not is_pr_merge_commit(c), commits))

# Is the given commit the automatic merge commit from when merging a PR
def is_pr_merge_commit(commit):
  return commit.committer.login == 'web-flow' and len(commit.parents) > 1

# Gets a copy of the commit message that should display nicely
def get_truncated_commit_message(commit):
  message = commit.commit.message.split('\n')[0]
  if len(message) > 60:
    return message[:57] + '...'
  else:
    return message

# Converts a commit into the PR that introduced it to the master branch.
# Returns the PR object, or None if no PR could be found.
def get_pr_for_commit(repo, commit):
  prs = commit.get_pulls()

  if prs.totalCount > 0:
    # In the case that there are multiple PRs, return the earliest one
    prs = sorted(prs, key=lambda pr: int(pr.number))
    return prs[0]
  else:
    return None

# Get the person who merged the pull request.
# For most cases this will be the same as the author, but for PRs opened
# by external contributors getting the merger will get us the GitHub
# employee who reviewed and merged the PR.
def get_merger_of_pr(repo, pr):
  return repo.get_commit(pr.merge_commit_sha).author.login

def main():
  if len(sys.argv) != 3:
    raise Exception('Usage: update-release-branch.py <github token> <repository nwo>')
  github_token = sys.argv[1]
  repository_nwo = sys.argv[2]

  repo = Github(github_token).get_repo(repository_nwo)

  # Print what we intend to do
  print('Considering difference between ' + MASTER_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
  short_master_sha = run_git('rev-parse', '--short', MASTER_BRANCH).strip()
  print('Current head of ' + MASTER_BRANCH + ' is ' + short_master_sha)

  # See if there are any commits to merge in
  commits = get_commit_difference(repo)
  if len(commits) == 0:
    print('No commits to merge from ' + MASTER_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
    return

  # The branch name is based off of the name of branch being merged into
  # and the SHA of the branch being merged from. Thus if the branch already
  # exists we can assume we don't need to recreate it.
  new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_master_sha
  print('Branch name is ' + new_branch_name)

  # Check if the branch already exists. If so we can abort as this script
  # has already run on this combination of branches.
  if branch_exists_on_remote(new_branch_name):
    print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
    return

  # Create the new branch and push it to the remote
  print('Creating branch ' + new_branch_name)
  run_git('checkout', '-b', new_branch_name, MASTER_BRANCH)
  run_git('push', ORIGIN, new_branch_name)

  # Open a PR to update the branch
  open_pr(repo, commits, short_master_sha, new_branch_name)

if __name__ == '__main__':
  main()
```
.github/workflows/codeql.yml (vendored, 12 lines changed)

```diff
@@ -1,6 +1,6 @@
 name: "CodeQL action"
 
-on: [push]
+on: [push, pull_request]
 
 jobs:
   build:
@@ -11,6 +11,16 @@ jobs:
 
     steps:
     - uses: actions/checkout@v1
+      with:
+        # Must fetch at least the immediate parents so that if this is
+        # a pull request then we can checkout the head of the pull request.
+        fetch-depth: 2
+
+    # If this run was triggered by a pull request event then checkout
+    # the head of the pull request instead of the merge commit.
+    - run: git checkout HEAD^2
+      if: ${{ github.event_name == 'pull_request' }}
+
     - uses: ./init
       with:
         languages: javascript
```
.github/workflows/integration-testing.yml (vendored, 44 lines changed)

```diff
@@ -1,14 +1,10 @@
 name: "Integration Testing"
 
-on: [push]
+on: [push, pull_request]
 
 jobs:
   multi-language-repo_test-autodetect-languages:
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu-latest, windows-latest]
-    runs-on: ${{ matrix.os }}
+    runs-on: ubuntu-latest
 
     steps:
     - uses: actions/checkout@v2
@@ -16,9 +12,8 @@ jobs:
       shell: bash
       run: |
         mkdir ../action
-        shopt -s dotglob
-        mv * ../action/
-        mv ../action/tests/multi-language-repo/* .
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
     - uses: ./../action/init
     - name: Build code
       shell: bash
@@ -26,6 +21,20 @@ jobs:
     - uses: ./../action/analyze
       env:
         TEST_MODE: true
+    - run: |
+        cd "$CODEQL_ACTION_DATABASE_DIR"
+        # List all directories as there will be precisely one directory per database
+        # but there may be other files in this directory such as query suites.
+        if [ "$(ls -d */ | wc -l)" != 6 ] || \
+           [[ ! -d cpp ]] || \
+           [[ ! -d csharp ]] || \
+           [[ ! -d go ]] || \
+           [[ ! -d java ]] || \
+           [[ ! -d javascript ]] || \
+           [[ ! -d python ]]; then
+          echo "Did not find expected number of databases. Database dir contains: $(ls)"
+          exit 1
+        fi
 
   multi-language-repo_test-custom-queries:
     strategy:
@@ -40,9 +49,8 @@ jobs:
       shell: bash
       run: |
         mkdir ../action
-        shopt -s dotglob
-        mv * ../action/
-        mv ../action/tests/multi-language-repo/* .
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
     - uses: ./../action/init
       with:
         languages: cpp,csharp,java,javascript,python
@@ -72,9 +80,8 @@ jobs:
       shell: bash
      run: |
         mkdir ../action
-        shopt -s dotglob
-        mv * ../action/
-        mv ../action/tests/multi-language-repo/* .
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
     - uses: ./../action/init
       with:
         languages: go
@@ -96,15 +103,14 @@ jobs:
       shell: bash
       run: |
         mkdir ../action
-        shopt -s dotglob
-        mv * ../action/
-        mv ../action/tests/multi-language-repo/* .
+        mv * .github ../action/
+        mv ../action/tests/multi-language-repo/{*,.github} .
     - name: Set up Ruby
       uses: ruby/setup-ruby@v1
       with:
         ruby-version: 2.6
     - name: Install Code Scanning integration
-      run: bundle add code-scanning-rubocop --version 0.2.0 --skip-install
+      run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
     - name: Install dependencies
       run: bundle install
     - name: Rubocop run
```
.github/workflows/update-release-branch.yml (vendored, new file, 31 lines)

```yaml
name: Update release branch
on:
  schedule:
    - cron: 0 9 * * 1
  repository_dispatch:
    # Example of how to trigger this:
    # curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
    # Replace <token> with a personal access token from this page: https://github.com/settings/tokens
    types: [update-release-branch]

jobs:
  update:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          # Need full history so we can calculate diffs
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.5

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install PyGithub==1.51 requests

      - name: Update release branch
        run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
```
README.md (103 lines changed)

````diff
@@ -1,6 +1,6 @@
 # CodeQL Action
 
-This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/semmle/ql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
+This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
 
 ## License
 
@@ -10,6 +10,8 @@ The underlying CodeQL CLI, used in this action, is licensed under the [GitHub Co
 
 ## Usage
 
+This is a short walkthrough, but for more information read [configuring code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning).
+
 To get code scanning results from CodeQL analysis on your repo you can use the following workflow as a template:
 
 ```yaml
@@ -18,6 +20,7 @@ name: "Code Scanning - Action"
 
 on:
   push:
+  pull_request:
   schedule:
     - cron: '0 0 * * 0'
 
@@ -33,6 +36,17 @@ jobs:
     steps:
     - name: Checkout repository
       uses: actions/checkout@v2
+      with:
+        # Must fetch at least the immediate parents so that if this is
+        # a pull request then we can checkout the head of the pull request.
+        # Only include this option if you are running this workflow on pull requests.
+        fetch-depth: 2
+
+    # If this run was triggered by a pull request event then checkout
+    # the head of the pull request instead of the merge commit.
+    # Only include this step if you are running this workflow on pull requests.
+    - run: git checkout HEAD^2
+      if: ${{ github.event_name == 'pull_request' }}
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
@@ -78,24 +92,9 @@ If you prefer to integrate this within an existing CI workflow, it should end up
       uses: github/codeql-action/analyze@v1
 ```
 
-### Actions triggers
+### Configuration file
 
-The CodeQL action should be run on `push` events, and on a `schedule`. `Push` events allow us to do a detailed analysis of the delta in a pull request, while the `schedule` event ensures that GitHub regularly scans the repository for the latest vulnerabilities, even if the repository becomes inactive. This action does not support the `pull_request` event.
-
-### Configuration
-
-You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses).
-
-You can disable the default queries using `disable-default-queries: true`.
-
-You can choose to ignore some files or folders from the analysis, or include additional files/folders for analysis. This *only* works for Javascript and Python analysis.
-Identifying potential files for extraction:
-
-- Scans each folder that's defined as `paths` in turn, traversing subfolders, and looking for relevant files.
-- If it finds a subfolder that's defined as `paths-ignore`, stop traversing.
-- If a file or folder is both in `paths` and `paths-ignore`, the `paths-ignore` is ignored.
-
-Use the `config-file` parameter of the init action to enable the configuration file. For example:
+Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
 
 ```yaml
 - uses: github/codeql-action/init@v1
@@ -103,72 +102,8 @@ Use the `config-file` parameter of the init action to enable the configuration f
     config-file: ./.github/codeql/codeql-config.yml
 ```
 
-A config file looks like this:
-
-```yaml
-name: "My CodeQL config"
-
-disable-default-queries: true
-
-queries:
-  - name: In-repo queries (Runs the queries located in the my-queries folder of the repo)
-    uses: ./my-queries
-  - name: External Javascript QL pack (Runs a QL pack located in an external repo)
-    uses: /Semmle/ql/javascript/ql/src/Electron@master
-  - name: External query (Runs a single query located in an external QL pack)
-    uses: Semmle/ql/javascript/ql/src/AngularJS/DeadAngularJSEventListener.ql@master
-  - name: Select query suite (Runs a query suite)
-    uses: ./codeql-querypacks/complex-python-querypack/rootAndBar.qls
-
-paths:
-  - src/util.ts
-
-paths-ignore:
-  - src
-  - lib
-```
+The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
 
 ## Troubleshooting
 
-### Trouble with Go dependencies
-
-#### If you use a vendor directory
-
-Try passing
-
-```yaml
-env:
-  GOFLAGS: "-mod=vendor"
-```
-
-to `github/codeql-action/analyze`.
-
-#### If you do not use a vendor directory
-
-Dependencies on public repositories should just work. If you have dependencies on private repositories, one option is to use `git config` and a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) to authenticate when downloading dependencies. Add a section like
-
-```yaml
-steps:
-- name: Configure git private repo access
-  env:
-    TOKEN: ${{ secrets.GITHUB_PAT }}
-  run: |
-    git config --global url."https://${TOKEN}@github.com/foo/bar".insteadOf "https://github.com/foo/bar"
-    git config --global url."https://${TOKEN}@github.com/foo/baz".insteadOf "https://github.com/foo/baz"
-```
-
-before any codeql actions. A similar thing can also be done with an SSH key or deploy key.
-
-### C# using dotnet version 2 on linux
-
-This currently requires invoking `dotnet` with the `/p:UseSharedCompilation=false` flag. For example:
-
-```shell
-dotnet build /p:UseSharedCompilation=false
-```
-
-Version 3 does not require the additional flag.
-
-### Analysing Go together with other languages on `macos-latest`
-
-When running on macos it is currently not possible to analyze Go in conjunction with any of Java, C/C++, or C#. Each language can still be analyzed separately.
+Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
````
analyze/action.yml

```diff
@@ -12,6 +12,9 @@ inputs:
     description: Upload the SARIF file
     required: false
     default: true
+  ram:
+    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
+    required: false
   token:
     default: ${{ github.token }}
   matrix:
```
Deleted query set files:

```diff
@@ -1,4 +0,0 @@
-name: extended-cpp-queryset
-queries:
-  - name: Additional C++ queries
-    uses: github/codeql/cpp/ql/src/codeql-suites/cpp-lgtm.qls@master
```

```diff
@@ -1,4 +0,0 @@
-name: extended-csharp-queryset
-queries:
-  - name: Additional C# queries
-    uses: github/codeql/csharp/ql/src/codeql-suites/csharp-lgtm.qls@master
```

```diff
@@ -1,4 +0,0 @@
-name: extended-go-queryset
-queries:
-  - name: Additional Go queries
-    uses: github/codeql-go/ql/src/codeql-suites/go-lgtm.qls@master
```

```diff
@@ -1,4 +0,0 @@
-name: extended-java-queryset
-queries:
-  - name: Additional Java queries
-    uses: github/codeql/java/ql/src/codeql-suites/java-lgtm.qls@master
```

```diff
@@ -1,4 +0,0 @@
-name: extended-javascript-queryset
-queries:
-  - name: Additional Javascript queries
-    uses: github/codeql/javascript/ql/src/codeql-suites/javascript-lgtm.qls@master
```

```diff
@@ -1,4 +0,0 @@
-name: extended-python-queryset
-queries:
-  - name: Additional Python queries
-    uses: github/codeql/python/ql/src/codeql-suites/python-lgtm.qls@master
```
init/action.yml

```diff
@@ -5,12 +5,14 @@ inputs:
   tools:
     description: URL of CodeQL tools
     required: false
-    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
+    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz
   languages:
     description: The languages to be analysed
     required: false
   token:
     default: ${{ github.token }}
   matrix:
     default: ${{ toJson(matrix) }}
+  config-file:
+    description: Path of the config file to use
+    required: false
```
jest.config.js (deleted)

```diff
@@ -1,11 +0,0 @@
-module.exports = {
-  clearMocks: true,
-  moduleFileExtensions: ['js', 'ts'],
-  testEnvironment: 'node',
-  testMatch: ['**/*.test.ts'],
-  testRunner: 'jest-circus/runner',
-  transform: {
-    '^.+\\.ts$': 'ts-jest'
-  },
-  verbose: true
-}
```
lib/analysis-paths.js (generated, 2 lines changed)

```diff
@@ -16,7 +16,7 @@ function includeAndExcludeAnalysisPaths(config, languages) {
     core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
 }
 function isInterpretedLanguage(language) {
-    return language === 'javascript' && language === 'python';
+    return language === 'javascript' || language === 'python';
 }
 // Index include/exclude only work in javascript and python
 // If some other language is detected/configured show a warning
```
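The one-character fix above is easy to misread, so here is a minimal sketch (plain Node.js, not part of the repository) of why the old predicate could never hold: a single string cannot equal both `'javascript'` and `'python'` at once, so the `&&` version returned false for every input and the include/exclude paths were silently ignored for all languages.

```javascript
const assert = require('assert');

// Old (buggy) predicate: `language` would have to equal two different
// strings at the same time, which is impossible, so this is always false.
function isInterpretedLanguageOld(language) {
    return language === 'javascript' && language === 'python';
}

// Fixed predicate: true when the language is either of the two.
function isInterpretedLanguageNew(language) {
    return language === 'javascript' || language === 'python';
}

for (const lang of ['javascript', 'python', 'cpp']) {
    assert.strictEqual(isInterpretedLanguageOld(lang), false); // the bug: never true
}
assert.strictEqual(isInterpretedLanguageNew('javascript'), true);
assert.strictEqual(isInterpretedLanguageNew('python'), true);
assert.strictEqual(isInterpretedLanguageNew('cpp'), false);
```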
lib/config-utils.js (generated, 185 lines changed)

```diff
@@ -12,6 +12,13 @@ const io = __importStar(require("@actions/io"));
 const fs = __importStar(require("fs"));
 const yaml = __importStar(require("js-yaml"));
 const path = __importStar(require("path"));
+const util = __importStar(require("./util"));
+const NAME_PROPERTY = 'name';
+const DISPLAY_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
+const QUERIES_PROPERTY = 'queries';
+const QUERIES_USES_PROPERTY = 'uses';
+const PATHS_IGNORE_PROPERTY = 'paths-ignore';
+const PATHS_PROPERTY = 'paths';
 class ExternalQuery {
     constructor(repository, ref) {
         this.path = '';
@@ -20,39 +27,72 @@ class ExternalQuery {
     }
 }
 exports.ExternalQuery = ExternalQuery;
+// The set of acceptable values for built-in suites from the codeql bundle
+const builtinSuites = ['security-extended', 'security-and-quality'];
 class Config {
     constructor() {
         this.name = "";
         this.disableDefaultQueries = false;
         this.additionalQueries = [];
         this.externalQueries = [];
+        this.additionalSuites = [];
         this.pathsIgnore = [];
         this.paths = [];
     }
-    addQuery(queryUses) {
+    addQuery(configFile, queryUses) {
         // The logic for parsing the string is based on what actions does for
         // parsing the 'uses' actions in the workflow file
         queryUses = queryUses.trim();
         if (queryUses === "") {
-            throw '"uses" value for queries cannot be blank';
+            throw new Error(getQueryUsesInvalid(configFile));
         }
         // Check for the local path case before we start trying to parse the repository name
         if (queryUses.startsWith("./")) {
-            this.additionalQueries.push(queryUses.slice(2));
+            const localQueryPath = queryUses.slice(2);
+            // Resolve the local path against the workspace so that when this is
+            // passed to codeql it resolves to exactly the path we expect it to resolve to.
+            const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
+            const absoluteQueryPath = path.join(workspacePath, localQueryPath);
+            // Check the file exists
+            if (!fs.existsSync(absoluteQueryPath)) {
+                throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
+            }
+            // Check the local path doesn't jump outside the repo using '..' or symlinks
+            if (!(fs.realpathSync(absoluteQueryPath) + path.sep).startsWith(workspacePath + path.sep)) {
+                throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
+            }
+            this.additionalQueries.push(absoluteQueryPath);
             return;
         }
+        // Check for one of the builtin suites
+        if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
+            const suite = builtinSuites.find((suite) => suite === queryUses);
+            if (suite) {
+                this.additionalSuites.push(suite);
+                return;
+            }
+            else {
+                throw new Error(getQueryUsesInvalid(configFile, queryUses));
+            }
+        }
         let tok = queryUses.split('@');
         if (tok.length !== 2) {
-            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
+            throw new Error(getQueryUsesInvalid(configFile, queryUses));
         }
         const ref = tok[1];
         tok = tok[0].split('/');
         // The first token is the owner
         // The second token is the repo
         // The rest is a path, if there is more than one token combine them to form the full path
+        if (tok.length < 2) {
+            throw new Error(getQueryUsesInvalid(configFile, queryUses));
+        }
         if (tok.length > 3) {
             tok = [tok[0], tok[1], tok.slice(2).join('/')];
         }
-        if (tok.length < 2) {
-            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
-        }
+        // Check none of the parts of the repository name are empty
+        if (tok[0].trim() === '' || tok[1].trim() === '') {
+            throw new Error(getQueryUsesInvalid(configFile, queryUses));
+        }
         let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
         if (tok.length === 3) {
@@ -62,62 +102,137 @@ class Config {
     }
 }
 exports.Config = Config;
-const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
+function getNameInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
+}
+exports.getNameInvalid = getNameInvalid;
+function getDisableDefaultQueriesInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, DISPLAY_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
+}
+exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
+function getQueriesInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
+}
+exports.getQueriesInvalid = getQueriesInvalid;
+function getQueryUsesInvalid(configFile, queryUses) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
+        '), a relative path, or be of the form "owner/repo[/path]@ref"' +
+        (queryUses !== undefined ? '\n Found: ' + queryUses : ''));
+}
+exports.getQueryUsesInvalid = getQueryUsesInvalid;
+function getPathsIgnoreInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
+}
+exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
+function getPathsInvalid(configFile) {
+    return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
+}
+exports.getPathsInvalid = getPathsInvalid;
+function getLocalPathOutsideOfRepository(configFile, localPath) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
+}
+exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
+function getLocalPathDoesNotExist(configFile, localPath) {
+    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
+}
+exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
+function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
+    return 'The configuration file "' + configFile + '" is outside of the workspace';
+}
+exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
+function getConfigFileDoesNotExistErrorMessage(configFile) {
+    return 'The configuration file "' + configFile + '" does not exist';
+}
+exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
+function getConfigFilePropertyError(configFile, property, error) {
+    return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
+}
 function initConfig() {
-    const configFile = core.getInput('config-file');
+    let configFile = core.getInput('config-file');
     const config = new Config();
     // If no config file was provided create an empty one
     if (configFile === '') {
         core.debug('No configuration file was provided');
         return config;
     }
-    try {
-        const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
-        if (parsedYAML.name && typeof parsedYAML.name === "string") {
-            config.name = parsedYAML.name;
-        }
-        if (parsedYAML['disable-default-queries'] && typeof parsedYAML['disable-default-queries'] === "boolean") {
-            config.disableDefaultQueries = parsedYAML['disable-default-queries'];
-        }
-        const queries = parsedYAML.queries;
-        if (queries && queries instanceof Array) {
-            queries.forEach(query => {
-                if (query.uses && typeof query.uses === "string") {
-                    config.addQuery(query.uses);
-                }
-            });
-        }
-        const pathsIgnore = parsedYAML['paths-ignore'];
-        if (pathsIgnore && pathsIgnore instanceof Array) {
-            pathsIgnore.forEach(path => {
-                if (typeof path === "string") {
-                    config.pathsIgnore.push(path);
-                }
-            });
-        }
-        const paths = parsedYAML.paths;
-        if (paths && paths instanceof Array) {
-            paths.forEach(path => {
-                if (typeof path === "string") {
-                    config.paths.push(path);
-                }
-            });
-        }
-    }
-    catch (err) {
-        core.setFailed(err);
-    }
+    // Treat the config file as relative to the workspace
+    const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
+    configFile = path.resolve(workspacePath, configFile);
+    // Error if the config file is now outside of the workspace
+    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
+        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
+    }
+    // Error if the file does not exist
+    if (!fs.existsSync(configFile)) {
+        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
+    }
+    const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
+    if (NAME_PROPERTY in parsedYAML) {
+        if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
+            throw new Error(getNameInvalid(configFile));
+        }
+        if (parsedYAML[NAME_PROPERTY].length === 0) {
+            throw new Error(getNameInvalid(configFile));
+        }
+        config.name = parsedYAML[NAME_PROPERTY];
+    }
+    if (DISPLAY_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
+        if (typeof parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
+            throw new Error(getDisableDefaultQueriesInvalid(configFile));
+        }
+        config.disableDefaultQueries = parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY];
+    }
+    if (QUERIES_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
+            throw new Error(getQueriesInvalid(configFile));
+        }
+        parsedYAML[QUERIES_PROPERTY].forEach(query => {
+            if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
+                throw new Error(getQueryUsesInvalid(configFile));
+            }
+            config.addQuery(configFile, query[QUERIES_USES_PROPERTY]);
+        });
+    }
+    if (PATHS_IGNORE_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
+            throw new Error(getPathsIgnoreInvalid(configFile));
+        }
+        parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
+            if (typeof path !== "string" || path === '') {
+                throw new Error(getPathsIgnoreInvalid(configFile));
+            }
+            config.pathsIgnore.push(path);
+        });
+    }
+    if (PATHS_PROPERTY in parsedYAML) {
+        if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
+            throw new Error(getPathsInvalid(configFile));
+        }
+        parsedYAML[PATHS_PROPERTY].forEach(path => {
+            if (typeof path !== "string" || path === '') {
+                throw new Error(getPathsInvalid(configFile));
+            }
+            config.paths.push(path);
+        });
+    }
     return config;
 }
+function getConfigFolder() {
+    return util.getRequiredEnvParam('RUNNER_TEMP');
+}
+function getConfigFile() {
+    return path.join(getConfigFolder(), 'config');
+}
+exports.getConfigFile = getConfigFile;
 async function saveConfig(config) {
     const configString = JSON.stringify(config);
-    await io.mkdirP(configFolder);
-    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
+    await io.mkdirP(getConfigFolder());
+    fs.writeFileSync(getConfigFile(), configString, 'utf8');
     core.debug('Saved config:');
     core.debug(configString);
 }
 async function loadConfig() {
-    const configFile = path.join(configFolder, 'config');
+    const configFile = getConfigFile();
     if (fs.existsSync(configFile)) {
         const configString = fs.readFileSync(configFile, 'utf8');
         core.debug('Loaded config:');
```

lib/config-utils.js.map: file diff suppressed because one or more lines are too long.
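The rewritten `addQuery` accepts three shapes of `uses` value: a local path starting with `./`, a bare built-in suite name, or `owner/repo[/path]@ref`. Below is a minimal standalone sketch of that dispatch logic; the `parseQueryUses` helper is invented for illustration (it is not the action's API), and the filesystem and workspace checks from the real code are omitted.

```javascript
const builtinSuites = ['security-extended', 'security-and-quality'];

// Simplified classifier for a config file's "queries[].uses" value,
// mirroring the parsing rules in the diff above.
function parseQueryUses(queryUses) {
    queryUses = queryUses.trim();
    if (queryUses === '') {
        throw new Error('"uses" value must not be blank');
    }
    // Local path case: relative to the repository root.
    if (queryUses.startsWith('./')) {
        return { kind: 'local', path: queryUses.slice(2) };
    }
    // No '/' and no '@' means it must be one of the built-in suites.
    if (!queryUses.includes('/') && !queryUses.includes('@')) {
        if (!builtinSuites.includes(queryUses)) {
            throw new Error('unknown built-in suite: ' + queryUses);
        }
        return { kind: 'suite', suite: queryUses };
    }
    // Otherwise it must be owner/repo[/path]@ref with exactly one '@'.
    const atParts = queryUses.split('@');
    if (atParts.length !== 2) {
        throw new Error('expected owner/repo[/path]@ref, found: ' + queryUses);
    }
    const ref = atParts[1];
    const segments = atParts[0].split('/');
    if (segments.length < 2 || segments[0].trim() === '' || segments[1].trim() === '') {
        throw new Error('expected owner/repo[/path]@ref, found: ' + queryUses);
    }
    return {
        kind: 'external',
        repository: segments[0] + '/' + segments[1],
        path: segments.slice(2).join('/'),
        ref,
    };
}

console.log(parseQueryUses('./my-queries'));      // { kind: 'local', path: 'my-queries' }
console.log(parseQueryUses('security-extended')); // { kind: 'suite', suite: 'security-extended' }
console.log(parseQueryUses('octo/ql/cpp@main'));  // { kind: 'external', repository: 'octo/ql', path: 'cpp', ref: 'main' }
```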
lib/config-utils.test.js (generated, new file, 162 lines)

```javascript
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const util = __importStar(require("./util"));
function setInput(name, value) {
    // Transformation copied from
    // https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
    const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    if (value !== undefined) {
        process.env[envVar] = value;
    }
    else {
        delete process.env[envVar];
    }
}
ava_1.default("load empty config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        setInput('config-file', undefined);
        const config = await configUtils.loadConfig();
        t.deepEqual(config, new configUtils.Config());
    });
});
ava_1.default("loading config saves config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        const configFile = configUtils.getConfigFile();
        // Sanity check the saved config file does not already exist
        t.false(fs.existsSync(configFile));
        const config = await configUtils.loadConfig();
        // The saved config file should now exist
        t.true(fs.existsSync(configFile));
        // And the contents should parse correctly to the config that was returned
        t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
    });
});
ava_1.default("load input outside of workspace", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        setInput('config-file', '../input');
        try {
            await configUtils.loadConfig();
            throw new Error('loadConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
        }
    });
});
ava_1.default("load non-existent input", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        t.false(fs.existsSync(path.join(tmpDir, 'input')));
        setInput('config-file', 'input');
        try {
            await configUtils.loadConfig();
            throw new Error('loadConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
        }
    });
});
ava_1.default("load non-empty input", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_TEMP'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        // Just create a generic config object with non-default values for all fields
        const inputFileContents = `
            name: my config
            disable-default-queries: true
            queries:
                - uses: ./
                - uses: ./foo
                - uses: foo/bar@dev
            paths-ignore:
                - a
                - b
            paths:
                - c/d`;
        // And the config we expect it to parse to
        const expectedConfig = new configUtils.Config();
        expectedConfig.name = 'my config';
        expectedConfig.disableDefaultQueries = true;
        expectedConfig.additionalQueries.push(tmpDir);
        expectedConfig.additionalQueries.push(path.join(tmpDir, 'foo'));
        expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
        expectedConfig.pathsIgnore = ['a', 'b'];
        expectedConfig.paths = ['c/d'];
        fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
        setInput('config-file', 'input');
        fs.mkdirSync(path.join(tmpDir, 'foo'));
        const actualConfig = await configUtils.loadConfig();
        // Should exactly equal the object we constructed earlier
        t.deepEqual(actualConfig, expectedConfig);
    });
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
    ava_1.default("load invalid input - " + testName, async (t) => {
        return await util.withTmpDir(async (tmpDir) => {
            process.env['RUNNER_TEMP'] = tmpDir;
            process.env['GITHUB_WORKSPACE'] = tmpDir;
            const inputFile = path.join(tmpDir, 'input');
            fs.writeFileSync(inputFile, inputFileContents, 'utf8');
            setInput('config-file', 'input');
            try {
                await configUtils.loadConfig();
                throw new Error('loadConfig did not throw error');
            }
            catch (err) {
                t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
            }
        });
    });
}
doInvalidInputTest('name invalid type', `
    name:
        - foo: bar`, configUtils.getNameInvalid);
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest('queries uses invalid type', `
    queries:
        - uses:
            - hello: world`, configUtils.getQueryUsesInvalid);
function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
    // Invalid contents of a "queries.uses" field.
    // Should fail with the expected error message
    const inputFileContents = `
    name: my config
    queries:
        - name: foo
          uses: ` + input;
    doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
}
// Various "uses" fields, and the errors they should produce
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
//# sourceMappingURL=config-utils.test.js.map
```
lib/config-utils.test.js.map (new file): file diff suppressed because one or more lines are too long.
lib/external-queries.js (generated, 2 lines changed)

```diff
@@ -13,7 +13,7 @@ const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const util = __importStar(require("./util"));
 async function checkoutExternalQueries(config) {
-    const folder = util.getRequiredEnvParam('RUNNER_WORKSPACE');
+    const folder = util.getRequiredEnvParam('RUNNER_TEMP');
     for (const externalQuery of config.externalQueries) {
         core.info('Checking out ' + externalQuery.repository);
         const checkoutLocation = path.join(folder, externalQuery.repository);
```

lib/external-queries.js.map (generated)

```diff
@@ -1 +1 @@
-{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,CAAC;IAE5D,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}
+{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}
```
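A small aside on the layout this change produces: `checkoutExternalQueries` places each external pack at `<folder>/<owner>/<repo>`, with `folder` now taken from `RUNNER_TEMP`, which is what the updated test below asserts against. A runnable sketch, with made-up values for the environment and query:

```javascript
const path = require('path');

// Layout used by checkoutExternalQueries: the checkout lands at
// <RUNNER_TEMP>/<owner>/<repo>. Values below are illustrative only.
process.env['RUNNER_TEMP'] = '/tmp/runner';
const folder = process.env['RUNNER_TEMP'];
const externalQuery = {
    repository: 'github/codeql-go',
    ref: 'df4c6869212341b601005567381944ed90906b6b',
};

const checkoutLocation = path.join(folder, externalQuery.repository);
console.log(checkoutLocation); // /tmp/runner/github/codeql-go
```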
lib/external-queries.test.js (generated, 2 lines changed)

```diff
@@ -22,7 +22,7 @@ ava_1.default("checkoutExternalQueries", async (t) => {
         new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
     ];
     await util.withTmpDir(async (tmpDir) => {
-        process.env["RUNNER_WORKSPACE"] = tmpDir;
+        process.env["RUNNER_TEMP"] = tmpDir;
         await externalQueries.checkoutExternalQueries(config);
         // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
         t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
```

lib/external-queries.test.js.map (generated)

```diff
@@ -1 +1 @@
-{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,6CAA+B;AAE/B,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACtC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACrB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAChG,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACjC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QACzC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IACjF,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"}
+{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,6CAA+B;AAE/B,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACtC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACrB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAChG,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACjC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IACjF,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"}
```
lib/finalize-db.js (generated, 95 lines changed)

```diff
@@ -11,12 +11,49 @@ const core = __importStar(require("@actions/core"));
 const exec = __importStar(require("@actions/exec"));
 const io = __importStar(require("@actions/io"));
 const fs = __importStar(require("fs"));
+const os = __importStar(require("os"));
 const path = __importStar(require("path"));
 const configUtils = __importStar(require("./config-utils"));
 const externalQueries = __importStar(require("./external-queries"));
 const sharedEnv = __importStar(require("./shared-environment"));
 const upload_lib = __importStar(require("./upload-lib"));
 const util = __importStar(require("./util"));
+/**
+ * A list of queries from https://github.com/github/codeql that
+ * we don't want to run. Disabling them here is a quicker alternative to
+ * disabling them in the code scanning query suites. Queries should also
+ * be disabled in the suites, and removed from this list here once the
+ * bundle is updated to make those suite changes live.
+ *
+ * Format is a map from language to an array of path suffixes of .ql files.
+ */
+const DISABLED_BUILTIN_QUERIES = {
+    'csharp': [
+        'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
+        'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
+    ]
+};
+function queryIsDisabled(language, query) {
+    return (DISABLED_BUILTIN_QUERIES[language] || [])
+        .some(disabledQuery => query.endsWith(disabledQuery));
+}
+function getMemoryFlag() {
+    let memoryToUseMegaBytes;
+    const memoryToUseString = core.getInput("ram");
+    if (memoryToUseString) {
+        memoryToUseMegaBytes = Number(memoryToUseString);
+        if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
+            throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
+        }
+    }
+    else {
+        const totalMemoryBytes = os.totalmem();
+        const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
+        const systemReservedMemoryMegaBytes = 256;
+        memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
+    }
+    return "--ram=" + Math.floor(memoryToUseMegaBytes);
+}
 async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
     const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
     if (scannedLanguages) {
@@ -49,26 +86,50 @@ async function finalizeDatabaseCreation(codeqlCmd, databaseFolder) {
         core.endGroup();
     }
 }
-async function resolveQueryLanguages(codeqlCmd, config) {
-    let res = new Map();
-    if (config.additionalQueries.length !== 0) {
-        let resolveQueriesOutput = '';
-        const options = {
-            listeners: {
-                stdout: (data) => {
-                    resolveQueriesOutput += data.toString();
-                }
-            }
-        };
-        await exec.exec(codeqlCmd, [
-            'resolve',
-            'queries',
-            ...config.additionalQueries,
-            '--format=bylanguage'
-        ], options);
-        const resolveQueriesOutputObject = JSON.parse(resolveQueriesOutput);
+async function runResolveQueries(codeqlCmd, queries) {
+    let output = '';
+    const options = {
+        listeners: {
+            stdout: (data) => {
+                output += data.toString();
+            }
+        }
+    };
+    await exec.exec(codeqlCmd, [
+        'resolve',
+        'queries',
+        ...queries,
+        '--format=bylanguage'
+    ], options);
+    return JSON.parse(output);
+}
+async function resolveQueryLanguages(codeqlCmd, config) {
+    let res = new Map();
+    if (!config.disableDefaultQueries || config.additionalSuites.length !== 0) {
+        const suites = [];
+        for (const language of await util.getLanguages()) {
+            if (!config.disableDefaultQueries) {
+                suites.push(language + '-code-scanning.qls');
+            }
+            for (const additionalSuite of config.additionalSuites) {
+                suites.push(language + '-' + additionalSuite + '.qls');
+            }
+        }
+        const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, suites);
+        for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
+            if (res[language] === undefined) {
+                res[language] = [];
+            }
+            res[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
+        }
+    }
+    if (config.additionalQueries.length !== 0) {
+        const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, config.additionalQueries);
         for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
-            res[language] = Object.keys(queries);
+            if (res[language] === undefined) {
+                res[language] = [];
+            }
+            res[language].push(...Object.keys(queries));
         }
         const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
         const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
@@ -88,20 +149,26 @@ async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
     const queriesPerLanguage = await resolveQueryLanguages(codeqlCmd, config);
     for (let database of fs.readdirSync(databaseFolder)) {
         core.startGroup('Analyzing ' + database);
-        const queries = [];
-        if (!config.disableDefaultQueries) {
-            queries.push(database + '-code-scanning.qls');
+        const queries = queriesPerLanguage[database] || [];
+        if (queries.length === 0) {
+            throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
         }
-        queries.push(...(queriesPerLanguage[database] || []));
+        // Pass the queries to codeql using a file instead of using the command
+        // line to avoid command line length restrictions, particularly on windows.
+        const querySuite = path.join(databaseFolder, database + '-queries.qls');
+        const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
+        fs.writeFileSync(querySuite, querySuiteContents);
+        core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
         const sarifFile = path.join(sarifFolder, database + '.sarif');
         await exec.exec(codeqlCmd, [
             'database',
             'analyze',
+            getMemoryFlag(),
             path.join(databaseFolder, database),
             '--format=sarif-latest',
             '--output=' + sarifFile,
             '--no-sarif-add-snippets',
-            ...queries
+            querySuite
         ]);
         core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
         core.endGroup();
@@ -126,7 +193,7 @@ async function run() {
     await runQueries(codeqlCmd, databaseFolder, sarifFolder, config);
     if ('true' === core.getInput('upload')) {
         if (!await upload_lib.upload(sarifFolder)) {
-            await util.reportActionFailed('failed', 'upload');
+            await util.reportActionFailed('finish', 'upload');
             return;
         }
     }
```

lib/finalize-db.js.map (generated): file diff suppressed because one or more lines are too long.
13
lib/setup-tracer.js
generated
13
lib/setup-tracer.js
generated
@@ -100,12 +100,13 @@ function concatTracerConfigs(configs) {
|
||||
totalCount += count;
|
||||
totalLines.push(...lines.slice(2));
|
||||
}
|
||||
const newLogFilePath = path.resolve(util.workspaceFolder(), 'compound-build-tracer.log');
|
||||
const spec = path.resolve(util.workspaceFolder(), 'compound-spec');
|
||||
const tempFolder = path.resolve(util.workspaceFolder(), 'compound-temp');
|
||||
const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
|
||||
const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
|
||||
const spec = path.resolve(tempFolder, 'compound-spec');
|
||||
const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
|
||||
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
|
||||
if (copyExecutables) {
|
||||
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = tempFolder;
|
||||
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
|
||||
envSize += 1;
|
||||
}
|
||||
fs.writeFileSync(spec, newSpecContent.join('\n'));
|
||||
@@ -156,7 +157,7 @@ async function run() {
|
||||
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
|
||||
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
|
||||
core.exportVariable('CODEQL_RAM', codeqlRam);
|
||||
const databaseFolder = path.resolve(util.workspaceFolder(), 'codeql_databases');
|
||||
const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
|
||||
await io.mkdirP(databaseFolder);
|
||||
let tracedLanguages = {};
|
||||
let scannedLanguages = [];
|
||||
@@ -205,8 +206,8 @@ async function run() {
|
||||
await util.reportActionFailed('init', error.message, error.stack);
|
||||
return;
|
||||
}
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
|
||||
await util.reportActionSucceeded('init');
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
|
||||
}
|
||||
run().catch(e => {
|
||||
core.setFailed("init action failed: " + e);
|
||||
|
||||
File diff suppressed because one or more lines are too long
82
lib/upload-lib.js
generated
82
lib/upload-lib.js
generated
@@ -13,26 +13,14 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const http = __importStar(require("@actions/http-client"));
|
||||
const auth = __importStar(require("@actions/http-client/auth"));
|
||||
const io = __importStar(require("@actions/io"));
|
||||
const file_url_1 = __importDefault(require("file-url"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const jsonschema = __importStar(require("jsonschema"));
|
||||
const path = __importStar(require("path"));
|
||||
const zlib_1 = __importDefault(require("zlib"));
|
||||
const fingerprints = __importStar(require("./fingerprints"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util = __importStar(require("./util"));
|
||||
// Construct the location of the sentinel file for detecting multiple uploads.
|
||||
// The returned location should be writable.
|
||||
async function getSentinelFilePath() {
|
||||
// Use the temp dir instead of placing next to the sarif file because of
|
||||
// issues with docker actions. The directory containing the sarif file
|
||||
// may not be writable by us.
|
||||
const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
|
||||
await io.mkdirP(uploadsTmpDir);
|
||||
// Hash the absolute path so we'll behave correctly in the unlikely
|
||||
// scenario a file is referenced twice with different paths.
|
||||
return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
|
||||
}
|
||||
// Takes a list of paths to sarif files and combines them together,
|
||||
// returning the contents of the combined sarif file.
|
||||
function combineSarifFiles(sarifFiles) {
|
||||
@@ -125,27 +113,63 @@ async function upload(input) {
|
||||
}
|
||||
}
|
||||
exports.upload = upload;
|
||||
// Counts the number of results in the given SARIF file
|
||||
function countResultsInSarif(sarif) {
|
||||
let numResults = 0;
|
||||
for (const run of JSON.parse(sarif).runs) {
|
||||
numResults += run.results.length;
|
||||
}
|
||||
return numResults;
|
||||
}
|
||||
exports.countResultsInSarif = countResultsInSarif;
|
||||
// Validates that the given file path refers to a valid SARIF file.
|
||||
// Returns a non-empty list of error message if the file is invalid,
|
||||
// otherwise returns the empty list if the file is valid.
|
||||
function validateSarifFileSchema(sarifFilePath) {
|
||||
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
|
||||
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
|
||||
const result = new jsonschema.Validator().validate(sarif, schema);
|
||||
if (result.valid) {
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
// Set the failure message to the stacks of all the errors.
|
||||
// This should be of a manageable size and may even give enough to fix the error.
|
||||
const errorMessages = result.errors.map(e => "- " + e.stack);
|
||||
core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));
|
||||
// Also output the more verbose error messages in groups as these may be very large.
|
||||
for (const error of result.errors) {
|
||||
core.startGroup("Error details: " + error.stack);
|
||||
core.info(JSON.stringify(error, null, 2));
|
||||
core.endGroup();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
exports.validateSarifFileSchema = validateSarifFileSchema;
|
||||
// Uploads the given set of sarif files.
|
||||
// Returns true iff the upload occurred and succeeded
|
||||
async function uploadFiles(sarifFiles) {
|
||||
core.startGroup("Uploading results");
|
||||
let succeeded = false;
|
||||
try {
|
||||
// Check if an upload has happened before. If so then abort.
|
||||
// This is intended to catch when the finish and upload-sarif actions
|
||||
// are used together, and then the upload-sarif action is invoked twice.
|
||||
const sentinelFile = await getSentinelFilePath();
|
||||
if (fs.existsSync(sentinelFile)) {
|
||||
core.info("Aborting as an upload has already happened from this job");
|
||||
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
|
||||
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
|
||||
if (process.env[sentinelEnvVar]) {
|
||||
core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
|
||||
return false;
|
||||
}
|
||||
const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
|
||||
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
|
||||
// Validate that the files we were asked to upload are all valid SARIF files
|
||||
for (const file of sarifFiles) {
|
||||
if (!validateSarifFileSchema(file)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
const commitOid = await util.getCommitOid();
|
||||
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
|
||||
const ref = util.getRef();
|
||||
const analysisKey = await util.getAnalysisKey();
|
||||
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
|
||||
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
|
||||
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
|
||||
let sarifPayload = combineSarifFiles(sarifFiles);
|
||||
sarifPayload = fingerprints.addFingerprints(sarifPayload);
|
||||
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
|
||||
@@ -173,14 +197,12 @@ async function uploadFiles(sarifFiles) {
|
||||
"started_at": startedAt,
|
||||
"tool_names": toolNames,
|
||||
});
|
||||
// Log some useful debug info about the info
|
||||
core.debug("Raw upload size: " + sarifPayload.length + " bytes");
|
||||
core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
|
||||
core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
|
||||
// Make the upload
|
||||
succeeded = await uploadPayload(payload);
|
||||
// Mark that we have made an upload
|
||||
fs.writeFileSync(sentinelFile, '');
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
}
|
||||
const succeeded = await uploadPayload(payload);
|
||||
core.endGroup();
|
||||
return succeeded;
|
||||
}
|
||||
|
||||
File diff suppressed because one or more lines are too long
25
lib/upload-lib.test.js
generated
Normal file
25
lib/upload-lib.test.js
generated
Normal file
@@ -0,0 +1,25 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const uploadLib = __importStar(require("./upload-lib"));
|
||||
ava_1.default('validateSarifFileSchema - valid', t => {
|
||||
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
|
||||
t.true(uploadLib.validateSarifFileSchema(inputFile));
|
||||
});
|
||||
ava_1.default('validateSarifFileSchema - invalid', t => {
|
||||
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
|
||||
t.false(uploadLib.validateSarifFileSchema(inputFile));
|
||||
// validateSarifFileSchema calls core.setFailed which sets the exit code on error
|
||||
process.exitCode = 0;
|
||||
});
|
||||
//# sourceMappingURL=upload-lib.test.js.map
|
||||
1
lib/upload-lib.test.js.map
Normal file
1
lib/upload-lib.test.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,wDAA0C;AAE1C,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;IACtD,iFAAiF;IACjF,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}
|
||||
51
lib/util.js
generated
51
lib/util.js
generated
@@ -11,6 +11,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const exec = __importStar(require("@actions/exec"));
|
||||
const http = __importStar(require("@actions/http-client"));
|
||||
const auth = __importStar(require("@actions/http-client/auth"));
|
||||
const octokit = __importStar(require("@octokit/rest"));
|
||||
@@ -33,12 +34,6 @@ function should_abort(actionName, requireInitActionHasRun) {
|
||||
core.setFailed('GITHUB_REF must be set.');
|
||||
return true;
|
||||
}
|
||||
// Should abort if called on a merge commit for a pull request.
|
||||
if (ref.startsWith('refs/pull/')) {
|
||||
core.warning('The CodeQL ' + actionName + ' action is intended for workflows triggered on `push` events, '
|
||||
+ 'but the current workflow is running on a pull request. Aborting.');
|
||||
return true;
|
||||
}
|
||||
// If the init action is required, then check the it completed successfully.
|
||||
if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
|
||||
core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
|
||||
@@ -47,16 +42,6 @@ function should_abort(actionName, requireInitActionHasRun) {
|
||||
return false;
|
||||
}
|
||||
exports.should_abort = should_abort;
|
||||
/**
|
||||
* Resolve the path to the workspace folder.
|
||||
*/
|
||||
function workspaceFolder() {
|
||||
let workspaceFolder = process.env['RUNNER_WORKSPACE'];
|
||||
if (!workspaceFolder)
|
||||
workspaceFolder = path.resolve('..');
|
||||
return workspaceFolder;
|
||||
}
|
||||
exports.workspaceFolder = workspaceFolder;
|
||||
/**
|
||||
* Get an environment parameter, but throw an error if it is not set.
|
||||
*/
|
||||
@@ -151,6 +136,21 @@ async function getLanguages() {
|
||||
return languages;
|
||||
}
|
||||
exports.getLanguages = getLanguages;
|
||||
/**
|
||||
* Gets the SHA of the commit that is currently checked out.
|
||||
*/
|
||||
async function getCommitOid() {
|
||||
let commitOid = '';
|
||||
await exec.exec('git', ['rev-parse', 'HEAD'], {
|
||||
silent: true,
|
||||
listeners: {
|
||||
stdout: (data) => { commitOid += data.toString(); },
|
||||
stderr: (data) => { process.stderr.write(data); }
|
||||
}
|
||||
});
|
||||
return commitOid.trim();
|
||||
}
|
||||
exports.getCommitOid = getCommitOid;
|
||||
/**
|
||||
* Get the path of the currently executing workflow.
|
||||
*/
|
||||
@@ -196,8 +196,20 @@ exports.getAnalysisKey = getAnalysisKey;
|
||||
* Get the ref currently being analyzed.
|
||||
*/
|
||||
function getRef() {
|
||||
// it's in the form "refs/heads/master"
|
||||
return getRequiredEnvParam('GITHUB_REF');
|
||||
// Will be in the form "refs/heads/master" on a push event
|
||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||
const ref = getRequiredEnvParam('GITHUB_REF');
|
||||
// For pull request refs we want to convert from the 'merge' ref
|
||||
// to the 'head' ref, as that is what we want to analyse.
|
||||
// There should have been some code earlier in the workflow to do
|
||||
// the checkout, but we have no way of verifying that here.
|
||||
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
|
||||
if (pull_ref_regex.test(ref)) {
|
||||
return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
|
||||
}
|
||||
else {
|
||||
return ref;
|
||||
}
|
||||
}
|
||||
exports.getRef = getRef;
|
||||
/**
|
||||
@@ -337,8 +349,9 @@ exports.getToolNames = getToolNames;
|
||||
// Mostly intended for use within tests.
|
||||
async function withTmpDir(body) {
|
||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
|
||||
await body(tmpDir);
|
||||
const result = await body(tmpDir);
|
||||
fs.rmdirSync(tmpDir, { recursive: true });
|
||||
return result;
|
||||
}
|
||||
exports.withTmpDir = withTmpDir;
|
||||
//# sourceMappingURL=util.js.map
|
||||
File diff suppressed because one or more lines are too long
10
node_modules/jsonschema/.editorconfig
generated
vendored
Normal file
10
node_modules/jsonschema/.editorconfig
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
# editorconfig.org
|
||||
root = true
|
||||
|
||||
[*]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
21
node_modules/jsonschema/LICENSE
generated
vendored
Normal file
21
node_modules/jsonschema/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
jsonschema is licensed under MIT license.
|
||||
|
||||
Copyright (C) 2012-2015 Tom de Grunt <tom@degrunt.nl>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
261
node_modules/jsonschema/README.md
generated
vendored
Normal file
261
node_modules/jsonschema/README.md
generated
vendored
Normal file
@@ -0,0 +1,261 @@
|
||||
[](http://travis-ci.org/tdegrunt/jsonschema)
|
||||
|
||||
# jsonschema
|
||||
[JSON schema](http://json-schema.org/) validator, which is designed to be fast and simple to use.
|
||||
The latest IETF published draft is v6, this library is mostly v4 compatible.
|
||||
|
||||
## Contributing & bugs
|
||||
Please fork the repository, make the changes in your fork and include tests. Once you're done making changes, send in a pull request.
|
||||
|
||||
### Bug reports
|
||||
Please include a test which shows why the code fails.
|
||||
|
||||
## Usage
|
||||
|
||||
### Simple
|
||||
Simple object validation using JSON schemas.
|
||||
|
||||
```javascript
|
||||
var Validator = require('jsonschema').Validator;
|
||||
var v = new Validator();
|
||||
var instance = 4;
|
||||
var schema = {"type": "number"};
|
||||
console.log(v.validate(instance, schema));
|
||||
```
|
||||
|
||||
### Even simpler
|
||||
|
||||
```javascript
|
||||
var validate = require('jsonschema').validate;
|
||||
console.log(validate(4, {"type": "number"}));
|
||||
```
|
||||
|
||||
### Complex example, with split schemas and references
|
||||
|
||||
```javascript
|
||||
var Validator = require('jsonschema').Validator;
|
||||
var v = new Validator();
|
||||
|
||||
// Address, to be embedded on Person
|
||||
var addressSchema = {
|
||||
"id": "/SimpleAddress",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"lines": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"zip": {"type": "string"},
|
||||
"city": {"type": "string"},
|
||||
"country": {"type": "string"}
|
||||
},
|
||||
"required": ["country"]
|
||||
};
|
||||
|
||||
// Person
|
||||
var schema = {
|
||||
"id": "/SimplePerson",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"address": {"$ref": "/SimpleAddress"},
|
||||
"votes": {"type": "integer", "minimum": 1}
|
||||
}
|
||||
};
|
||||
|
||||
var p = {
|
||||
"name": "Barack Obama",
|
||||
"address": {
|
||||
"lines": [ "1600 Pennsylvania Avenue Northwest" ],
|
||||
"zip": "DC 20500",
|
||||
"city": "Washington",
|
||||
"country": "USA"
|
||||
},
|
||||
"votes": "lots"
|
||||
};
|
||||
|
||||
v.addSchema(addressSchema, '/SimpleAddress');
|
||||
console.log(v.validate(p, schema));
|
||||
```
|
||||
### Example for Array schema
|
||||
```json
|
||||
var arraySchema = {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"properties": {
|
||||
"name": { "type": "string" },
|
||||
"lastname": { "type": "string" }
|
||||
},
|
||||
"required": ["name", "lastname"]
|
||||
}
|
||||
}
|
||||
```
|
||||
For a comprehensive, annotated example illustrating all possible validation options, see [examples/all.js](./examples/all.js)
|
||||
|
||||
## Features
|
||||
|
||||
### Definitions
|
||||
All schema definitions are supported, $schema is ignored.
|
||||
|
||||
### Types
|
||||
All types are supported
|
||||
|
||||
### Formats
|
||||
#### Disabling the format keyword.
|
||||
|
||||
You may disable format validation by providing `disableFormat: true` to the validator
|
||||
options.
|
||||
|
||||
#### String Formats
|
||||
All formats are supported, phone numbers are expected to follow the [E.123](http://en.wikipedia.org/wiki/E.123) standard.
|
||||
|
||||
#### Custom Formats
|
||||
You may add your own custom format functions. Format functions accept the input
|
||||
being validated and return a boolean value. If the returned value is `true`, then
|
||||
validation succeeds. If the returned value is `false`, then validation fails.
|
||||
|
||||
* Formats added to `Validator.prototype.customFormats` do not affect previously instantiated
|
||||
Validators. This is to prevent validator instances from being altered once created.
|
||||
It is conceivable that multiple validators may be created to handle multiple schemas
|
||||
with different formats in a program.
|
||||
* Formats added to `validator.customFormats` affect only that Validator instance.
|
||||
|
||||
Here is an example that uses custom formats:
|
||||
|
||||
```javascript
|
||||
Validator.prototype.customFormats.myFormat = function(input) {
|
||||
return input === 'myFormat';
|
||||
};
|
||||
|
||||
var validator = new Validator();
|
||||
validator.validate('myFormat', {type: 'string', format: 'myFormat'}).valid; // true
|
||||
validator.validate('foo', {type: 'string', format: 'myFormat'}).valid; // false
|
||||
```
|
||||
|
||||
### Results
|
||||
The first error found will be thrown as an `Error` object if `options.throwError` is `true`. Otherwise all results will be appended to the `result.errors` array which also contains the success flag `result.valid`.
|
||||
|
||||
When `oneOf` or `anyOf` validations fail, errors that caused any of the sub-schemas referenced therein to fail are not reported, unless `options.nestedErrors` is truthy. This option may be useful when troubleshooting validation errors in complex schemas.
|
||||
|
||||
### Custom properties
|
||||
Specify your own JSON Schema properties with the validator.attributes property:
|
||||
|
||||
```javascript
|
||||
validator.attributes.contains = function validateContains(instance, schema, options, ctx) {
|
||||
if(typeof instance!='string') return;
|
||||
if(typeof schema.contains!='string') throw new jsonschema.SchemaError('"contains" expects a string', schema);
|
||||
if(instance.indexOf(schema.contains)<0){
|
||||
return 'does not contain the string ' + JSON.stringify(schema.contains);
|
||||
}
|
||||
}
|
||||
var result = validator.validate("i am an instance", { type:"string", contains: "i am" });
|
||||
// result.valid === true;
|
||||
```
|
||||
|
||||
The instance passes validation if the function returns nothing. A single validation error is produced
|
||||
if the fuction returns a string. Any number of errors (maybe none at all) may be returned by passing a
|
||||
`ValidatorResult` object, which may be used like so:
|
||||
|
||||
```javascript
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
while(someErrorCondition()){
|
||||
result.addError('fails some validation test');
|
||||
}
|
||||
return result;
|
||||
```
|
||||
|
||||
### Dereferencing schemas
|
||||
Sometimes you may want to download schemas from remote sources, like a database, or over HTTP. When importing a schema,
|
||||
unknown references are inserted into the `validator.unresolvedRefs` Array. Asynchronously shift elements off this array and import
|
||||
them:
|
||||
|
||||
```javascript
|
||||
var Validator = require('jsonschema').Validator;
|
||||
var v = new Validator();
|
||||
v.addSchema(initialSchema);
|
||||
function importNextSchema(){
|
||||
var nextSchema = v.unresolvedRefs.shift();
|
||||
if(!nextSchema){ done(); return; }
|
||||
databaseGet(nextSchema, function(schema){
|
||||
v.addSchema(schema);
|
||||
importNextSchema();
|
||||
});
|
||||
}
|
||||
importNextSchema();
|
||||
```
|
||||
|
||||
### Pre-Property Validation Hook
|
||||
If some processing of properties is required prior to validation a function may be passed via the options parameter of the validate function. For example, say you needed to perform type coercion for some properties:
|
||||
|
||||
```javascript
|
||||
const coercionHook = function (instance, property, schema, options, ctx) {
|
||||
var value = instance[property];
|
||||
|
||||
// Skip nulls and undefineds
|
||||
if (value === null || typeof value == 'undefined') {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the schema declares a type and the property fails type validation.
|
||||
if (schema.type && this.attributes.type.call(this, instance, schema, options, ctx.makeChild(schema, property))) {
|
||||
var types = Array.isArray(schema.type) ? schema.type : [schema.type];
|
||||
var coerced = undefined;
|
||||
|
||||
// Go through the declared types until we find something that we can
|
||||
// coerce the value into.
|
||||
for (var i = 0; typeof coerced == 'undefined' && i < types.length; i++) {
|
||||
// If we support coercion to this type
|
||||
if (lib.coercions[types[i]]) {
|
||||
// ...attempt it.
|
||||
coerced = lib.coercions[types[i]](value);
|
||||
}
|
||||
}
|
||||
// If we got a successful coercion we modify the property of the instance.
|
||||
if (typeof coerced != 'undefined') {
|
||||
instance[property] = coerced;
|
||||
}
|
||||
}
|
||||
}.bind(validator)
|
||||
|
||||
// And now, to actually perform validation with the coercion hook!
|
||||
v.validate(instance, schema, { preValidateProperty: coercionHook });
|
||||
```
|
||||
|
||||
## Tests
|
||||
Uses [JSON Schema Test Suite](https://github.com/json-schema/JSON-Schema-Test-Suite) as well as our own tests.
|
||||
You'll need to update and init the git submodules:
|
||||
|
||||
git submodule update --init
|
||||
npm test
|
||||
|
||||
## Contributions
|
||||
|
||||
This library would not be possible without the valuable contributions by:
|
||||
|
||||
- Austin Wright
|
||||
|
||||
... and many others!
|
||||
|
||||
## License
|
||||
|
||||
jsonschema is licensed under MIT license.
|
||||
|
||||
Copyright (C) 2012-2019 Tom de Grunt <tom@degrunt.nl>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
820
node_modules/jsonschema/lib/attribute.js
generated
vendored
Normal file
820
node_modules/jsonschema/lib/attribute.js
generated
vendored
Normal file
@@ -0,0 +1,820 @@
|
||||
'use strict';
|
||||
|
||||
var helpers = require('./helpers');
|
||||
|
||||
/** @type ValidatorResult */
|
||||
var ValidatorResult = helpers.ValidatorResult;
|
||||
/** @type SchemaError */
|
||||
var SchemaError = helpers.SchemaError;
|
||||
|
||||
var attribute = {};
|
||||
|
||||
attribute.ignoreProperties = {
|
||||
// informative properties
|
||||
'id': true,
|
||||
'default': true,
|
||||
'description': true,
|
||||
'title': true,
|
||||
// arguments to other properties
|
||||
'exclusiveMinimum': true,
|
||||
'exclusiveMaximum': true,
|
||||
'additionalItems': true,
|
||||
// special-handled properties
|
||||
'$schema': true,
|
||||
'$ref': true,
|
||||
'extends': true
|
||||
};
|
||||
|
||||
/**
|
||||
* @name validators
|
||||
*/
|
||||
var validators = attribute.validators = {};
|
||||
|
||||
/**
|
||||
* Validates whether the instance if of a certain type
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param options
|
||||
* @param ctx
|
||||
* @return {ValidatorResult|null}
|
||||
*/
|
||||
validators.type = function validateType (instance, schema, options, ctx) {
|
||||
// Ignore undefined instances
|
||||
if (instance === undefined) {
|
||||
return null;
|
||||
}
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var types = Array.isArray(schema.type) ? schema.type : [schema.type];
|
||||
if (!types.some(this.testType.bind(this, instance, schema, options, ctx))) {
|
||||
var list = types.map(function (v) {
|
||||
return v.id && ('<' + v.id + '>') || (v+'');
|
||||
});
|
||||
result.addError({
|
||||
name: 'type',
|
||||
argument: list,
|
||||
message: "is not of a type(s) " + list,
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
function testSchemaNoThrow(instance, options, ctx, callback, schema){
|
||||
var throwError = options.throwError;
|
||||
options.throwError = false;
|
||||
var res = this.validateSchema(instance, schema, options, ctx);
|
||||
options.throwError = throwError;
|
||||
|
||||
if (!res.valid && callback instanceof Function) {
|
||||
callback(res);
|
||||
}
|
||||
return res.valid;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates whether the instance matches some of the given schemas
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param options
|
||||
* @param ctx
|
||||
* @return {ValidatorResult|null}
|
||||
*/
|
||||
validators.anyOf = function validateAnyOf (instance, schema, options, ctx) {
|
||||
// Ignore undefined instances
|
||||
if (instance === undefined) {
|
||||
return null;
|
||||
}
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var inner = new ValidatorResult(instance, schema, options, ctx);
|
||||
if (!Array.isArray(schema.anyOf)){
|
||||
throw new SchemaError("anyOf must be an array");
|
||||
}
|
||||
if (!schema.anyOf.some(
|
||||
testSchemaNoThrow.bind(
|
||||
this, instance, options, ctx, function(res){inner.importErrors(res);}
|
||||
))) {
|
||||
var list = schema.anyOf.map(function (v, i) {
|
||||
return (v.id && ('<' + v.id + '>')) || (v.title && JSON.stringify(v.title)) || (v['$ref'] && ('<' + v['$ref'] + '>')) || '[subschema '+i+']';
|
||||
});
|
||||
if (options.nestedErrors) {
|
||||
result.importErrors(inner);
|
||||
}
|
||||
result.addError({
|
||||
name: 'anyOf',
|
||||
argument: list,
|
||||
message: "is not any of " + list.join(','),
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether the instance matches every given schema
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param options
|
||||
* @param ctx
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.allOf = function validateAllOf (instance, schema, options, ctx) {
|
||||
// Ignore undefined instances
|
||||
if (instance === undefined) {
|
||||
return null;
|
||||
}
|
||||
if (!Array.isArray(schema.allOf)){
|
||||
throw new SchemaError("allOf must be an array");
|
||||
}
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var self = this;
|
||||
schema.allOf.forEach(function(v, i){
|
||||
var valid = self.validateSchema(instance, v, options, ctx);
|
||||
if(!valid.valid){
|
||||
var msg = (v.id && ('<' + v.id + '>')) || (v.title && JSON.stringify(v.title)) || (v['$ref'] && ('<' + v['$ref'] + '>')) || '[subschema '+i+']';
|
||||
result.addError({
|
||||
name: 'allOf',
|
||||
argument: { id: msg, length: valid.errors.length, valid: valid },
|
||||
message: 'does not match allOf schema ' + msg + ' with ' + valid.errors.length + ' error[s]:',
|
||||
});
|
||||
result.importErrors(valid);
|
||||
}
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether the instance matches exactly one of the given schemas
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param options
|
||||
* @param ctx
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.oneOf = function validateOneOf (instance, schema, options, ctx) {
|
||||
// Ignore undefined instances
|
||||
if (instance === undefined) {
|
||||
return null;
|
||||
}
|
||||
if (!Array.isArray(schema.oneOf)){
|
||||
throw new SchemaError("oneOf must be an array");
|
||||
}
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var inner = new ValidatorResult(instance, schema, options, ctx);
|
||||
var count = schema.oneOf.filter(
|
||||
testSchemaNoThrow.bind(
|
||||
this, instance, options, ctx, function(res) {inner.importErrors(res);}
|
||||
) ).length;
|
||||
var list = schema.oneOf.map(function (v, i) {
|
||||
return (v.id && ('<' + v.id + '>')) || (v.title && JSON.stringify(v.title)) || (v['$ref'] && ('<' + v['$ref'] + '>')) || '[subschema '+i+']';
|
||||
});
|
||||
if (count!==1) {
|
||||
if (options.nestedErrors) {
|
||||
result.importErrors(inner);
|
||||
}
|
||||
result.addError({
|
||||
name: 'oneOf',
|
||||
argument: list,
|
||||
message: "is not exactly one from " + list.join(','),
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates properties
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param options
|
||||
* @param ctx
|
||||
* @return {String|null|ValidatorResult}
|
||||
*/
|
||||
validators.properties = function validateProperties (instance, schema, options, ctx) {
|
||||
if(!this.types.object(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var properties = schema.properties || {};
|
||||
for (var property in properties) {
|
||||
if (typeof options.preValidateProperty == 'function') {
|
||||
options.preValidateProperty(instance, property, properties[property], options, ctx);
|
||||
}
|
||||
|
||||
var prop = Object.hasOwnProperty.call(instance, property) ? instance[property] : undefined;
|
||||
var res = this.validateSchema(prop, properties[property], options, ctx.makeChild(properties[property], property));
|
||||
if(res.instance !== result.instance[property]) result.instance[property] = res.instance;
|
||||
result.importErrors(res);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Test a specific property within in instance against the additionalProperties schema attribute
|
||||
* This ignores properties with definitions in the properties schema attribute, but no other attributes.
|
||||
* If too many more types of property-existance tests pop up they may need their own class of tests (like `type` has)
|
||||
* @private
|
||||
* @return {boolean}
|
||||
*/
|
||||
function testAdditionalProperty (instance, schema, options, ctx, property, result) {
|
||||
if(!this.types.object(instance)) return;
|
||||
if (schema.properties && schema.properties[property] !== undefined) {
|
||||
return;
|
||||
}
|
||||
if (schema.additionalProperties === false) {
|
||||
result.addError({
|
||||
name: 'additionalProperties',
|
||||
argument: property,
|
||||
message: "additionalProperty " + JSON.stringify(property) + " exists in instance when not allowed",
|
||||
});
|
||||
} else {
|
||||
var additionalProperties = schema.additionalProperties || {};
|
||||
|
||||
if (typeof options.preValidateProperty == 'function') {
|
||||
options.preValidateProperty(instance, property, additionalProperties, options, ctx);
|
||||
}
|
||||
|
||||
var res = this.validateSchema(instance[property], additionalProperties, options, ctx.makeChild(additionalProperties, property));
|
||||
if(res.instance !== result.instance[property]) result.instance[property] = res.instance;
|
||||
result.importErrors(res);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates patternProperties
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param options
|
||||
* @param ctx
|
||||
* @return {String|null|ValidatorResult}
|
||||
*/
|
||||
validators.patternProperties = function validatePatternProperties (instance, schema, options, ctx) {
|
||||
if(!this.types.object(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var patternProperties = schema.patternProperties || {};
|
||||
|
||||
for (var property in instance) {
|
||||
var test = true;
|
||||
for (var pattern in patternProperties) {
|
||||
var expr = new RegExp(pattern);
|
||||
if (!expr.test(property)) {
|
||||
continue;
|
||||
}
|
||||
test = false;
|
||||
|
||||
if (typeof options.preValidateProperty == 'function') {
|
||||
options.preValidateProperty(instance, property, patternProperties[pattern], options, ctx);
|
||||
}
|
||||
|
||||
var res = this.validateSchema(instance[property], patternProperties[pattern], options, ctx.makeChild(patternProperties[pattern], property));
|
||||
if(res.instance !== result.instance[property]) result.instance[property] = res.instance;
|
||||
result.importErrors(res);
|
||||
}
|
||||
if (test) {
|
||||
testAdditionalProperty.call(this, instance, schema, options, ctx, property, result);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates additionalProperties
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param options
|
||||
* @param ctx
|
||||
* @return {String|null|ValidatorResult}
|
||||
*/
|
||||
validators.additionalProperties = function validateAdditionalProperties (instance, schema, options, ctx) {
|
||||
if(!this.types.object(instance)) return;
|
||||
// if patternProperties is defined then we'll test when that one is called instead
|
||||
if (schema.patternProperties) {
|
||||
return null;
|
||||
}
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
for (var property in instance) {
|
||||
testAdditionalProperty.call(this, instance, schema, options, ctx, property, result);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether the instance value is at least of a certain length, when the instance value is a string.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.minProperties = function validateMinProperties (instance, schema, options, ctx) {
|
||||
if (!this.types.object(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var keys = Object.keys(instance);
|
||||
if (!(keys.length >= schema.minProperties)) {
|
||||
result.addError({
|
||||
name: 'minProperties',
|
||||
argument: schema.minProperties,
|
||||
message: "does not meet minimum property length of " + schema.minProperties,
|
||||
})
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether the instance value is at most of a certain length, when the instance value is a string.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.maxProperties = function validateMaxProperties (instance, schema, options, ctx) {
|
||||
if (!this.types.object(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var keys = Object.keys(instance);
|
||||
if (!(keys.length <= schema.maxProperties)) {
|
||||
result.addError({
|
||||
name: 'maxProperties',
|
||||
argument: schema.maxProperties,
|
||||
message: "does not meet maximum property length of " + schema.maxProperties,
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates items when instance is an array
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param options
|
||||
* @param ctx
|
||||
* @return {String|null|ValidatorResult}
|
||||
*/
|
||||
validators.items = function validateItems (instance, schema, options, ctx) {
|
||||
var self = this;
|
||||
if (!this.types.array(instance)) return;
|
||||
if (!schema.items) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
instance.every(function (value, i) {
|
||||
var items = Array.isArray(schema.items) ? (schema.items[i] || schema.additionalItems) : schema.items;
|
||||
if (items === undefined) {
|
||||
return true;
|
||||
}
|
||||
if (items === false) {
|
||||
result.addError({
|
||||
name: 'items',
|
||||
message: "additionalItems not permitted",
|
||||
});
|
||||
return false;
|
||||
}
|
||||
var res = self.validateSchema(value, items, options, ctx.makeChild(items, i));
|
||||
if(res.instance !== result.instance[i]) result.instance[i] = res.instance;
|
||||
result.importErrors(res);
|
||||
return true;
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates minimum and exclusiveMinimum when the type of the instance value is a number.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.minimum = function validateMinimum (instance, schema, options, ctx) {
|
||||
if (!this.types.number(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var valid = true;
|
||||
if (schema.exclusiveMinimum && schema.exclusiveMinimum === true) {
|
||||
valid = instance > schema.minimum;
|
||||
} else {
|
||||
valid = instance >= schema.minimum;
|
||||
}
|
||||
if (!valid) {
|
||||
result.addError({
|
||||
name: 'minimum',
|
||||
argument: schema.minimum,
|
||||
message: "must have a minimum value of " + schema.minimum,
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates maximum and exclusiveMaximum when the type of the instance value is a number.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.maximum = function validateMaximum (instance, schema, options, ctx) {
|
||||
if (!this.types.number(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var valid;
|
||||
if (schema.exclusiveMaximum && schema.exclusiveMaximum === true) {
|
||||
valid = instance < schema.maximum;
|
||||
} else {
|
||||
valid = instance <= schema.maximum;
|
||||
}
|
||||
if (!valid) {
|
||||
result.addError({
|
||||
name: 'maximum',
|
||||
argument: schema.maximum,
|
||||
message: "must have a maximum value of " + schema.maximum,
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Perform validation for multipleOf and divisibleBy, which are essentially the same.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param validationType
|
||||
* @param errorMessage
|
||||
* @returns {String|null}
|
||||
*/
|
||||
var validateMultipleOfOrDivisbleBy = function validateMultipleOfOrDivisbleBy (instance, schema, options, ctx, validationType, errorMessage) {
|
||||
if (!this.types.number(instance)) return;
|
||||
|
||||
var validationArgument = schema[validationType];
|
||||
if (validationArgument == 0) {
|
||||
throw new SchemaError(validationType + " cannot be zero");
|
||||
}
|
||||
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
|
||||
var instanceDecimals = helpers.getDecimalPlaces(instance);
|
||||
var divisorDecimals = helpers.getDecimalPlaces(validationArgument);
|
||||
|
||||
var maxDecimals = Math.max(instanceDecimals , divisorDecimals);
|
||||
var multiplier = Math.pow(10, maxDecimals);
|
||||
|
||||
if (Math.round(instance * multiplier) % Math.round(validationArgument * multiplier) !== 0) {
|
||||
result.addError({
|
||||
name: validationType,
|
||||
argument: validationArgument,
|
||||
message: errorMessage + JSON.stringify(validationArgument)
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates divisibleBy when the type of the instance value is a number.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.multipleOf = function validateMultipleOf (instance, schema, options, ctx) {
|
||||
return validateMultipleOfOrDivisbleBy.call(this, instance, schema, options, ctx, "multipleOf", "is not a multiple of (divisible by) ");
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates multipleOf when the type of the instance value is a number.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.divisibleBy = function validateDivisibleBy (instance, schema, options, ctx) {
|
||||
return validateMultipleOfOrDivisbleBy.call(this, instance, schema, options, ctx, "divisibleBy", "is not divisible by (multiple of) ");
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether the instance value is present.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.required = function validateRequired (instance, schema, options, ctx) {
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
if (instance === undefined && schema.required === true) {
|
||||
// A boolean form is implemented for reverse-compatability with schemas written against older drafts
|
||||
result.addError({
|
||||
name: 'required',
|
||||
message: "is required"
|
||||
});
|
||||
} else if (this.types.object(instance) && Array.isArray(schema.required)) {
|
||||
schema.required.forEach(function(n){
|
||||
if(instance[n]===undefined){
|
||||
result.addError({
|
||||
name: 'required',
|
||||
argument: n,
|
||||
message: "requires property " + JSON.stringify(n),
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether the instance value matches the regular expression, when the instance value is a string.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.pattern = function validatePattern (instance, schema, options, ctx) {
|
||||
if (!this.types.string(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
if (!instance.match(schema.pattern)) {
|
||||
result.addError({
|
||||
name: 'pattern',
|
||||
argument: schema.pattern,
|
||||
message: "does not match pattern " + JSON.stringify(schema.pattern.toString()),
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether the instance value is of a certain defined format or a custom
|
||||
* format.
|
||||
* The following formats are supported for string types:
|
||||
* - date-time
|
||||
* - date
|
||||
* - time
|
||||
* - ip-address
|
||||
* - ipv6
|
||||
* - uri
|
||||
* - color
|
||||
* - host-name
|
||||
* - alpha
|
||||
* - alpha-numeric
|
||||
* - utc-millisec
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param [options]
|
||||
* @param [ctx]
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.format = function validateFormat (instance, schema, options, ctx) {
|
||||
if (instance===undefined) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
if (!result.disableFormat && !helpers.isFormat(instance, schema.format, this)) {
|
||||
result.addError({
|
||||
name: 'format',
|
||||
argument: schema.format,
|
||||
message: "does not conform to the " + JSON.stringify(schema.format) + " format",
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether the instance value is at least of a certain length, when the instance value is a string.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.minLength = function validateMinLength (instance, schema, options, ctx) {
|
||||
if (!this.types.string(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
var hsp = instance.match(/[\uDC00-\uDFFF]/g);
|
||||
var length = instance.length - (hsp ? hsp.length : 0);
|
||||
if (!(length >= schema.minLength)) {
|
||||
result.addError({
|
||||
name: 'minLength',
|
||||
argument: schema.minLength,
|
||||
message: "does not meet minimum length of " + schema.minLength,
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether the instance value is at most of a certain length, when the instance value is a string.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.maxLength = function validateMaxLength (instance, schema, options, ctx) {
|
||||
if (!this.types.string(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
// TODO if this was already computed in "minLength", use that value instead of re-computing
|
||||
var hsp = instance.match(/[\uDC00-\uDFFF]/g);
|
||||
var length = instance.length - (hsp ? hsp.length : 0);
|
||||
if (!(length <= schema.maxLength)) {
|
||||
result.addError({
|
||||
name: 'maxLength',
|
||||
argument: schema.maxLength,
|
||||
message: "does not meet maximum length of " + schema.maxLength,
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether instance contains at least a minimum number of items, when the instance is an Array.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.minItems = function validateMinItems (instance, schema, options, ctx) {
|
||||
if (!this.types.array(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
if (!(instance.length >= schema.minItems)) {
|
||||
result.addError({
|
||||
name: 'minItems',
|
||||
argument: schema.minItems,
|
||||
message: "does not meet minimum length of " + schema.minItems,
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether instance contains no more than a maximum number of items, when the instance is an Array.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.maxItems = function validateMaxItems (instance, schema, options, ctx) {
|
||||
if (!this.types.array(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
if (!(instance.length <= schema.maxItems)) {
|
||||
result.addError({
|
||||
name: 'maxItems',
|
||||
argument: schema.maxItems,
|
||||
message: "does not meet maximum length of " + schema.maxItems,
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates that every item in an instance array is unique, when instance is an array
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param options
|
||||
* @param ctx
|
||||
* @return {String|null|ValidatorResult}
|
||||
*/
|
||||
validators.uniqueItems = function validateUniqueItems (instance, schema, options, ctx) {
|
||||
if (!this.types.array(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
function testArrays (v, i, a) {
|
||||
for (var j = i + 1; j < a.length; j++) if (helpers.deepCompareStrict(v, a[j])) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if (!instance.every(testArrays)) {
|
||||
result.addError({
|
||||
name: 'uniqueItems',
|
||||
message: "contains duplicate item",
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Deep compares arrays for duplicates
|
||||
* @param v
|
||||
* @param i
|
||||
* @param a
|
||||
* @private
|
||||
* @return {boolean}
|
||||
*/
|
||||
function testArrays (v, i, a) {
|
||||
var j, len = a.length;
|
||||
for (j = i + 1, len; j < len; j++) {
|
||||
if (helpers.deepCompareStrict(v, a[j])) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates whether there are no duplicates, when the instance is an Array.
|
||||
* @param instance
|
||||
* @return {String|null}
|
||||
*/
|
||||
validators.uniqueItems = function validateUniqueItems (instance, schema, options, ctx) {
|
||||
if (!this.types.array(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
if (!instance.every(testArrays)) {
|
||||
result.addError({
|
||||
name: 'uniqueItems',
|
||||
message: "contains duplicate item",
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validate for the presence of dependency properties, if the instance is an object.
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @param options
|
||||
* @param ctx
|
||||
* @return {null|ValidatorResult}
|
||||
*/
|
||||
validators.dependencies = function validateDependencies (instance, schema, options, ctx) {
|
||||
if (!this.types.object(instance)) return;
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
for (var property in schema.dependencies) {
|
||||
if (instance[property] === undefined) {
|
||||
continue;
|
||||
}
|
||||
var dep = schema.dependencies[property];
|
||||
var childContext = ctx.makeChild(dep, property);
|
||||
if (typeof dep == 'string') {
|
||||
dep = [dep];
|
||||
}
|
||||
if (Array.isArray(dep)) {
|
||||
dep.forEach(function (prop) {
|
||||
if (instance[prop] === undefined) {
|
||||
result.addError({
|
||||
// FIXME there's two different "dependencies" errors here with slightly different outputs
|
||||
// Can we make these the same? Or should we create different error types?
|
||||
name: 'dependencies',
|
||||
argument: childContext.propertyPath,
|
||||
message: "property " + prop + " not found, required by " + childContext.propertyPath,
|
||||
});
|
||||
}
|
||||
});
|
||||
} else {
|
||||
var res = this.validateSchema(instance, dep, options, childContext);
|
||||
if(result.instance !== res.instance) result.instance = res.instance;
|
||||
if (res && res.errors.length) {
|
||||
result.addError({
|
||||
name: 'dependencies',
|
||||
argument: childContext.propertyPath,
|
||||
message: "does not meet dependency required by " + childContext.propertyPath,
|
||||
});
|
||||
result.importErrors(res);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates whether the instance value is one of the enumerated values.
|
||||
*
|
||||
* @param instance
|
||||
* @param schema
|
||||
* @return {ValidatorResult|null}
|
||||
*/
|
||||
validators['enum'] = function validateEnum (instance, schema, options, ctx) {
|
||||
if (instance === undefined) {
|
||||
return null;
|
||||
}
|
||||
if (!Array.isArray(schema['enum'])) {
|
||||
throw new SchemaError("enum expects an array", schema);
|
||||
}
|
||||
var result = new ValidatorResult(instance, schema, options, ctx);
|
||||
if (!schema['enum'].some(helpers.deepCompareStrict.bind(null, instance))) {
|
||||
result.addError({
|
||||
name: 'enum',
|
||||
argument: schema['enum'],
|
||||
message: "is not one of enum values: " + schema['enum'].map(String).join(','),
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};

/**
 * Validates whether the instance exactly matches a given value
 *
 * @param instance
 * @param schema
 * @return {ValidatorResult|null}
 */
validators['const'] = function validateEnum (instance, schema, options, ctx) {
  if (instance === undefined) {
    return null;
  }
  var result = new ValidatorResult(instance, schema, options, ctx);
  if (!helpers.deepCompareStrict(schema['const'], instance)) {
    result.addError({
      name: 'const',
      argument: schema['const'],
      message: "does not exactly match expected constant: " + schema['const'],
    });
  }
  return result;
};

/**
 * Validates whether the instance is of a prohibited type.
 * @param instance
 * @param schema
 * @param options
 * @param ctx
 * @return {null|ValidatorResult}
 */
validators.not = validators.disallow = function validateNot (instance, schema, options, ctx) {
  var self = this;
  if(instance===undefined) return null;
  var result = new ValidatorResult(instance, schema, options, ctx);
  var notTypes = schema.not || schema.disallow;
  if(!notTypes) return null;
  if(!Array.isArray(notTypes)) notTypes=[notTypes];
  notTypes.forEach(function (type) {
    if (self.testType(instance, schema, options, ctx, type)) {
      var schemaId = type && type.id && ('<' + type.id + '>') || type;
      result.addError({
        name: 'not',
        argument: schemaId,
        message: "is of prohibited type " + schemaId,
      });
    }
  });
  return result;
};

module.exports = attribute;
325
node_modules/jsonschema/lib/helpers.js
generated
vendored
Normal file
@@ -0,0 +1,325 @@
'use strict';

var uri = require('url');

var ValidationError = exports.ValidationError = function ValidationError (message, instance, schema, propertyPath, name, argument) {
  if (propertyPath) {
    this.property = propertyPath;
  }
  if (message) {
    this.message = message;
  }
  if (schema) {
    if (schema.id) {
      this.schema = schema.id;
    } else {
      this.schema = schema;
    }
  }
  if (instance) {
    this.instance = instance;
  }
  this.name = name;
  this.argument = argument;
  this.stack = this.toString();
};

ValidationError.prototype.toString = function toString() {
  return this.property + ' ' + this.message;
};

var ValidatorResult = exports.ValidatorResult = function ValidatorResult(instance, schema, options, ctx) {
  this.instance = instance;
  this.schema = schema;
  this.propertyPath = ctx.propertyPath;
  this.errors = [];
  this.throwError = options && options.throwError;
  this.disableFormat = options && options.disableFormat === true;
};

ValidatorResult.prototype.addError = function addError(detail) {
  var err;
  if (typeof detail == 'string') {
    err = new ValidationError(detail, this.instance, this.schema, this.propertyPath);
  } else {
    if (!detail) throw new Error('Missing error detail');
    if (!detail.message) throw new Error('Missing error message');
    if (!detail.name) throw new Error('Missing validator type');
    err = new ValidationError(detail.message, this.instance, this.schema, this.propertyPath, detail.name, detail.argument);
  }

  if (this.throwError) {
    throw err;
  }
  this.errors.push(err);
  return err;
};

ValidatorResult.prototype.importErrors = function importErrors(res) {
  if (typeof res == 'string' || (res && res.validatorType)) {
    this.addError(res);
  } else if (res && res.errors) {
    Array.prototype.push.apply(this.errors, res.errors);
  }
};

function stringizer (v,i){
  return i+': '+v.toString()+'\n';
}
ValidatorResult.prototype.toString = function toString(res) {
  return this.errors.map(stringizer).join('');
};

Object.defineProperty(ValidatorResult.prototype, "valid", { get: function() {
  return !this.errors.length;
} });

/**
 * Describes a problem with a Schema which prevents validation of an instance
 * @name SchemaError
 * @constructor
 */
var SchemaError = exports.SchemaError = function SchemaError (msg, schema) {
  this.message = msg;
  this.schema = schema;
  Error.call(this, msg);
  Error.captureStackTrace(this, SchemaError);
};
SchemaError.prototype = Object.create(Error.prototype,
  { constructor: {value: SchemaError, enumerable: false}
  , name: {value: 'SchemaError', enumerable: false}
  });

var SchemaContext = exports.SchemaContext = function SchemaContext (schema, options, propertyPath, base, schemas) {
  this.schema = schema;
  this.options = options;
  this.propertyPath = propertyPath;
  this.base = base;
  this.schemas = schemas;
};

SchemaContext.prototype.resolve = function resolve (target) {
  return uri.resolve(this.base, target);
};

SchemaContext.prototype.makeChild = function makeChild(schema, propertyName){
  var propertyPath = (propertyName===undefined) ? this.propertyPath : this.propertyPath+makeSuffix(propertyName);
  var base = uri.resolve(this.base, schema.id||'');
  var ctx = new SchemaContext(schema, this.options, propertyPath, base, Object.create(this.schemas));
  if(schema.id && !ctx.schemas[base]){
    ctx.schemas[base] = schema;
  }
  return ctx;
}

var FORMAT_REGEXPS = exports.FORMAT_REGEXPS = {
  'date-time': /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])[tT ](2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])(\.\d+)?([zZ]|[+-]([0-5][0-9]):(60|[0-5][0-9]))$/,
  'date': /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])$/,
  'time': /^(2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])$/,

  'email': /^(?:[\w\!\#\$\%\&\'\*\+\-\/\=\?\^\`\{\|\}\~]+\.)*[\w\!\#\$\%\&\'\*\+\-\/\=\?\^\`\{\|\}\~]+@(?:(?:(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-](?!\.)){0,61}[a-zA-Z0-9]?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9\-](?!$)){0,61}[a-zA-Z0-9]?)|(?:\[(?:(?:[01]?\d{1,2}|2[0-4]\d|25[0-5])\.){3}(?:[01]?\d{1,2}|2[0-4]\d|25[0-5])\]))$/,
  'ip-address': /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/,
  'ipv6': /^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$/,
  'uri': /^[a-zA-Z][a-zA-Z0-9+-.]*:[^\s]*$/,

  'color': /^(#?([0-9A-Fa-f]{3}){1,2}\b|aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow|(rgb\(\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*\))|(rgb\(\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*\)))$/,

  // hostname regex from: http://stackoverflow.com/a/1420225/5628
  'hostname': /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/,
  'host-name': /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/,

  'alpha': /^[a-zA-Z]+$/,
  'alphanumeric': /^[a-zA-Z0-9]+$/,
  'utc-millisec': function (input) {
    return (typeof input === 'string') && parseFloat(input) === parseInt(input, 10) && !isNaN(input);
  },
  'regex': function (input) {
    var result = true;
    try {
      new RegExp(input);
    } catch (e) {
      result = false;
    }
    return result;
  },
  'style': /\s*(.+?):\s*([^;]+);?/,
  'phone': /^\+(?:[0-9] ?){6,14}[0-9]$/
};

FORMAT_REGEXPS.regexp = FORMAT_REGEXPS.regex;
FORMAT_REGEXPS.pattern = FORMAT_REGEXPS.regex;
FORMAT_REGEXPS.ipv4 = FORMAT_REGEXPS['ip-address'];

exports.isFormat = function isFormat (input, format, validator) {
  if (typeof input === 'string' && FORMAT_REGEXPS[format] !== undefined) {
    if (FORMAT_REGEXPS[format] instanceof RegExp) {
      return FORMAT_REGEXPS[format].test(input);
    }
    if (typeof FORMAT_REGEXPS[format] === 'function') {
      return FORMAT_REGEXPS[format](input);
    }
  } else if (validator && validator.customFormats &&
      typeof validator.customFormats[format] === 'function') {
    return validator.customFormats[format](input);
  }
  return true;
};
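
A short sketch of how isFormat degrades for unknown formats; note the permissive `return true` fallback above (the deep import path is an assumption for illustration):

import { isFormat } from 'jsonschema/lib/helpers';

console.log(isFormat('127.0.0.1', 'ip-address'));    // true
console.log(isFormat('not-an-ip', 'ipv4'));          // false, since 'ipv4' is aliased to 'ip-address' above
console.log(isFormat('anything', 'no-such-format')); // true, unknown formats are not rejected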

var makeSuffix = exports.makeSuffix = function makeSuffix (key) {
  key = key.toString();
  // This function could be capable of outputting a valid ECMAScript string, but the
  // resulting code for testing which form to use would be tens of thousands of characters long
  // That means this will use the name form for some illegal forms
  if (!key.match(/[.\s\[\]]/) && !key.match(/^[\d]/)) {
    return '.' + key;
  }
  if (key.match(/^\d+$/)) {
    return '[' + key + ']';
  }
  return '[' + JSON.stringify(key) + ']';
};

exports.deepCompareStrict = function deepCompareStrict (a, b) {
  if (typeof a !== typeof b) {
    return false;
  }
  if (Array.isArray(a)) {
    if (!Array.isArray(b)) {
      return false;
    }
    if (a.length !== b.length) {
      return false;
    }
    return a.every(function (v, i) {
      return deepCompareStrict(a[i], b[i]);
    });
  }
  if (typeof a === 'object') {
    if (!a || !b) {
      return a === b;
    }
    var aKeys = Object.keys(a);
    var bKeys = Object.keys(b);
    if (aKeys.length !== bKeys.length) {
      return false;
    }
    return aKeys.every(function (v) {
      return deepCompareStrict(a[v], b[v]);
    });
  }
  return a === b;
};
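
deepCompareStrict is what the enum and const validators above bind against, so equality there is structural and type-strict. A minimal sketch (deep import path assumed for illustration):

import * as helpers from 'jsonschema/lib/helpers';

helpers.deepCompareStrict({ a: [1, 2] }, { a: [1, 2] }); // true, recurses into objects and arrays
helpers.deepCompareStrict([1, 2], [2, 1]);               // false, array order matters
helpers.deepCompareStrict(1, '1');                       // false, no type coercion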

function deepMerger (target, dst, e, i) {
  if (typeof e === 'object') {
    dst[i] = deepMerge(target[i], e)
  } else {
    if (target.indexOf(e) === -1) {
      dst.push(e)
    }
  }
}

function copyist (src, dst, key) {
  dst[key] = src[key];
}

function copyistWithDeepMerge (target, src, dst, key) {
  if (typeof src[key] !== 'object' || !src[key]) {
    dst[key] = src[key];
  }
  else {
    if (!target[key]) {
      dst[key] = src[key];
    } else {
      dst[key] = deepMerge(target[key], src[key])
    }
  }
}

function deepMerge (target, src) {
  var array = Array.isArray(src);
  var dst = array && [] || {};

  if (array) {
    target = target || [];
    dst = dst.concat(target);
    src.forEach(deepMerger.bind(null, target, dst));
  } else {
    if (target && typeof target === 'object') {
      Object.keys(target).forEach(copyist.bind(null, target, dst));
    }
    Object.keys(src).forEach(copyistWithDeepMerge.bind(null, target, src, dst));
  }

  return dst;
};

module.exports.deepMerge = deepMerge;
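
deepMerge backs the `extends` handling in lib/validator.js later in this diff: objects merge key by key, while array elements are concatenated with primitive duplicates skipped. A sketch of the behaviour (values invented for illustration):

import * as helpers from 'jsonschema/lib/helpers';

helpers.deepMerge({ a: { x: 1 }, list: [1] },
                  { a: { y: 2 }, list: [2] });
// => { a: { x: 1, y: 2 }, list: [1, 2] }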

/**
 * Walks an object along a URI+JSON Pointer encoded path, e.g. "%7e"="~0"=>"~", "~1"="%2f"=>"/"
 * @param o
 * @param s The path to walk o along
 * @return any
 */
exports.objectGetPath = function objectGetPath(o, s) {
  var parts = s.split('/').slice(1);
  var k;
  while (typeof (k=parts.shift()) == 'string') {
    var n = decodeURIComponent(k.replace(/~0/,'~').replace(/~1/g,'/'));
    if (!(n in o)) return;
    o = o[n];
  }
  return o;
};

function pathEncoder (v) {
  return '/'+encodeURIComponent(v).replace(/~/g,'%7E');
}
/**
 * Accept an Array of property names and return a JSON Pointer URI fragment
 * @param Array a
 * @return {String}
 */
exports.encodePath = function encodePointer(a){
  // ~ must be encoded explicitly because hacks
  // the slash is encoded by encodeURIComponent
  return a.map(pathEncoder).join('');
};
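
These two helpers form the JSON Pointer round-trip used by $ref resolution in lib/validator.js later in this diff. A small sketch (deep import path assumed):

import * as helpers from 'jsonschema/lib/helpers';

helpers.objectGetPath({ definitions: { 'a/b': 1 } }, '/definitions/a~1b'); // 1, since '~1' decodes to '/'
helpers.encodePath(['definitions', 'a/b']);                                // '/definitions/a%2Fb'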

/**
 * Calculate the number of decimal places a number uses
 * We need this to get correct results out of multipleOf and divisibleBy
 * when either figure has decimal places, due to IEEE-754 float issues.
 * @param number
 * @returns {number}
 */
exports.getDecimalPlaces = function getDecimalPlaces(number) {

  var decimalPlaces = 0;
  if (isNaN(number)) return decimalPlaces;

  if (typeof number !== 'number') {
    number = Number(number);
  }

  var parts = number.toString().split('e');
  if (parts.length === 2) {
    if (parts[1][0] !== '-') {
      return decimalPlaces;
    } else {
      decimalPlaces = Number(parts[1].slice(1));
    }
  }

  var decimalParts = parts[0].split('.');
  if (decimalParts.length === 2) {
    decimalPlaces += decimalParts[1].length;
  }

  return decimalPlaces;
};
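
The decimal-place count lets multipleOf scale both operands to integers before dividing, sidestepping float error (e.g. 0.0075 % 0.0001 is not exactly 0 in IEEE-754). A sketch:

import * as helpers from 'jsonschema/lib/helpers';

helpers.getDecimalPlaces(0.0075); // 4, the digits after the point
helpers.getDecimalPlaces(1.5e-8); // 9, exponent plus mantissa digits
helpers.getDecimalPlaces(75);     // 0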
127
node_modules/jsonschema/lib/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,127 @@
/*
This is the type definition file for TypeScript.
This is for library users. Thus, properties and methods for internal use are omitted.
*/
export declare class Validator {
  constructor();
  customFormats: {[formatName: string]: CustomFormat};
  schemas: {[id: string]: Schema};
  unresolvedRefs: string[];

  attributes: {[property: string]: CustomProperty};

  addSchema(schema?: Schema, uri?: string): Schema|void;
  validate(instance: any, schema: Schema, options?: Options, ctx?: SchemaContext): ValidatorResult;
}

export declare class ValidatorResult {
  constructor(instance: any, schema: Schema, options: Options, ctx: SchemaContext)
  instance: any;
  schema: Schema;
  propertyPath: string;
  errors: ValidationError[];
  throwError: boolean;
  disableFormat: boolean;
  valid: boolean;
  addError(detail: string|ErrorDetail): ValidationError;
  toString(): string;
}

export declare class ValidationError {
  constructor(message?: string, instance?: any, schema?: Schema, propertyPath?: any, name?: string, argument?: any);
  property: string;
  message: string;
  schema: string|Schema;
  instance: any;
  name: string;
  argument: any;
  toString(): string;
  stack: string;
}

export declare class SchemaError extends Error{
  constructor(msg: string, schema: Schema);
  schema: Schema;
  message: string;
}

export declare function validate(instance: any, schema: any, options?: Options): ValidatorResult

export interface Schema {
  id?: string
  $schema?: string
  $ref?: string
  title?: string
  description?: string
  multipleOf?: number
  maximum?: number
  exclusiveMaximum?: boolean
  minimum?: number
  exclusiveMinimum?: boolean
  maxLength?: number
  minLength?: number
  pattern?: string | RegExp
  additionalItems?: boolean | Schema
  items?: Schema | Schema[]
  maxItems?: number
  minItems?: number
  uniqueItems?: boolean
  maxProperties?: number
  minProperties?: number
  required?: string[] | boolean
  additionalProperties?: boolean | Schema
  definitions?: {
    [name: string]: Schema
  }
  properties?: {
    [name: string]: Schema
  }
  patternProperties?: {
    [name: string]: Schema
  }
  dependencies?: {
    [name: string]: Schema | string[]
  }
  'enum'?: any[]
  type?: string | string[]
  format?: string
  allOf?: Schema[]
  anyOf?: Schema[]
  oneOf?: Schema[]
  not?: Schema
}

export interface Options {
  skipAttributes?: string[];
  allowUnknownAttributes?: boolean;
  rewrite?: RewriteFunction;
  propertyName?: string;
  base?: string;
  throwError?: boolean;
}

export interface RewriteFunction {
  (instance: any, schema: Schema, options: Options, ctx: SchemaContext): any;
}

export interface SchemaContext {
  schema: Schema;
  options: Options;
  propertyPath: string;
  base: string;
  schemas: {[base: string]: Schema};
}

export interface CustomFormat {
  (input: any): boolean;
}

export interface CustomProperty {
  (instance: any, schema: Schema, options: Options, ctx: SchemaContext): string|ValidatorResult;
}

export interface ErrorDetail {
  message: string;
  name: string;
  argument: string;
}
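
A minimal sketch of consuming these typings from TypeScript (the schema itself is invented for the example):

import { Validator, Schema } from 'jsonschema';

const schema: Schema = {
  type: 'object',
  properties: { port: { type: 'integer', minimum: 1 } },
  required: ['port'],
};
const v = new Validator();
console.log(v.validate({ port: 8080 }, schema).valid); // true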
14
node_modules/jsonschema/lib/index.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
'use strict';

var Validator = module.exports.Validator = require('./validator');

module.exports.ValidatorResult = require('./helpers').ValidatorResult;
module.exports.ValidationError = require('./helpers').ValidationError;
module.exports.SchemaError = require('./helpers').SchemaError;
module.exports.SchemaScanResult = require('./scan').SchemaScanResult;
module.exports.scan = require('./scan').scan;

module.exports.validate = function (instance, schema, options) {
  var v = new Validator();
  return v.validate(instance, schema, options);
};
74
node_modules/jsonschema/lib/scan.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@

var urilib = require('url');
var helpers = require('./helpers');

module.exports.SchemaScanResult = SchemaScanResult;
function SchemaScanResult(found, ref){
  this.id = found;
  this.ref = ref;
}

/**
 * Recursively scans a schema, collecting every subschema that declares an id
 * and every URI referenced via $ref.
 * @param string base
 * @param object schema
 * @return {SchemaScanResult}
 */
module.exports.scan = function scan(base, schema){
  function scanSchema(baseuri, schema){
    if(!schema || typeof schema!='object') return;
    // Mark all referenced schemas so we can tell later which schemas are referred to, but never defined
    if(schema.$ref){
      var resolvedUri = urilib.resolve(baseuri, schema.$ref);
      ref[resolvedUri] = ref[resolvedUri] ? ref[resolvedUri]+1 : 0;
      return;
    }
    var ourBase = schema.id ? urilib.resolve(baseuri, schema.id) : baseuri;
    if (ourBase) {
      // If there's no fragment, append an empty one
      if(ourBase.indexOf('#')<0) ourBase += '#';
      if(found[ourBase]){
        if(!helpers.deepCompareStrict(found[ourBase], schema)){
          throw new Error('Schema <'+schema+'> already exists with different definition');
        }
        return found[ourBase];
      }
      found[ourBase] = schema;
      // strip trailing fragment
      if(ourBase[ourBase.length-1]=='#'){
        found[ourBase.substring(0, ourBase.length-1)] = schema;
      }
    }
    scanArray(ourBase+'/items', (Array.isArray(schema.items)?schema.items:[schema.items]));
    scanArray(ourBase+'/extends', (Array.isArray(schema.extends)?schema.extends:[schema.extends]));
    scanSchema(ourBase+'/additionalItems', schema.additionalItems);
    scanObject(ourBase+'/properties', schema.properties);
    scanSchema(ourBase+'/additionalProperties', schema.additionalProperties);
    scanObject(ourBase+'/definitions', schema.definitions);
    scanObject(ourBase+'/patternProperties', schema.patternProperties);
    scanObject(ourBase+'/dependencies', schema.dependencies);
    scanArray(ourBase+'/disallow', schema.disallow);
    scanArray(ourBase+'/allOf', schema.allOf);
    scanArray(ourBase+'/anyOf', schema.anyOf);
    scanArray(ourBase+'/oneOf', schema.oneOf);
    scanSchema(ourBase+'/not', schema.not);
  }
  function scanArray(baseuri, schemas){
    if(!Array.isArray(schemas)) return;
    for(var i=0; i<schemas.length; i++){
      scanSchema(baseuri+'/'+i, schemas[i]);
    }
  }
  function scanObject(baseuri, schemas){
    if(!schemas || typeof schemas!='object') return;
    for(var p in schemas){
      scanSchema(baseuri+'/'+p, schemas[p]);
    }
  }

  var found = {};
  var ref = {};
  var schemaUri = base;
  scanSchema(base, schema);
  return new SchemaScanResult(found, ref);
}
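
A sketch of what scan() collects: `id` maps every discovered base URI (both with and without its trailing '#') to its schema, while `ref` records $ref targets that may still be unresolved (URIs invented for the example):

import { scan } from 'jsonschema';

const result = scan('http://example.com/base', {
  id: 'http://example.com/base',
  items: { $ref: 'other' },
});
Object.keys(result.id);  // ['http://example.com/base#', 'http://example.com/base']
Object.keys(result.ref); // ['http://example.com/other']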
320
node_modules/jsonschema/lib/validator.js
generated
vendored
Normal file
@@ -0,0 +1,320 @@
'use strict';

var urilib = require('url');

var attribute = require('./attribute');
var helpers = require('./helpers');
var scanSchema = require('./scan').scan;
var ValidatorResult = helpers.ValidatorResult;
var SchemaError = helpers.SchemaError;
var SchemaContext = helpers.SchemaContext;
//var anonymousBase = 'vnd.jsonschema:///';
var anonymousBase = '/';

/**
 * Creates a new Validator object
 * @name Validator
 * @constructor
 */
var Validator = function Validator () {
  // Allow a validator instance to override global custom formats or to have their
  // own custom formats.
  this.customFormats = Object.create(Validator.prototype.customFormats);
  this.schemas = {};
  this.unresolvedRefs = [];

  // Use Object.create to make this extensible without Validator instances stepping on each other's toes.
  this.types = Object.create(types);
  this.attributes = Object.create(attribute.validators);
};

// Allow formats to be registered globally.
Validator.prototype.customFormats = {};

// Hint at the presence of a property
Validator.prototype.schemas = null;
Validator.prototype.types = null;
Validator.prototype.attributes = null;
Validator.prototype.unresolvedRefs = null;

/**
 * Adds a schema with a certain urn to the Validator instance.
 * @param schema
 * @param urn
 * @return {Object}
 */
Validator.prototype.addSchema = function addSchema (schema, base) {
  var self = this;
  if (!schema) {
    return null;
  }
  var scan = scanSchema(base||anonymousBase, schema);
  var ourUri = base || schema.id;
  for(var uri in scan.id){
    this.schemas[uri] = scan.id[uri];
  }
  for(var uri in scan.ref){
    this.unresolvedRefs.push(uri);
  }
  this.unresolvedRefs = this.unresolvedRefs.filter(function(uri){
    return typeof self.schemas[uri]==='undefined';
  });
  return this.schemas[ourUri];
};

Validator.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) {
  if(!Array.isArray(schemas)) return;
  for(var i=0; i<schemas.length; i++){
    this.addSubSchema(baseuri, schemas[i]);
  }
};

Validator.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) {
  if(!schemas || typeof schemas!='object') return;
  for(var p in schemas){
    this.addSubSchema(baseuri, schemas[p]);
  }
};



/**
 * Sets all the schemas of the Validator instance.
 * @param schemas
 */
Validator.prototype.setSchemas = function setSchemas (schemas) {
  this.schemas = schemas;
};

/**
 * Returns the schema of a certain urn
 * @param urn
 */
Validator.prototype.getSchema = function getSchema (urn) {
  return this.schemas[urn];
};

/**
 * Validates instance against the provided schema
 * @param instance
 * @param schema
 * @param [options]
 * @param [ctx]
 * @return {Array}
 */
Validator.prototype.validate = function validate (instance, schema, options, ctx) {
  if (!options) {
    options = {};
  }
  var propertyName = options.propertyName || 'instance';
  // This will work so long as the function at uri.resolve() will resolve a relative URI to a relative URI
  var base = urilib.resolve(options.base||anonymousBase, schema.id||'');
  if(!ctx){
    ctx = new SchemaContext(schema, options, propertyName, base, Object.create(this.schemas));
    if (!ctx.schemas[base]) {
      ctx.schemas[base] = schema;
    }
    var found = scanSchema(base, schema);
    for(var n in found.id){
      var sch = found.id[n];
      ctx.schemas[n] = sch;
    }
  }
  if (schema) {
    var result = this.validateSchema(instance, schema, options, ctx);
    if (!result) {
      throw new Error('Result undefined');
    }
    return result;
  }
  throw new SchemaError('no schema specified', schema);
};

/**
 * @param Object schema
 * @return mixed schema uri or false
 */
function shouldResolve(schema) {
  var ref = (typeof schema === 'string') ? schema : schema.$ref;
  if (typeof ref=='string') return ref;
  return false;
}

/**
 * Validates an instance against the schema (the actual work horse)
 * @param instance
 * @param schema
 * @param options
 * @param ctx
 * @private
 * @return {ValidatorResult}
 */
Validator.prototype.validateSchema = function validateSchema (instance, schema, options, ctx) {
  var result = new ValidatorResult(instance, schema, options, ctx);

  // Support for the true/false schemas
  if(typeof schema==='boolean') {
    if(schema===true){
      // `true` is always valid
      schema = {};
    }else if(schema===false){
      // `false` is always invalid
      schema = {type: []};
    }
  }else if(!schema){
    // This might be a string
    throw new Error("schema is undefined");
  }

  if (schema['extends']) {
    if (Array.isArray(schema['extends'])) {
      var schemaobj = {schema: schema, ctx: ctx};
      schema['extends'].forEach(this.schemaTraverser.bind(this, schemaobj));
      schema = schemaobj.schema;
      schemaobj.schema = null;
      schemaobj.ctx = null;
      schemaobj = null;
    } else {
      schema = helpers.deepMerge(schema, this.superResolve(schema['extends'], ctx));
    }
  }

  // If passed a string argument, load that schema URI
  var switchSchema;
  if (switchSchema = shouldResolve(schema)) {
    var resolved = this.resolve(schema, switchSchema, ctx);
    var subctx = new SchemaContext(resolved.subschema, options, ctx.propertyPath, resolved.switchSchema, ctx.schemas);
    return this.validateSchema(instance, resolved.subschema, options, subctx);
  }

  var skipAttributes = options && options.skipAttributes || [];
  // Validate each schema attribute against the instance
  for (var key in schema) {
    if (!attribute.ignoreProperties[key] && skipAttributes.indexOf(key) < 0) {
      var validatorErr = null;
      var validator = this.attributes[key];
      if (validator) {
        validatorErr = validator.call(this, instance, schema, options, ctx);
      } else if (options.allowUnknownAttributes === false) {
        // This represents an error with the schema itself, not an invalid instance
        throw new SchemaError("Unsupported attribute: " + key, schema);
      }
      if (validatorErr) {
        result.importErrors(validatorErr);
      }
    }
  }

  if (typeof options.rewrite == 'function') {
    var value = options.rewrite.call(this, instance, schema, options, ctx);
    result.instance = value;
  }
  return result;
};

/**
 * @private
 * @param Object schema
 * @param SchemaContext ctx
 * @returns Object schema or resolved schema
 */
Validator.prototype.schemaTraverser = function schemaTraverser (schemaobj, s) {
  schemaobj.schema = helpers.deepMerge(schemaobj.schema, this.superResolve(s, schemaobj.ctx));
}

/**
 * @private
 * @param Object schema
 * @param SchemaContext ctx
 * @returns Object schema or resolved schema
 */
Validator.prototype.superResolve = function superResolve (schema, ctx) {
  var ref;
  if(ref = shouldResolve(schema)) {
    return this.resolve(schema, ref, ctx).subschema;
  }
  return schema;
}

/**
 * @private
 * @param Object schema
 * @param Object switchSchema
 * @param SchemaContext ctx
 * @return Object resolved schemas {subschema:String, switchSchema: String}
 * @throws SchemaError
 */
Validator.prototype.resolve = function resolve (schema, switchSchema, ctx) {
  switchSchema = ctx.resolve(switchSchema);
  // First see if the schema exists under the provided URI
  if (ctx.schemas[switchSchema]) {
    return {subschema: ctx.schemas[switchSchema], switchSchema: switchSchema};
  }
  // Else try walking the property pointer
  var parsed = urilib.parse(switchSchema);
  var fragment = parsed && parsed.hash;
  var document = fragment && fragment.length && switchSchema.substr(0, switchSchema.length - fragment.length);
  if (!document || !ctx.schemas[document]) {
    throw new SchemaError("no such schema <" + switchSchema + ">", schema);
  }
  var subschema = helpers.objectGetPath(ctx.schemas[document], fragment.substr(1));
  if(subschema===undefined){
    throw new SchemaError("no such schema " + fragment + " located in <" + document + ">", schema);
  }
  return {subschema: subschema, switchSchema: switchSchema};
};

/**
 * Tests whether the instance is of a certain type.
 * @private
 * @param instance
 * @param schema
 * @param options
 * @param ctx
 * @param type
 * @return {boolean}
 */
Validator.prototype.testType = function validateType (instance, schema, options, ctx, type) {
  if (typeof this.types[type] == 'function') {
    return this.types[type].call(this, instance);
  }
  if (type && typeof type == 'object') {
    var res = this.validateSchema(instance, type, options, ctx);
    return res === undefined || !(res && res.errors.length);
  }
  // Undefined or properties not on the list are acceptable, same as not being defined
  return true;
};

var types = Validator.prototype.types = {};
types.string = function testString (instance) {
  return typeof instance == 'string';
};
types.number = function testNumber (instance) {
  // isFinite returns false for NaN, Infinity, and -Infinity
  return typeof instance == 'number' && isFinite(instance);
};
types.integer = function testInteger (instance) {
  return (typeof instance == 'number') && instance % 1 === 0;
};
types.boolean = function testBoolean (instance) {
  return typeof instance == 'boolean';
};
types.array = function testArray (instance) {
  return Array.isArray(instance);
};
types['null'] = function testNull (instance) {
  return instance === null;
};
types.date = function testDate (instance) {
  return instance instanceof Date;
};
types.any = function testAny (instance) {
  return true;
};
types.object = function testObject (instance) {
  // TODO: fix this - see #15
  return instance && (typeof instance === 'object') && !(Array.isArray(instance)) && !(instance instanceof Date);
};

module.exports = Validator;
38
node_modules/jsonschema/package.json
generated
vendored
Normal file
@@ -0,0 +1,38 @@
{
  "author": "Tom de Grunt <tom@degrunt.nl>",
  "name": "jsonschema",
  "version": "1.2.6",
  "license": "MIT",
  "dependencies": {},
  "contributors": [
    {
      "name": "Austin Wright"
    }
  ],
  "main": "./lib",
  "typings": "./lib/index.d.ts",
  "devDependencies": {
    "json-metaschema": "^1.2.0",
    "mocha": "~3",
    "chai": "~1.5.0"
  },
  "optionalDependencies": {},
  "engines": {
    "node": "*"
  },
  "keywords": [
    "json",
    "schema",
    "jsonschema",
    "validator",
    "validation"
  ],
  "repository": {
    "type": "git",
    "url": "git://github.com/tdegrunt/jsonschema.git"
  },
  "description": "A fast and easy to use JSON Schema validator",
  "scripts": {
    "test": "./node_modules/.bin/mocha -R spec"
  }
}
5
package-lock.json
generated
@@ -1658,6 +1658,11 @@
      "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==",
      "dev": true
    },
    "jsonschema": {
      "version": "1.2.6",
      "resolved": "https://registry.npmjs.org/jsonschema/-/jsonschema-1.2.6.tgz",
      "integrity": "sha512-SqhURKZG07JyKKeo/ir24QnS4/BV7a6gQy93bUSe4lUdNp0QNpIz2c9elWJQ9dpc5cQYY6cvCzgRwy0MQCLyqA=="
    },
    "keyv": {
      "version": "3.1.0",
      "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz",
@@ -5,7 +5,7 @@
  "description": "CodeQL action",
  "scripts": {
    "build": "tsc",
    "test": "ava src/**",
    "test": "ava src/** --serial",
    "lint": "tslint -p . -c tslint.json 'src/**/*.ts'",
    "removeNPMAbsolutePaths": "removeNPMAbsolutePaths . --force"
  },
@@ -28,6 +28,7 @@
    "file-url": "^3.0.0",
    "fs": "0.0.1-security",
    "js-yaml": "^3.13.1",
    "jsonschema": "1.2.6",
    "long": "^4.0.0",
    "md5": "^2.2.1",
    "path": "^0.12.7",
@@ -12,7 +12,7 @@ export function includeAndExcludeAnalysisPaths(config: configUtils.Config, langu
}

function isInterpretedLanguage(language): boolean {
  return language === 'javascript' && language === 'python';
  return language === 'javascript' || language === 'python';
}

// Index include/exclude only works in javascript and python
223
src/config-utils.test.ts
Normal file
@@ -0,0 +1,223 @@
import test from 'ava';
import * as fs from 'fs';
import * as path from 'path';

import * as configUtils from './config-utils';
import * as util from './util';

function setInput(name: string, value: string | undefined) {
  // Transformation copied from
  // https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
  const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
  if (value !== undefined) {
    process.env[envVar] = value;
  } else {
    delete process.env[envVar];
  }
}

test("load empty config", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_TEMP'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    setInput('config-file', undefined);

    const config = await configUtils.loadConfig();

    t.deepEqual(config, new configUtils.Config());
  });
});

test("loading config saves config", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_TEMP'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    const configFile = configUtils.getConfigFile();
    // Sanity check the saved config file does not already exist
    t.false(fs.existsSync(configFile));

    const config = await configUtils.loadConfig();

    // The saved config file should now exist
    t.true(fs.existsSync(configFile));

    // And the contents should parse correctly to the config that was returned
    t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
  });
});

test("load input outside of workspace", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_TEMP'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    setInput('config-file', '../input');

    try {
      await configUtils.loadConfig();
      throw new Error('loadConfig did not throw error');
    } catch (err) {
      t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
    }
  });
});

test("load non-existent input", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_TEMP'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    t.false(fs.existsSync(path.join(tmpDir, 'input')));
    setInput('config-file', 'input');

    try {
      await configUtils.loadConfig();
      throw new Error('loadConfig did not throw error');
    } catch (err) {
      t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
    }
  });
});

test("load non-empty input", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_TEMP'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    // Just create a generic config object with non-default values for all fields
    const inputFileContents = `
name: my config
disable-default-queries: true
queries:
  - uses: ./
  - uses: ./foo
  - uses: foo/bar@dev
paths-ignore:
  - a
  - b
paths:
  - c/d`;

    // And the config we expect it to parse to
    const expectedConfig = new configUtils.Config();
    expectedConfig.name = 'my config';
    expectedConfig.disableDefaultQueries = true;
    expectedConfig.additionalQueries.push(tmpDir);
    expectedConfig.additionalQueries.push(path.join(tmpDir, 'foo'));
    expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
    expectedConfig.pathsIgnore = ['a', 'b'];
    expectedConfig.paths = ['c/d'];

    fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
    setInput('config-file', 'input');

    fs.mkdirSync(path.join(tmpDir, 'foo'));

    const actualConfig = await configUtils.loadConfig();

    // Should exactly equal the object we constructed earlier
    t.deepEqual(actualConfig, expectedConfig);
  });
});

function doInvalidInputTest(
  testName: string,
  inputFileContents: string,
  expectedErrorMessageGenerator: (configFile: string) => string) {

  test("load invalid input - " + testName, async t => {
    return await util.withTmpDir(async tmpDir => {
      process.env['RUNNER_TEMP'] = tmpDir;
      process.env['GITHUB_WORKSPACE'] = tmpDir;

      const inputFile = path.join(tmpDir, 'input');
      fs.writeFileSync(inputFile, inputFileContents, 'utf8');
      setInput('config-file', 'input');

      try {
        await configUtils.loadConfig();
        throw new Error('loadConfig did not throw error');
      } catch (err) {
        t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
      }
    });
  });
}

doInvalidInputTest(
  'name invalid type',
  `
name:
  - foo: bar`,
  configUtils.getNameInvalid);

doInvalidInputTest(
  'disable-default-queries invalid type',
  `disable-default-queries: 42`,
  configUtils.getDisableDefaultQueriesInvalid);

doInvalidInputTest(
  'queries invalid type',
  `queries: foo`,
  configUtils.getQueriesInvalid);

doInvalidInputTest(
  'paths-ignore invalid type',
  `paths-ignore: bar`,
  configUtils.getPathsIgnoreInvalid);

doInvalidInputTest(
  'paths invalid type',
  `paths: 17`,
  configUtils.getPathsInvalid);

doInvalidInputTest(
  'queries uses invalid type',
  `
queries:
  - uses:
      - hello: world`,
  configUtils.getQueryUsesInvalid);

function doInvalidQueryUsesTest(
  input: string,
  expectedErrorMessageGenerator: (configFile: string) => string) {

  // Invalid contents of a "queries.uses" field.
  // Should fail with the expected error message
  const inputFileContents = `
name: my config
queries:
  - name: foo
    uses: ` + input;

  doInvalidInputTest(
    "queries uses \"" + input + "\"",
    inputFileContents,
    expectedErrorMessageGenerator);
}

// Various "uses" fields, and the errors they should produce
doInvalidQueryUsesTest(
  "''",
  c => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest(
  "foo/bar",
  c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest(
  "foo/bar@v1@v2",
  c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest(
  "foo@master",
  c => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest(
  "https://github.com/foo/bar@master",
  c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest(
  "./foo",
  c => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest(
  "./..",
  c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
@@ -4,6 +4,15 @@ import * as fs from 'fs';
import * as yaml from 'js-yaml';
import * as path from 'path';

import * as util from './util';

const NAME_PROPERTY = 'name';
const DISPLAY_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
const QUERIES_PROPERTY = 'queries';
const QUERIES_USES_PROPERTY = 'uses';
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
const PATHS_PROPERTY = 'paths';

export class ExternalQuery {
  public repository: string;
  public ref: string;
@@ -15,30 +24,64 @@ export class ExternalQuery {
  }
}

// The set of acceptable values for built-in suites from the codeql bundle
const builtinSuites = ['security-extended', 'security-and-quality'] as const;
// Derive the union type from the array values
type BuiltInSuite = typeof builtinSuites[number];
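
The `as const` assertion above is what makes the union derivable; without it the array would widen to string[]. The same pattern in isolation (names invented for the example):

const sizes = ['small', 'large'] as const;
type Size = typeof sizes[number]; // 'small' | 'large'
const ok: Size = 'small';         // compiles
// const bad: Size = 'medium';    // rejected by the compiler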

export class Config {
  public name = "";
  public disableDefaultQueries = false;
  public additionalQueries: string[] = [];
  public externalQueries: ExternalQuery[] = [];
  public additionalSuites: BuiltInSuite[] = [];
  public pathsIgnore: string[] = [];
  public paths: string[] = [];

  public addQuery(queryUses: string) {
  public addQuery(configFile: string, queryUses: string) {
    // The logic for parsing the string is based on what actions does for
    // parsing the 'uses' actions in the workflow file

    queryUses = queryUses.trim();
    if (queryUses === "") {
      throw '"uses" value for queries cannot be blank';
      throw new Error(getQueryUsesInvalid(configFile));
    }

    // Check for the local path case before we start trying to parse the repository name
    if (queryUses.startsWith("./")) {
      this.additionalQueries.push(queryUses.slice(2));
      const localQueryPath = queryUses.slice(2);
      // Resolve the local path against the workspace so that when this is
      // passed to codeql it resolves to exactly the path we expect it to resolve to.
      const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
      const absoluteQueryPath = path.join(workspacePath, localQueryPath);

      // Check the file exists
      if (!fs.existsSync(absoluteQueryPath)) {
        throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
      }

      // Check the local path doesn't jump outside the repo using '..' or symlinks
      if (!(fs.realpathSync(absoluteQueryPath) + path.sep).startsWith(workspacePath + path.sep)) {
        throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
      }

      this.additionalQueries.push(absoluteQueryPath);
      return;
    }

    // Check for one of the builtin suites
    if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
      const suite = builtinSuites.find((suite) => suite === queryUses);
      if (suite) {
        this.additionalSuites.push(suite);
        return;
      } else {
        throw new Error(getQueryUsesInvalid(configFile, queryUses));
      }
    }

    let tok = queryUses.split('@');
    if (tok.length !== 2) {
      throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
      throw new Error(getQueryUsesInvalid(configFile, queryUses));
    }

    const ref = tok[1];
@@ -46,12 +89,16 @@ export class Config {
    // The first token is the owner
    // The second token is the repo
    // The rest is a path, if there is more than one token combine them to form the full path
    if (tok.length < 2) {
      throw new Error(getQueryUsesInvalid(configFile, queryUses));
    }
    if (tok.length > 3) {
      tok = [tok[0], tok[1], tok.slice(2).join('/')];
    }

    if (tok.length < 2) {
      throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
    // Check none of the parts of the repository name are empty
    if (tok[0].trim() === '' || tok[1].trim() === '') {
      throw new Error(getQueryUsesInvalid(configFile, queryUses));
    }

    let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
@@ -62,10 +109,63 @@ export class Config {
  }
}

const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
export function getNameInvalid(configFile: string): string {
  return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
}

export function getDisableDefaultQueriesInvalid(configFile: string): string {
  return getConfigFilePropertyError(configFile, DISPLAY_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
}

export function getQueriesInvalid(configFile: string): string {
  return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
}

export function getQueryUsesInvalid(configFile: string, queryUses?: string): string {
  return getConfigFilePropertyError(
    configFile,
    QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY,
    'must be a built-in suite (' + builtinSuites.join(' or ') +
      '), a relative path, or be of the form "owner/repo[/path]@ref"' +
      (queryUses !== undefined ? '\n Found: ' + queryUses : ''));
}

export function getPathsIgnoreInvalid(configFile: string): string {
  return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
}

export function getPathsInvalid(configFile: string): string {
  return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
}

export function getLocalPathOutsideOfRepository(configFile: string, localPath: string): string {
  return getConfigFilePropertyError(
    configFile,
    QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY,
    'is invalid as the local path "' + localPath + '" is outside of the repository');
}

export function getLocalPathDoesNotExist(configFile: string, localPath: string): string {
  return getConfigFilePropertyError(
    configFile,
    QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY,
    'is invalid as the local path "' + localPath + '" does not exist in the repository');
}

export function getConfigFileOutsideWorkspaceErrorMessage(configFile: string): string {
  return 'The configuration file "' + configFile + '" is outside of the workspace';
}

export function getConfigFileDoesNotExistErrorMessage(configFile: string): string {
  return 'The configuration file "' + configFile + '" does not exist';
}

function getConfigFilePropertyError(configFile: string, property: string, error: string): string {
  return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
}

function initConfig(): Config {
  const configFile = core.getInput('config-file');
  let configFile = core.getInput('config-file');

  const config = new Config();

@@ -75,60 +175,96 @@ function initConfig(): Config {
    return config;
  }

  try {
    // Treat the config file as relative to the workspace
    const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
    configFile = path.resolve(workspacePath, configFile);

    // Error if the config file is now outside of the workspace
    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
      throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
    }

    // Error if the file does not exist
    if (!fs.existsSync(configFile)) {
      throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
    }

    const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));

    if (parsedYAML.name && typeof parsedYAML.name === "string") {
      config.name = parsedYAML.name;
    if (NAME_PROPERTY in parsedYAML) {
      if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
        throw new Error(getNameInvalid(configFile));
      }
      if (parsedYAML[NAME_PROPERTY].length === 0) {
        throw new Error(getNameInvalid(configFile));
      }
      config.name = parsedYAML[NAME_PROPERTY];
    }

    if (parsedYAML['disable-default-queries'] && typeof parsedYAML['disable-default-queries'] === "boolean") {
      config.disableDefaultQueries = parsedYAML['disable-default-queries'];
    if (DISPLAY_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
      if (typeof parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
        throw new Error(getDisableDefaultQueriesInvalid(configFile));
      }
      config.disableDefaultQueries = parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY];
    }

    const queries = parsedYAML.queries;
    if (queries && queries instanceof Array) {
      queries.forEach(query => {
        if (query.uses && typeof query.uses === "string") {
          config.addQuery(query.uses);
    if (QUERIES_PROPERTY in parsedYAML) {
      if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
        throw new Error(getQueriesInvalid(configFile));
      }
      parsedYAML[QUERIES_PROPERTY].forEach(query => {
        if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
          throw new Error(getQueryUsesInvalid(configFile));
        }
        config.addQuery(configFile, query[QUERIES_USES_PROPERTY]);
      });
    }

    const pathsIgnore = parsedYAML['paths-ignore'];
    if (pathsIgnore && pathsIgnore instanceof Array) {
      pathsIgnore.forEach(path => {
        if (typeof path === "string") {
    if (PATHS_IGNORE_PROPERTY in parsedYAML) {
      if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
        throw new Error(getPathsIgnoreInvalid(configFile));
      }
      parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
        if (typeof path !== "string" || path === '') {
          throw new Error(getPathsIgnoreInvalid(configFile));
        }
        config.pathsIgnore.push(path);
        }
      });
    }

    const paths = parsedYAML.paths;
    if (paths && paths instanceof Array) {
      paths.forEach(path => {
        if (typeof path === "string") {
    if (PATHS_PROPERTY in parsedYAML) {
      if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
        throw new Error(getPathsInvalid(configFile));
      }
      parsedYAML[PATHS_PROPERTY].forEach(path => {
        if (typeof path !== "string" || path === '') {
          throw new Error(getPathsInvalid(configFile));
        }
        config.paths.push(path);
        }
      });
    }
  } catch (err) {
    core.setFailed(err);
  }

  return config;
}

function getConfigFolder(): string {
  return util.getRequiredEnvParam('RUNNER_TEMP');
}

export function getConfigFile(): string {
  return path.join(getConfigFolder(), 'config');
}

async function saveConfig(config: Config) {
  const configString = JSON.stringify(config);
  await io.mkdirP(configFolder);
  fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
  await io.mkdirP(getConfigFolder());
  fs.writeFileSync(getConfigFile(), configString, 'utf8');
  core.debug('Saved config:');
  core.debug(configString);
}

export async function loadConfig(): Promise<Config> {
  const configFile = path.join(configFolder, 'config');
  const configFile = getConfigFile();
  if (fs.existsSync(configFile)) {
    const configString = fs.readFileSync(configFile, 'utf8');
    core.debug('Loaded config:');
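
Two path checks recur in this file: a lexical one for the config file and a symlink-resolving one for local query paths. The realpath variant is sketched below; appending path.sep prevents a sibling like "/workspace-evil" from matching "/workspace" (the helper name is invented for illustration):

import * as fs from 'fs';
import * as path from 'path';

function isInsideWorkspace(workspacePath: string, candidate: string): boolean {
  // realpathSync resolves symlinks, so a link cannot point outside the workspace
  return (fs.realpathSync(candidate) + path.sep).startsWith(workspacePath + path.sep);
}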

@@ -13,7 +13,7 @@ test("checkoutExternalQueries", async t => {
  ];

  await util.withTmpDir(async tmpDir => {
    process.env["RUNNER_WORKSPACE"] = tmpDir;
    process.env["RUNNER_TEMP"] = tmpDir;
    await externalQueries.checkoutExternalQueries(config);

    // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master

@@ -7,7 +7,7 @@ import * as configUtils from './config-utils';
import * as util from './util';

export async function checkoutExternalQueries(config: configUtils.Config) {
  const folder = util.getRequiredEnvParam('RUNNER_WORKSPACE');
  const folder = util.getRequiredEnvParam('RUNNER_TEMP');

  for (const externalQuery of config.externalQueries) {
    core.info('Checking out ' + externalQuery.repository);
@@ -2,6 +2,7 @@ import * as core from '@actions/core';
import * as exec from '@actions/exec';
import * as io from '@actions/io';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

import * as configUtils from './config-utils';
@@ -10,6 +11,44 @@ import * as sharedEnv from './shared-environment';
import * as upload_lib from './upload-lib';
import * as util from './util';

/**
 * A list of queries from https://github.com/github/codeql that
 * we don't want to run. Disabling them here is a quicker alternative to
 * disabling them in the code scanning query suites. Queries should also
 * be disabled in the suites, and removed from this list here once the
 * bundle is updated to make those suite changes live.
 *
 * Format is a map from language to an array of path suffixes of .ql files.
 */
const DISABLED_BUILTIN_QUERIES: {[language: string]: string[]} = {
  'csharp': [
    'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
    'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
  ]
};

function queryIsDisabled(language, query): boolean {
  return (DISABLED_BUILTIN_QUERIES[language] || [])
    .some(disabledQuery => query.endsWith(disabledQuery));
}

function getMemoryFlag(): string {
  let memoryToUseMegaBytes: number;
  const memoryToUseString = core.getInput("ram");
  if (memoryToUseString) {
    memoryToUseMegaBytes = Number(memoryToUseString);
    if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
      throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
    }
  } else {
    const totalMemoryBytes = os.totalmem();
    const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
    const systemReservedMemoryMegaBytes = 256;
    memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
  }
  return "--ram=" + Math.floor(memoryToUseMegaBytes);
}
|
||||
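To make the RAM arithmetic above concrete, here is a self-contained sketch of the fallback branch (the 8 GiB figure is a hypothetical runner size; the 256 MiB reserve and the flag format come from getMemoryFlag itself):

import * as os from 'os';

// Mirrors getMemoryFlag's fallback branch: total memory minus a fixed
// 256 MiB system reserve, floored and rendered as a --ram flag.
function memoryFlagFor(totalMemoryBytes: number): string {
  const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
  const systemReservedMemoryMegaBytes = 256;
  return "--ram=" + Math.floor(totalMemoryMegaBytes - systemReservedMemoryMegaBytes);
}

console.log(memoryFlagFor(8 * 1024 * 1024 * 1024)); // "--ram=7936" on an 8 GiB machine
console.log(memoryFlagFor(os.totalmem()));          // value for the current machine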

async function createdDBForScannedLanguages(codeqlCmd: string, databaseFolder: string) {
  const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
  if (scannedLanguages) {
@@ -51,15 +90,26 @@ async function finalizeDatabaseCreation(codeqlCmd: string, databaseFolder: strin
  }
}

-async function resolveQueryLanguages(codeqlCmd: string, config: configUtils.Config): Promise<Map<string, string[]>> {
-  let res = new Map();
+interface ResolveQueriesOutput {
+  byLanguage: {
+    [language: string]: {
+      [queryPath: string]: {}
+    }
+  };
+  noDeclaredLanguage: {
+    [queryPath: string]: {}
+  };
+  multipleDeclaredLanguages: {
+    [queryPath: string]: {}
+  };
+}

-  if (config.additionalQueries.length !== 0) {
-    let resolveQueriesOutput = '';
+async function runResolveQueries(codeqlCmd: string, queries: string[]): Promise<ResolveQueriesOutput> {
+  let output = '';
  const options = {
    listeners: {
      stdout: (data: Buffer) => {
-        resolveQueriesOutput += data.toString();
+        output += data.toString();
      }
    }
  };
@@ -68,15 +118,46 @@ async function resolveQueryLanguages(codeqlCmd: string, config: configUtils.Conf
    codeqlCmd, [
      'resolve',
      'queries',
-      ...config.additionalQueries,
+      ...queries,
      '--format=bylanguage'
    ],
    options);

-  const resolveQueriesOutputObject = JSON.parse(resolveQueriesOutput);
+  return JSON.parse(output);
+}
+
+async function resolveQueryLanguages(codeqlCmd: string, config: configUtils.Config): Promise<Map<string, string[]>> {
+  let res = new Map();
+
+  if (!config.disableDefaultQueries || config.additionalSuites.length !== 0) {
+    const suites: string[] = [];
+    for (const language of await util.getLanguages()) {
+      if (!config.disableDefaultQueries) {
+        suites.push(language + '-code-scanning.qls');
+      }
+      for (const additionalSuite of config.additionalSuites) {
+        suites.push(language + '-' + additionalSuite + '.qls');
+      }
+    }
+
+    const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, suites);
+
    for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
-      res[language] = Object.keys(<any>queries);
+      if (res[language] === undefined) {
+        res[language] = [];
+      }
+      res[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
    }
+  }

+  if (config.additionalQueries.length !== 0) {
+    const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, config.additionalQueries);

    for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
      if (res[language] === undefined) {
        res[language] = [];
      }
      res[language].push(...Object.keys(queries));
    }

    const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
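For orientation, here is a small self-contained sketch of consuming the `--format=bylanguage` output shape described by the ResolveQueriesOutput interface above (the sample query paths are made up for illustration):

// Hypothetical sample of the JSON emitted by `codeql resolve queries --format=bylanguage`.
const sample: ResolveQueriesOutput = {
  byLanguage: {
    'javascript': { '/queries/UnusedVariable.ql': {}, '/queries/EvalCall.ql': {} }
  },
  noDeclaredLanguage: {},
  multipleDeclaredLanguages: {}
};

// Collect the query paths per language, as resolveQueryLanguages does.
const queriesPerLanguage: { [language: string]: string[] } = {};
for (const [language, queries] of Object.entries(sample.byLanguage)) {
  queriesPerLanguage[language] = Object.keys(queries);
}
// queriesPerLanguage['javascript'] is now the two .ql paths above.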
@@ -102,22 +183,29 @@ async function runQueries(codeqlCmd: string, databaseFolder: string, sarifFolder
  for (let database of fs.readdirSync(databaseFolder)) {
    core.startGroup('Analyzing ' + database);

-    const queries: string[] = [];
-    if (!config.disableDefaultQueries) {
-      queries.push(database + '-code-scanning.qls');
+    const queries = queriesPerLanguage[database] || [];
+    if (queries.length === 0) {
+      throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
    }
-    queries.push(...(queriesPerLanguage[database] || []));

    // Pass the queries to codeql using a file instead of using the command
    // line to avoid command line length restrictions, particularly on windows.
    const querySuite = path.join(databaseFolder, database + '-queries.qls');
    const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
    fs.writeFileSync(querySuite, querySuiteContents);
    core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);

    const sarifFile = path.join(sarifFolder, database + '.sarif');

    await exec.exec(codeqlCmd, [
      'database',
      'analyze',
      getMemoryFlag(),
      path.join(databaseFolder, database),
      '--format=sarif-latest',
      '--output=' + sarifFile,
      '--no-sarif-add-snippets',
-      ...queries
+      querySuite
    ]);

    core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
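To make the query-suite trick above concrete: each selected query becomes one `- query:` entry in a generated .qls file, and that single file path is what `codeql database analyze` receives. A minimal sketch (the query paths are hypothetical):

import * as fs from 'fs';

// Hypothetical resolved query paths for one database.
const queries = [
  '/opt/queries/UnusedVariable.ql',
  '/opt/queries/EvalCall.ql',
];

// One '- query:' line per query, exactly as runQueries writes it.
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync('javascript-queries.qls', querySuiteContents);
// The analyze invocation then gets only 'javascript-queries.qls', keeping
// the command line short no matter how many queries run.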
@@ -151,7 +239,7 @@ async function run() {

  if ('true' === core.getInput('upload')) {
    if (!await upload_lib.upload(sarifFolder)) {
-      await util.reportActionFailed('failed', 'upload');
+      await util.reportActionFailed('finish', 'upload');
      return;
    }
  }
src/sarif_v2.1.0_schema.json (new file, 3350 lines; diff suppressed because it is too large)
@@ -108,13 +108,14 @@ function concatTracerConfigs(configs: { [lang: string]: TracerConfig }): TracerC
    totalLines.push(...lines.slice(2));
  }

-  const newLogFilePath = path.resolve(util.workspaceFolder(), 'compound-build-tracer.log');
-  const spec = path.resolve(util.workspaceFolder(), 'compound-spec');
-  const tempFolder = path.resolve(util.workspaceFolder(), 'compound-temp');
+  const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
+  const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
+  const spec = path.resolve(tempFolder, 'compound-spec');
+  const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
  const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];

  if (copyExecutables) {
-    env['SEMMLE_COPY_EXECUTABLES_ROOT'] = tempFolder;
+    env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
    envSize += 1;
  }

@@ -181,7 +182,7 @@ async function run() {
  const codeqlRam = process.env['CODEQL_RAM'] || '6500';
  core.exportVariable('CODEQL_RAM', codeqlRam);

-  const databaseFolder = path.resolve(util.workspaceFolder(), 'codeql_databases');
+  const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
  await io.mkdirP(databaseFolder);

  let tracedLanguages: { [key: string]: TracerConfig } = {};
@@ -238,8 +239,8 @@ async function run() {
    await util.reportActionFailed('init', error.message, error.stack);
    return;
  }
-  core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
  await util.reportActionSucceeded('init');
+  core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
}

run().catch(e => {
src/testdata/invalid-sarif.sarif (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
{
  "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
  "version": "2.1.0",
  "runs": [
    {
      "tool": {
        "driver": {
          "name": "LGTM.com",
          "organization": "Semmle",
          "version": "1.24.0-SNAPSHOT",
          "rules": []
        }
      },
      "results": 42
    }
  ]
}
src/testdata/valid-sarif.sarif (vendored, new file, 239 lines)
@@ -0,0 +1,239 @@
{
  "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
  "version": "2.1.0",
  "runs": [{
    "tool": {
      "driver": {
        "name": "LGTM.com",
        "organization": "Semmle",
        "version": "1.24.0-SNAPSHOT",
        "rules": [{
          "id": "js/unused-local-variable",
          "name": "js/unused-local-variable",
          "shortDescription": {
            "text": "Unused variable, import, function or class"
          },
          "fullDescription": {
            "text": "Unused variables, imports, functions or classes may be a symptom of a bug and should be examined carefully."
          },
          "defaultConfiguration": {
            "level": "note"
          },
          "properties": {
            "tags": ["maintainability"],
            "kind": "problem",
            "precision": "very-high",
            "name": "Unused variable, import, function or class",
            "description": "Unused variables, imports, functions or classes may be a symptom of a bug\n and should be examined carefully.",
            "id": "js/unused-local-variable",
            "problem.severity": "recommendation"
          }
        }]
      }
    },
    "results": [{
      "ruleId": "js/unused-local-variable",
      "ruleIndex": 0,
      "message": {
        "text": "Unused variable foo."
      },
      "locations": [{
        "physicalLocation": {
          "artifactLocation": {
            "uri": "main.js",
            "uriBaseId": "%SRCROOT%",
            "index": 0
          },
          "region": {
            "startLine": 2,
            "startColumn": 7,
            "endColumn": 10
          }
        }
      }],
      "partialFingerprints": {
        "primaryLocationLineHash": "39fa2ee980eb94b0:1",
        "primaryLocationStartColumnFingerprint": "4"
      }
    }],
    "columnKind": "utf16CodeUnits",
    "properties": {
      "semmle.formatSpecifier": "2.1.0",
      "semmle.sourceLanguage": "java"
    }
  },
  {
    "tool" : {
      "driver" : {
        "name" : "CodeQL command-line toolchain",
        "organization" : "GitHub",
        "semanticVersion" : "2.0.0",
        "rules" : [ {
          "id" : "js/unused-local-variable",
          "name" : "js/unused-local-variable",
          "shortDescription" : {
            "text" : "Unused variable, import, function or class"
          },
          "fullDescription" : {
            "text" : "Unused variables, imports, functions or classes may be a symptom of a bug and should be examined carefully."
          },
          "defaultConfiguration" : {
            "level": "note"
          },
          "properties" : {
            "tags" : [ "maintainability" ],
            "kind" : "problem",
            "precision" : "very-high",
            "name" : "Unused variable, import, function or class",
            "description" : "Unused variables, imports, functions or classes may be a symptom of a bug\n and should be examined carefully.",
            "id" : "js/unused-local-variable",
            "problem.severity" : "recommendation"
          }
        },
        {
          "id": "js/inconsistent-use-of-new",
          "name": "js/inconsistent-use-of-new",
          "shortDescription": {
            "text": "Inconsistent use of 'new'"
          },
          "fullDescription": {
            "text": "If a function is intended to be a constructor, it should always be invoked with 'new'. Otherwise, it should always be invoked as a normal function, that is, without 'new'."
          },
          "defaultConfiguration": {
            "level": "note"
          },
          "properties": {
            "tags": [
              "reliability",
              "correctness",
              "language-features"
            ],
            "kind": "problem",
            "precision": "very-high",
            "problem.severity": "warning"
          }
        } ]
      }
    },
    "artifacts" : [ {
      "location" : {
        "uri" : "main.js",
        "uriBaseId" : "%SRCROOT%",
        "index" : 0
      }
    },
    {
      "location": {
        "uri": "src/promiseUtils.js",
        "uriBaseId": "%SRCROOT%",
        "index": 1
      }
    },
    {
      "location": {
        "uri": "src/LiveQueryClient.js",
        "uriBaseId": "%SRCROOT%",
        "index": 2
      }
    },
    {
      "location": {
        "uri": "src/ParseObject.js",
        "uriBaseId": "%SRCROOT%",
        "index": 3
      }
    } ],
    "results" : [ {
      "ruleId" : "js/unused-local-variable",
      "ruleIndex" : 0,
      "message" : {
        "text" : "Unused variable foo."
      },
      "locations" : [ {
        "physicalLocation" : {
          "artifactLocation" : {
            "uri" : "main.js",
            "uriBaseId" : "%SRCROOT%",
            "index" : 0
          },
          "region" : {
            "startLine" : 2,
            "startColumn" : 7,
            "endColumn" : 10
          }
        }
      } ],
      "partialFingerprints" : {
        "primaryLocationLineHash" : "39fa2ee980eb94b0:1",
        "primaryLocationStartColumnFingerprint" : "4"
      }
    },
    {
      "ruleId": "js/inconsistent-use-of-new",
      "ruleIndex": 1,
      "message": {
        "text": "Function resolvingPromise is sometimes invoked as a constructor (for example [here](1)), and sometimes as a normal function (for example [here](2))."
      },
      "locations": [
        {
          "physicalLocation": {
            "artifactLocation": {
              "uri": "src/promiseUtils.js",
              "uriBaseId": "%SRCROOT%",
              "index": 1
            },
            "region": {
              "startLine": 2
            }
          }
        }
      ],
      "partialFingerprints": {
        "primaryLocationLineHash": "5061c3315a741b7d:1",
        "primaryLocationStartColumnFingerprint": "7"
      },
      "relatedLocations": [
        {
          "id": 1,
          "physicalLocation": {
            "artifactLocation": {
              "uri": "src/ParseObject.js",
              "uriBaseId": "%SRCROOT%",
              "index": 3
            },
            "region": {
              "startLine": 2281,
              "startColumn": 33,
              "endColumn": 55
            }
          },
          "message": {
            "text": "here"
          }
        },
        {
          "id": 2,
          "physicalLocation": {
            "artifactLocation": {
              "uri": "src/LiveQueryClient.js",
              "uriBaseId": "%SRCROOT%",
              "index": 2
            },
            "region": {
              "startLine": 166
            }
          },
          "message": {
            "text": "here"
          }
        }
      ]
    } ],
    "newlineSequences" : [ "\r\n", "\n", "\u2028", "\u2029" ],
    "columnKind" : "utf16CodeUnits",
    "properties" : {
      "semmle.formatSpecifier" : "sarif-latest"
    }
  }
  ]
}
src/upload-lib.test.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
import test from 'ava';

import * as uploadLib from './upload-lib';

test('validateSarifFileSchema - valid', t => {
  const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
  t.true(uploadLib.validateSarifFileSchema(inputFile));
});

test('validateSarifFileSchema - invalid', t => {
  const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
  t.false(uploadLib.validateSarifFileSchema(inputFile));
  // validateSarifFileSchema calls core.setFailed which sets the exit code on error
  process.exitCode = 0;
});
@@ -1,9 +1,9 @@
import * as core from '@actions/core';
import * as http from '@actions/http-client';
import * as auth from '@actions/http-client/auth';
import * as io from '@actions/io';
import fileUrl from 'file-url';
import * as fs from 'fs';
import * as jsonschema from 'jsonschema';
import * as path from 'path';
import zlib from 'zlib';

@@ -11,19 +11,6 @@ import * as fingerprints from './fingerprints';
import * as sharedEnv from './shared-environment';
import * as util from './util';

-// Construct the location of the sentinel file for detecting multiple uploads.
-// The returned location should be writable.
-async function getSentinelFilePath(): Promise<string> {
-  // Use the temp dir instead of placing next to the sarif file because of
-  // issues with docker actions. The directory containing the sarif file
-  // may not be writable by us.
-  const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
-  await io.mkdirP(uploadsTmpDir);
-  // Hash the absolute path so we'll behave correctly in the unlikely
-  // scenario a file is referenced twice with different paths.
-  return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
-}

// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
export function combineSarifFiles(sarifFiles: string[]): string {
@@ -128,29 +115,69 @@ export async function upload(input: string): Promise<boolean> {
  }
}

// Counts the number of results in the given SARIF file
export function countResultsInSarif(sarif: string): number {
  let numResults = 0;
  for (const run of JSON.parse(sarif).runs) {
    numResults += run.results.length;
  }
  return numResults;
}

// Validates that the given file path refers to a valid SARIF file.
// Returns true if the file is valid, otherwise reports the errors and returns false.
export function validateSarifFileSchema(sarifFilePath: string): boolean {
  const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
  const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));

  const result = new jsonschema.Validator().validate(sarif, schema);
  if (result.valid) {
    return true;
  } else {
    // Set the failure message to the stacks of all the errors.
    // This should be of a manageable size and may even give enough to fix the error.
    const errorMessages = result.errors.map(e => "- " + e.stack);
    core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));

    // Also output the more verbose error messages in groups as these may be very large.
    for (const error of result.errors) {
      core.startGroup("Error details: " + error.stack);
      core.info(JSON.stringify(error, null, 2));
      core.endGroup();
    }

    return false;
  }
}
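validateSarifFileSchema above leans entirely on the jsonschema package; a minimal standalone sketch of the same validation pattern, with a toy schema standing in for sarif_v2.1.0_schema.json and a deliberately failing instance:

import * as jsonschema from 'jsonschema';

// Toy schema: version must be a string.
const schema = {
  type: 'object',
  properties: { version: { type: 'string' } },
  required: ['version'],
};

const result = new jsonschema.Validator().validate({ version: 2.1 }, schema);
console.log(result.valid); // false: version is a number here
// Each error's .stack is a one-line description, which is what the
// failure message above joins together.
console.log(result.errors.map(e => '- ' + e.stack).join('\n'));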

// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
  core.startGroup("Uploading results");
  let succeeded = false;
  try {
    // Check if an upload has happened before. If so then abort.
    // This is intended to catch when the finish and upload-sarif actions
    // are used together, and then the upload-sarif action is invoked twice.
-    const sentinelFile = await getSentinelFilePath();
-    if (fs.existsSync(sentinelFile)) {
-      core.info("Aborting as an upload has already happened from this job");
+    core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
+
+    const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
+    if (process.env[sentinelEnvVar]) {
+      core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
      return false;
    }
+    core.exportVariable(sentinelEnvVar, sentinelEnvVar);
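The run-once guard above replaces the old sentinel file with an environment variable; a minimal sketch of the same idea (using process.env directly, where the action uses core.exportVariable so the marker also persists to later workflow steps):

// Returns true the first time it is called in a job, false afterwards.
const sentinelEnvVar = 'CODEQL_UPLOAD_SARIF';
function firstUploadInJob(): boolean {
  if (process.env[sentinelEnvVar]) {
    return false; // an analyze or upload-sarif step already uploaded
  }
  process.env[sentinelEnvVar] = sentinelEnvVar; // core.exportVariable in the action
  return true;
}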

-    const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
+    // Validate that the files we were asked to upload are all valid SARIF files
+    for (const file of sarifFiles) {
+      if (!validateSarifFileSchema(file)) {
+        return false;
+      }
+    }
+
+    const commitOid = await util.getCommitOid();
    const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
    const ref = util.getRef();
    const analysisKey = await util.getAnalysisKey();
    const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
    const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];

-    core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
    let sarifPayload = combineSarifFiles(sarifFiles);
    sarifPayload = fingerprints.addFingerprints(sarifPayload);

@@ -184,15 +211,14 @@ async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
      "tool_names": toolNames,
    });

    // Log some useful debug info about the upload
    core.debug("Raw upload size: " + sarifPayload.length + " bytes");
    core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
    core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));

    // Make the upload
-    succeeded = await uploadPayload(payload);
+    const succeeded = await uploadPayload(payload);

-    // Mark that we have made an upload
-    fs.writeFileSync(sentinelFile, '');

  } catch (error) {
    core.setFailed(error.message);
  }
  core.endGroup();

  return succeeded;
src/util.ts (55 changed lines)
@@ -1,4 +1,5 @@
import * as core from '@actions/core';
+import * as exec from '@actions/exec';
import * as http from '@actions/http-client';
import * as auth from '@actions/http-client/auth';
import * as octokit from '@octokit/rest';
@@ -25,13 +26,6 @@ export function should_abort(actionName: string, requireInitActionHasRun: boolea
    return true;
  }

-  // Should abort if called on a merge commit for a pull request.
-  if (ref.startsWith('refs/pull/')) {
-    core.warning('The CodeQL ' + actionName + ' action is intended for workflows triggered on `push` events, '
-      + 'but the current workflow is running on a pull request. Aborting.');
-    return true;
-  }

  // If the init action is required, then check that it completed successfully.
  if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
    core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
@@ -41,17 +35,6 @@ export function should_abort(actionName: string, requireInitActionHasRun: boolea
  return false;
}

-/**
- * Resolve the path to the workspace folder.
- */
-export function workspaceFolder(): string {
-  let workspaceFolder = process.env['RUNNER_WORKSPACE'];
-  if (!workspaceFolder)
-    workspaceFolder = path.resolve('..');
-
-  return workspaceFolder;
-}

/**
 * Get an environment parameter, but throw an error if it is not set.
 */
@@ -152,6 +135,21 @@ export async function getLanguages(): Promise<string[]> {
  return languages;
}

+/**
+ * Gets the SHA of the commit that is currently checked out.
+ */
+export async function getCommitOid(): Promise<string> {
+  let commitOid = '';
+  await exec.exec('git', ['rev-parse', 'HEAD'], {
+    silent: true,
+    listeners: {
+      stdout: (data) => { commitOid += data.toString(); },
+      stderr: (data) => { process.stderr.write(data); }
+    }
+  });
+  return commitOid.trim();
+}

/**
 * Get the path of the currently executing workflow.
 */
@@ -204,8 +202,20 @@ export async function getAnalysisKey(): Promise<string> {
 * Get the ref currently being analyzed.
 */
export function getRef(): string {
-  // it's in the form "refs/heads/master"
-  return getRequiredEnvParam('GITHUB_REF');
+  // Will be in the form "refs/heads/master" on a push event
+  // or in the form "refs/pull/N/merge" on a pull_request event
+  const ref = getRequiredEnvParam('GITHUB_REF');
+
+  // For pull request refs we want to convert from the 'merge' ref
+  // to the 'head' ref, as that is what we want to analyse.
+  // There should have been some code earlier in the workflow to do
+  // the checkout, but we have no way of verifying that here.
+  const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
+  if (pull_ref_regex.test(ref)) {
+    return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
+  } else {
+    return ref;
+  }
}
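The merge-to-head rewrite in getRef is a single regex substitution; applied to a sample pull request ref:

// 'refs/pull/123/merge' (what GITHUB_REF holds on a pull_request event)
// becomes 'refs/pull/123/head' (the commit actually checked out for analysis).
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
console.log('refs/pull/123/merge'.replace(pull_ref_regex, 'refs/pull/$1/head'));
// => refs/pull/123/head
console.log(pull_ref_regex.test('refs/heads/master')); // false, so push refs pass through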

interface StatusReport {
@@ -377,8 +387,9 @@ export function getToolNames(sarifContents: string): string[] {

// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
-export async function withTmpDir(body: (tmpDir: string) => Promise<void>) {
+export async function withTmpDir<T>(body: (tmpDir: string) => Promise<T>): Promise<T> {
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
-  await body(tmpDir);
+  const result = await body(tmpDir);
  fs.rmdirSync(tmpDir, { recursive: true });
+  return result;
}
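With the generic signature above, withTmpDir can now hand a value back to the caller; a usage sketch (the demo wrapper is just for illustration):

import * as fs from 'fs';
import * as path from 'path';
import { withTmpDir } from './util';

async function demo() {
  // The body's resolved value is returned after the directory is removed.
  const byteCount = await withTmpDir(async tmpDir => {
    const scratch = path.join(tmpDir, 'scratch.txt');
    fs.writeFileSync(scratch, 'hello');
    return fs.statSync(scratch).size;
  });
  console.log(byteCount); // 5, and the temporary directory no longer exists here
}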
@@ -3,15 +3,31 @@ name: Use custom queries
disable-default-queries: true

queries:
-  - name: Additional C++ queries
-    uses: github/codeql/cpp/ql/src/codeql-suites/cpp-lgtm.qls@lgtm.com
-  - name: Additional C# queries
-    uses: github/codeql/csharp/ql/src/codeql-suites/csharp-lgtm.qls@lgtm.com
-  - name: Additional Go queries
-    uses: github/codeql-go/ql/src/codeql-suites/go-lgtm.qls@lgtm.com
-  - name: Additional Java queries
-    uses: github/codeql/java/ql/src/codeql-suites/java-lgtm.qls@lgtm.com
-  - name: Additional Javascript queries
-    uses: github/codeql/javascript/ql/src/codeql-suites/javascript-lgtm.qls@lgtm.com
-  - name: Additional Python queries
-    uses: github/codeql/python/ql/src/codeql-suites/python-lgtm.qls@lgtm.com
+  # Query suites
+  - name: Select a query suite
+    uses: ./codeql-qlpacks/complex-python-qlpack/rootAndBar.qls
+  # QL pack subset
+  - name: Select a ql file
+    uses: ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
+  - name: Select a subfolder
+    uses: ./codeql-qlpacks/complex-javascript-qlpack/foo
+  - name: Select a folder with two subfolders
+    uses: ./codeql-qlpacks/complex-javascript-qlpack/foo2
+  # Inrepo QL pack
+  - name: Select an inrepo ql pack
+    uses: ./codeql-qlpacks/csharp-qlpack
+  - name: Java queries
+    uses: ./codeql-qlpacks/java-qlpack
+  # External QL packs
+  - name: Go queries
+    uses: Anthophila/go-querypack@master
+  - name: Cpp queries
+    uses: Anthophila/cpp-querypack@second-branch
+  - name: Javascript queries
+    uses: Anthophila/javascript-querypack/show_ifs2.ql@master
+  - name: Python queries
+    uses: Anthophila/python-querypack/show_ifs2.ql@second-branch

paths-ignore:
  - tests