mirror of
https://github.com/github/codeql-action.git
synced 2025-12-08 00:38:30 +08:00
Compare commits
1 Commits
codeql-bun
...
test-paths
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e4011f4497 |
@@ -1,10 +0,0 @@
|
|||||||
root = true
|
|
||||||
|
|
||||||
[*]
|
|
||||||
end_of_line = lf
|
|
||||||
insert_final_newline = true
|
|
||||||
trim_trailing_whitespace = true
|
|
||||||
|
|
||||||
[*.ts]
|
|
||||||
indent_style = space
|
|
||||||
indent_size = 2
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
**/webpack.config.js
|
|
||||||
lib/**
|
|
||||||
runner/dist/**
|
|
||||||
src/testdata/**
|
|
||||||
tests/**
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
|
|
||||||
{
|
|
||||||
"parser": "@typescript-eslint/parser",
|
|
||||||
"parserOptions": {
|
|
||||||
"project": "./tsconfig.json"
|
|
||||||
},
|
|
||||||
"plugins": ["@typescript-eslint", "filenames", "github", "import", "no-async-foreach"],
|
|
||||||
"extends": [
|
|
||||||
"eslint:recommended",
|
|
||||||
"plugin:@typescript-eslint/recommended",
|
|
||||||
"plugin:@typescript-eslint/recommended-requiring-type-checking",
|
|
||||||
"plugin:github/recommended",
|
|
||||||
"plugin:github/typescript"
|
|
||||||
],
|
|
||||||
"rules": {
|
|
||||||
"filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],
|
|
||||||
"import/extensions": "error",
|
|
||||||
"import/no-amd": "error",
|
|
||||||
"import/no-commonjs": "error",
|
|
||||||
"import/no-dynamic-require": "error",
|
|
||||||
"import/no-extraneous-dependencies": ["error"],
|
|
||||||
"import/no-namespace": "off",
|
|
||||||
"import/no-unresolved": "error",
|
|
||||||
"import/no-webpack-loader-syntax": "error",
|
|
||||||
"import/order": ["error", {
|
|
||||||
"alphabetize": {"order": "asc"},
|
|
||||||
"newlines-between": "always"
|
|
||||||
}],
|
|
||||||
"no-async-foreach/no-async-foreach": "error",
|
|
||||||
"no-console": "off",
|
|
||||||
"no-sequences": "error",
|
|
||||||
"no-shadow": "off",
|
|
||||||
"@typescript-eslint/no-shadow": ["error"],
|
|
||||||
"one-var": ["error", "never"]
|
|
||||||
},
|
|
||||||
"overrides": [{
|
|
||||||
// "temporarily downgraded during transition to eslint
|
|
||||||
"files": "**",
|
|
||||||
"rules": {
|
|
||||||
"@typescript-eslint/ban-types": "off",
|
|
||||||
"@typescript-eslint/explicit-module-boundary-types": "off",
|
|
||||||
"@typescript-eslint/no-explicit-any": "off",
|
|
||||||
"@typescript-eslint/no-unsafe-assignment": "off",
|
|
||||||
"@typescript-eslint/no-unsafe-call": "off",
|
|
||||||
"@typescript-eslint/no-unsafe-member-access": "off",
|
|
||||||
"@typescript-eslint/no-unsafe-return": "off",
|
|
||||||
"@typescript-eslint/no-var-requires": "off",
|
|
||||||
"@typescript-eslint/prefer-regexp-exec": "off",
|
|
||||||
"@typescript-eslint/require-await": "off",
|
|
||||||
"@typescript-eslint/restrict-template-expressions": "off",
|
|
||||||
"func-style": "off"
|
|
||||||
}
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
4
.github/ISSUE_TEMPLATE/config.yml
vendored
4
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,5 +1,5 @@
|
|||||||
blank_issues_enabled: true
|
blank_issues_enabled: true
|
||||||
contact_links:
|
contact_links:
|
||||||
- name: Contact GitHub Support
|
- name: Contact GitHub Support
|
||||||
url: https://support.github.com/request
|
url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
|
||||||
about: Contact Support
|
about: Contact Support about code scanning
|
||||||
|
|||||||
3
.github/codeql/codeql-config.yml
vendored
3
.github/codeql/codeql-config.yml
vendored
@@ -10,5 +10,4 @@ queries:
|
|||||||
- uses: security-extended
|
- uses: security-extended
|
||||||
- uses: security-and-quality
|
- uses: security-and-quality
|
||||||
paths-ignore:
|
paths-ignore:
|
||||||
- tests
|
- tests
|
||||||
- lib
|
|
||||||
3
.github/pull_request_template.md
vendored
3
.github/pull_request_template.md
vendored
@@ -1,4 +1,7 @@
|
|||||||
### Merge / deployment checklist
|
### Merge / deployment checklist
|
||||||
|
|
||||||
|
- Run test builds as necessary. Can be on this repository or elsewhere as needed in order to test the change - please include links to tests in other repos!
|
||||||
|
- [ ] CodeQL using init/analyze actions
|
||||||
|
- [ ] 3rd party tool using upload action
|
||||||
- [ ] Confirm this change is backwards compatible with existing workflows.
|
- [ ] Confirm this change is backwards compatible with existing workflows.
|
||||||
- [ ] Confirm the [readme](https://github.com/github/codeql-action/blob/master/README.md) has been updated if necessary.
|
- [ ] Confirm the [readme](https://github.com/github/codeql-action/blob/master/README.md) has been updated if necessary.
|
||||||
|
|||||||
50
.github/update-release-branch.py
vendored
50
.github/update-release-branch.py
vendored
@@ -7,7 +7,7 @@ import sys
|
|||||||
|
|
||||||
# The branch being merged from.
|
# The branch being merged from.
|
||||||
# This is the one that contains day-to-day development work.
|
# This is the one that contains day-to-day development work.
|
||||||
MAIN_BRANCH = 'main'
|
MASTER_BRANCH = 'master'
|
||||||
# The branch being merged into.
|
# The branch being merged into.
|
||||||
# This is the release branch that users reference.
|
# This is the release branch that users reference.
|
||||||
LATEST_RELEASE_BRANCH = 'v1'
|
LATEST_RELEASE_BRANCH = 'v1'
|
||||||
@@ -28,14 +28,14 @@ def branch_exists_on_remote(branch_name):
|
|||||||
return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
|
return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
|
||||||
|
|
||||||
# Opens a PR from the given branch to the release branch
|
# Opens a PR from the given branch to the release branch
|
||||||
def open_pr(repo, all_commits, short_main_sha, branch_name):
|
def open_pr(repo, all_commits, short_master_sha, branch_name):
|
||||||
# Sort the commits into the pull requests that introduced them,
|
# Sort the commits into the pull requests that introduced them,
|
||||||
# and any commits that don't have a pull request
|
# and any commits that don't have a pull request
|
||||||
pull_requests = []
|
pull_requests = []
|
||||||
commits_without_pull_requests = []
|
commits_without_pull_requests = []
|
||||||
for commit in all_commits:
|
for commit in all_commits:
|
||||||
pr = get_pr_for_commit(repo, commit)
|
pr = get_pr_for_commit(repo, commit)
|
||||||
|
|
||||||
if pr is None:
|
if pr is None:
|
||||||
commits_without_pull_requests.append(commit)
|
commits_without_pull_requests.append(commit)
|
||||||
elif not any(p for p in pull_requests if p.number == pr.number):
|
elif not any(p for p in pull_requests if p.number == pr.number):
|
||||||
@@ -45,11 +45,11 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
|
|||||||
print('Found ' + str(len(commits_without_pull_requests)) + ' commits not in a pull request')
|
print('Found ' + str(len(commits_without_pull_requests)) + ' commits not in a pull request')
|
||||||
|
|
||||||
# Sort PRs and commits by age
|
# Sort PRs and commits by age
|
||||||
pull_requests = sorted(pull_requests, key=lambda pr: pr.number)
|
sorted(pull_requests, key=lambda pr: pr.number)
|
||||||
commits_without_pull_requests = sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)
|
sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)
|
||||||
|
|
||||||
# Start constructing the body text
|
# Start constructing the body text
|
||||||
body = 'Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH
|
body = 'Merging ' + short_master_sha + ' into ' + LATEST_RELEASE_BRANCH
|
||||||
|
|
||||||
conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
|
conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
|
||||||
body += '\n\nConductor for this PR is @' + conductor
|
body += '\n\nConductor for this PR is @' + conductor
|
||||||
@@ -62,7 +62,7 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
|
|||||||
body += '\n- #' + str(pr.number)
|
body += '\n- #' + str(pr.number)
|
||||||
body += ' - ' + pr.title
|
body += ' - ' + pr.title
|
||||||
body += ' (@' + merger + ')'
|
body += ' (@' + merger + ')'
|
||||||
|
|
||||||
# List all commits not part of a PR
|
# List all commits not part of a PR
|
||||||
if len(commits_without_pull_requests) > 0:
|
if len(commits_without_pull_requests) > 0:
|
||||||
body += '\n\nContains the following commits not from a pull request:'
|
body += '\n\nContains the following commits not from a pull request:'
|
||||||
@@ -71,7 +71,7 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
|
|||||||
body += ' - ' + get_truncated_commit_message(commit)
|
body += ' - ' + get_truncated_commit_message(commit)
|
||||||
body += ' (@' + commit.author.login + ')'
|
body += ' (@' + commit.author.login + ')'
|
||||||
|
|
||||||
title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
|
title = 'Merge ' + MASTER_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
|
||||||
|
|
||||||
# Create the pull request
|
# Create the pull request
|
||||||
pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
|
pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
|
||||||
@@ -86,16 +86,16 @@ def get_conductor(repo, pull_requests, other_commits):
|
|||||||
# If there are any PRs then use whoever merged the last one
|
# If there are any PRs then use whoever merged the last one
|
||||||
if len(pull_requests) > 0:
|
if len(pull_requests) > 0:
|
||||||
return get_merger_of_pr(repo, pull_requests[-1])
|
return get_merger_of_pr(repo, pull_requests[-1])
|
||||||
|
|
||||||
# Otherwise take the author of the latest commit
|
# Otherwise take the author of the latest commit
|
||||||
return other_commits[-1].author.login
|
return other_commits[-1].author.login
|
||||||
|
|
||||||
# Gets a list of the SHAs of all commits that have happened on main
|
# Gets a list of the SHAs of all commits that have happened on master
|
||||||
# since the release branched off.
|
# since the release branched off.
|
||||||
# This will not include any commits that exist on the release branch
|
# This will not include any commits that exist on the release branch
|
||||||
# that aren't on main.
|
# that aren't on master.
|
||||||
def get_commit_difference(repo):
|
def get_commit_difference(repo):
|
||||||
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '..' + MAIN_BRANCH).strip().split('\n')
|
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MASTER_BRANCH).strip().split('\n')
|
||||||
|
|
||||||
# Convert to full-fledged commit objects
|
# Convert to full-fledged commit objects
|
||||||
commits = [repo.get_commit(c) for c in commits]
|
commits = [repo.get_commit(c) for c in commits]
|
||||||
@@ -115,16 +115,16 @@ def get_truncated_commit_message(commit):
|
|||||||
else:
|
else:
|
||||||
return message
|
return message
|
||||||
|
|
||||||
# Converts a commit into the PR that introduced it to the main branch.
|
# Converts a commit into the PR that introduced it to the master branch.
|
||||||
# Returns the PR object, or None if no PR could be found.
|
# Returns the PR object, or None if no PR could be found.
|
||||||
def get_pr_for_commit(repo, commit):
|
def get_pr_for_commit(repo, commit):
|
||||||
prs = commit.get_pulls()
|
prs = commit.get_pulls()
|
||||||
|
|
||||||
if prs.totalCount > 0:
|
if prs.totalCount > 0:
|
||||||
# In the case that there are multiple PRs, return the earliest one
|
# In the case that there are multiple PRs, return the earliest one
|
||||||
prs = list(prs)
|
prs = list(prs)
|
||||||
sorted_prs = sorted(prs, key=lambda pr: int(pr.number))
|
sorted(prs, key=lambda pr: int(pr.number))
|
||||||
return sorted_prs[0]
|
return prs[0]
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -144,20 +144,20 @@ def main():
|
|||||||
repo = Github(github_token).get_repo(repository_nwo)
|
repo = Github(github_token).get_repo(repository_nwo)
|
||||||
|
|
||||||
# Print what we intend to go
|
# Print what we intend to go
|
||||||
print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
|
print('Considering difference between ' + MASTER_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
|
||||||
short_main_sha = run_git('rev-parse', '--short', MAIN_BRANCH).strip()
|
short_master_sha = run_git('rev-parse', '--short', MASTER_BRANCH).strip()
|
||||||
print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)
|
print('Current head of ' + MASTER_BRANCH + ' is ' + short_master_sha)
|
||||||
|
|
||||||
# See if there are any commits to merge in
|
# See if there are any commits to merge in
|
||||||
commits = get_commit_difference(repo)
|
commits = get_commit_difference(repo)
|
||||||
if len(commits) == 0:
|
if len(commits) == 0:
|
||||||
print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
|
print('No commits to merge from ' + MASTER_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
|
||||||
return
|
return
|
||||||
|
|
||||||
# The branch name is based off of the name of branch being merged into
|
# The branch name is based off of the name of branch being merged into
|
||||||
# and the SHA of the branch being merged from. Thus if the branch already
|
# and the SHA of the branch being merged from. Thus if the branch already
|
||||||
# exists we can assume we don't need to recreate it.
|
# exists we can assume we don't need to recreate it.
|
||||||
new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_main_sha
|
new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_master_sha
|
||||||
print('Branch name is ' + new_branch_name)
|
print('Branch name is ' + new_branch_name)
|
||||||
|
|
||||||
# Check if the branch already exists. If so we can abort as this script
|
# Check if the branch already exists. If so we can abort as this script
|
||||||
@@ -165,14 +165,14 @@ def main():
|
|||||||
if branch_exists_on_remote(new_branch_name):
|
if branch_exists_on_remote(new_branch_name):
|
||||||
print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
|
print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
|
||||||
return
|
return
|
||||||
|
|
||||||
# Create the new branch and push it to the remote
|
# Create the new branch and push it to the remote
|
||||||
print('Creating branch ' + new_branch_name)
|
print('Creating branch ' + new_branch_name)
|
||||||
run_git('checkout', '-b', new_branch_name, MAIN_BRANCH)
|
run_git('checkout', '-b', new_branch_name, MASTER_BRANCH)
|
||||||
run_git('push', ORIGIN, new_branch_name)
|
run_git('push', ORIGIN, new_branch_name)
|
||||||
|
|
||||||
# Open a PR to update the branch
|
# Open a PR to update the branch
|
||||||
open_pr(repo, commits, short_main_sha, new_branch_name)
|
open_pr(repo, commits, short_master_sha, new_branch_name)
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -1,22 +0,0 @@
|
|||||||
name: Check Expected Release Files
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- .github/workflows/check-expected-release-files.yml
|
|
||||||
- src/defaults.json
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check-expected-release-files:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout CodeQL Action
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
- name: Check Expected Release Files
|
|
||||||
run: |
|
|
||||||
bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"
|
|
||||||
set -x
|
|
||||||
for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz" "codeql-runner-linux" "codeql-runner-macos" "codeql-runner-win.exe"; do
|
|
||||||
curl --location --fail --head --request GET "https://github.com/github/codeql-action/releases/download/$bundle_version/$expected_file" > /dev/null
|
|
||||||
done
|
|
||||||
69
.github/workflows/codeql.yml
vendored
69
.github/workflows/codeql.yml
vendored
@@ -1,73 +1,28 @@
|
|||||||
name: "CodeQL action"
|
name: "CodeQL action"
|
||||||
|
|
||||||
on:
|
on: [push, pull_request]
|
||||||
push:
|
|
||||||
branches: [main, v1]
|
|
||||||
pull_request:
|
|
||||||
branches: [main, v1]
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
# Identify the CodeQL tool versions to use in the analysis job.
|
|
||||||
check-codeql-versions:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
versions: ${{ steps.compare.outputs.versions }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Init with default CodeQL bundle from the VM image
|
|
||||||
id: init-default
|
|
||||||
uses: ./init
|
|
||||||
with:
|
|
||||||
languages: javascript
|
|
||||||
- name: Remove empty database
|
|
||||||
# allows us to run init a second time
|
|
||||||
run: |
|
|
||||||
rm -rf "$RUNNER_TEMP/codeql_databases"
|
|
||||||
- name: Init with latest CodeQL bundle
|
|
||||||
id: init-latest
|
|
||||||
uses: ./init
|
|
||||||
with:
|
|
||||||
tools: latest
|
|
||||||
languages: javascript
|
|
||||||
- name: Compare default and latest CodeQL bundle versions
|
|
||||||
id: compare
|
|
||||||
env:
|
|
||||||
CODEQL_DEFAULT: ${{ steps.init-default.outputs.codeql-path }}
|
|
||||||
CODEQL_LATEST: ${{ steps.init-latest.outputs.codeql-path }}
|
|
||||||
run: |
|
|
||||||
CODEQL_VERSION_DEFAULT="$("$CODEQL_DEFAULT" version --format terse)"
|
|
||||||
CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)"
|
|
||||||
echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT"
|
|
||||||
echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST"
|
|
||||||
if [[ "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
|
|
||||||
# Just use `tools: null` to avoid duplication in the analysis job.
|
|
||||||
VERSIONS_JSON='[null]'
|
|
||||||
else
|
|
||||||
# Use both `tools: null` and `tools: latest` in the analysis job.
|
|
||||||
VERSIONS_JSON='[null, "latest"]'
|
|
||||||
fi
|
|
||||||
# Output a JSON-encoded list with the distinct versions to test against.
|
|
||||||
echo "Suggested matrix config for analysis job: $VERSIONS_JSON"
|
|
||||||
echo "::set-output name=versions::${VERSIONS_JSON}"
|
|
||||||
|
|
||||||
build:
|
build:
|
||||||
needs: [check-codeql-versions]
|
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
os: [ubuntu-latest,windows-latest,macos-latest]
|
os: [ubuntu-latest,windows-latest,macos-latest]
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v1
|
||||||
|
with:
|
||||||
|
# Must fetch at least the immediate parents so that if this is
|
||||||
|
# a pull request then we can checkout the head of the pull request.
|
||||||
|
fetch-depth: 2
|
||||||
|
|
||||||
|
# If this run was triggered by a pull request event then checkout
|
||||||
|
# the head of the pull request instead of the merge commit.
|
||||||
|
- run: git checkout HEAD^2
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
|
||||||
- uses: ./init
|
- uses: ./init
|
||||||
id: init
|
|
||||||
with:
|
with:
|
||||||
languages: javascript
|
languages: javascript
|
||||||
config-file: ./.github/codeql/codeql-config.yml
|
config-file: ./.github/codeql/codeql-config.yml
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
# confirm steps.init.outputs.codeql-path points to the codeql binary
|
|
||||||
- name: Print CodeQL Version
|
|
||||||
run: ${{steps.init.outputs.codeql-path}} version --format=json
|
|
||||||
- uses: ./analyze
|
- uses: ./analyze
|
||||||
|
|||||||
126
.github/workflows/integration-testing.yml
vendored
Normal file
126
.github/workflows/integration-testing.yml
vendored
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
name: "Integration Testing"
|
||||||
|
|
||||||
|
on: [push, pull_request]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
multi-language-repo_test-autodetect-languages:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
- uses: ./../action/init
|
||||||
|
- name: Build code
|
||||||
|
shell: bash
|
||||||
|
run: ./build.sh
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
- run: |
|
||||||
|
cd "$CODEQL_ACTION_DATABASE_DIR"
|
||||||
|
# List all directories as there will be precisely one directory per database
|
||||||
|
# but there may be other files in this directory such as query suites.
|
||||||
|
if [ "$(ls -d */ | wc -l)" != 6 ] || \
|
||||||
|
[[ ! -d cpp ]] || \
|
||||||
|
[[ ! -d csharp ]] || \
|
||||||
|
[[ ! -d go ]] || \
|
||||||
|
[[ ! -d java ]] || \
|
||||||
|
[[ ! -d javascript ]] || \
|
||||||
|
[[ ! -d python ]]; then
|
||||||
|
echo "Did not find expected number of databases. Database dir contains: $(ls)"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
multi-language-repo_test-custom-queries:
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
languages: cpp,csharp,java,javascript,python
|
||||||
|
config-file: ./.github/codeql/custom-queries.yml
|
||||||
|
- name: Build code
|
||||||
|
shell: bash
|
||||||
|
run: ./build.sh
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
# Currently is not possible to analyze Go in conjunction with other languages in macos
|
||||||
|
multi-language-repo_test-go-custom-queries:
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/setup-go@v2
|
||||||
|
if: ${{ matrix.os == 'macos-latest' }}
|
||||||
|
with:
|
||||||
|
go-version: '^1.13.1'
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
languages: go
|
||||||
|
config-file: ./.github/codeql/custom-queries.yml
|
||||||
|
- name: Build code
|
||||||
|
shell: bash
|
||||||
|
run: ./build.sh
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
|
||||||
|
multi-language-repo_rubocop:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Move codeql-action
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir ../action
|
||||||
|
mv * .github ../action/
|
||||||
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
|
- name: Set up Ruby
|
||||||
|
uses: ruby/setup-ruby@v1
|
||||||
|
with:
|
||||||
|
ruby-version: 2.6
|
||||||
|
- name: Install Code Scanning integration
|
||||||
|
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
|
||||||
|
- name: Install dependencies
|
||||||
|
run: bundle install
|
||||||
|
- name: Rubocop run
|
||||||
|
run: |
|
||||||
|
bash -c "
|
||||||
|
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
|
||||||
|
[[ $? -ne 2 ]]
|
||||||
|
"
|
||||||
|
- uses: ./../action/upload-sarif
|
||||||
|
with:
|
||||||
|
sarif_file: rubocop.sarif
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
655
.github/workflows/pr-checks.yml
vendored
655
.github/workflows/pr-checks.yml
vendored
@@ -1,642 +1,71 @@
|
|||||||
name: "PR checks"
|
name: "PR checks"
|
||||||
|
|
||||||
env:
|
on: [push, pull_request]
|
||||||
GO111MODULE: auto
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main, v1]
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
lint-js:
|
tslint:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v1
|
||||||
- name: Run Lint
|
- name: tslint
|
||||||
run: npm run-script lint
|
run: npm run-script lint
|
||||||
|
|
||||||
check-js:
|
check-js:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v1
|
||||||
- name: Check generated JavaScript
|
- name: Check generated JavaScript
|
||||||
run: .github/workflows/script/check-js.sh
|
run: |
|
||||||
|
# Sanity check that repo is clean to start with
|
||||||
|
if [ ! -z "$(git status --porcelain)" ]; then
|
||||||
|
# If we get a fail here then this workflow needs attention...
|
||||||
|
>&2 echo "Failed: Repo should be clean before testing!"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
# Generate the JavaScript files
|
||||||
|
npm run-script build
|
||||||
|
# Check that repo is still clean
|
||||||
|
if [ ! -z "$(git status --porcelain)" ]; then
|
||||||
|
# If we get a fail here then the PR needs attention
|
||||||
|
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
|
||||||
|
git status
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "Success: JavaScript files are up to date"
|
||||||
|
|
||||||
check-node-modules:
|
check-node-modules:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v1
|
||||||
- name: Check node modules up to date
|
- name: Check node modules up to date
|
||||||
run: .github/workflows/script/check-node-modules.sh
|
|
||||||
|
|
||||||
npm-test:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest,macos-latest]
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: npm run-script test
|
|
||||||
run: npm run-script test
|
|
||||||
|
|
||||||
multi-language-repo_test-autodetect-languages:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
mkdir ../action
|
# Sanity check that repo is clean to start with
|
||||||
mv * .github ../action/
|
if [ ! -z "$(git status --porcelain)" ]; then
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
# If we get a fail here then this workflow needs attention...
|
||||||
mv ../action/.github/workflows .github
|
>&2 echo "Failed: Repo should be clean before testing!"
|
||||||
- uses: ./../action/init
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: ./build.sh
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
- run: |
|
|
||||||
cd "$RUNNER_TEMP/codeql_databases"
|
|
||||||
# List all directories as there will be precisely one directory per database
|
|
||||||
# but there may be other files in this directory such as query suites.
|
|
||||||
if [ "$(ls -d */ | wc -l)" != 6 ] || \
|
|
||||||
[[ ! -d cpp ]] || \
|
|
||||||
[[ ! -d csharp ]] || \
|
|
||||||
[[ ! -d go ]] || \
|
|
||||||
[[ ! -d java ]] || \
|
|
||||||
[[ ! -d javascript ]] || \
|
|
||||||
[[ ! -d python ]]; then
|
|
||||||
echo "Did not find expected number of databases. Database dir contains: $(ls)"
|
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Identify the CodeQL tool versions to integration test against.
|
# Reinstall modules and then clean to remove absolute paths
|
||||||
check-codeql-versions:
|
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
|
||||||
needs: [check-js, check-node-modules]
|
npm ci
|
||||||
runs-on: ubuntu-latest
|
npm run removeNPMAbsolutePaths
|
||||||
outputs:
|
# Check that repo is still clean
|
||||||
versions: ${{ steps.compare.outputs.versions }}
|
if [ ! -z "$(git status --porcelain)" ]; then
|
||||||
|
# If we get a fail here then the PR needs attention
|
||||||
steps:
|
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
|
||||||
- uses: actions/checkout@v2
|
git status
|
||||||
- name: Move codeql-action
|
exit 1
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- name: Init with default CodeQL bundle from the VM image
|
|
||||||
id: init-default
|
|
||||||
uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: javascript
|
|
||||||
- name: Remove empty database
|
|
||||||
# allows us to run init a second time
|
|
||||||
run: |
|
|
||||||
rm -rf "$RUNNER_TEMP/codeql_databases"
|
|
||||||
- name: Init with latest CodeQL bundle
|
|
||||||
id: init-latest
|
|
||||||
uses: ./../action/init
|
|
||||||
with:
|
|
||||||
tools: latest
|
|
||||||
languages: javascript
|
|
||||||
- name: Compare default and latest CodeQL bundle versions
|
|
||||||
id: compare
|
|
||||||
env:
|
|
||||||
CODEQL_DEFAULT: ${{ steps.init-default.outputs.codeql-path }}
|
|
||||||
CODEQL_LATEST: ${{ steps.init-latest.outputs.codeql-path }}
|
|
||||||
run: |
|
|
||||||
CODEQL_VERSION_DEFAULT="$("$CODEQL_DEFAULT" version --format terse)"
|
|
||||||
CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)"
|
|
||||||
echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT"
|
|
||||||
echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST"
|
|
||||||
if [[ "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
|
|
||||||
# Just use `tools: null` to avoid duplication in the integration tests.
|
|
||||||
VERSIONS_JSON='[null]'
|
|
||||||
else
|
|
||||||
# Use both `tools: null` and `tools: latest` in the integration tests.
|
|
||||||
VERSIONS_JSON='[null, "latest"]'
|
|
||||||
fi
|
fi
|
||||||
# Output a JSON-encoded list with the distinct versions to test against.
|
echo "Success: node_modules are up to date"
|
||||||
echo "Suggested matrix config for integration tests: $VERSIONS_JSON"
|
|
||||||
echo "::set-output name=versions::${VERSIONS_JSON}"
|
|
||||||
|
|
||||||
multi-language-repo_test-custom-queries-and-remote-config:
|
npm-test:
|
||||||
needs: [check-js, check-node-modules, check-codeql-versions]
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
languages: cpp,csharp,java,javascript,python
|
|
||||||
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: ./build.sh
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
# Currently is not possible to analyze Go in conjunction with other languages in macos
|
|
||||||
multi-language-repo_test-go-custom-queries:
|
|
||||||
needs: [check-js, check-node-modules, check-codeql-versions]
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-go@v2
|
|
||||||
if: ${{ matrix.os == 'macos-latest' }}
|
|
||||||
with:
|
|
||||||
go-version: '^1.13.1'
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: go
|
|
||||||
config-file: ./.github/codeql/custom-queries.yml
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: ./build.sh
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
go-custom-tracing:
|
|
||||||
needs: [check-js, check-node-modules, check-codeql-versions]
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
env:
|
|
||||||
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-go@v2
|
|
||||||
if: ${{ matrix.os == 'macos-latest' }}
|
|
||||||
with:
|
|
||||||
go-version: '^1.13.1'
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: go
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: go build main.go
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
go-custom-tracing-autobuild:
|
|
||||||
needs: [check-js, check-node-modules, check-codeql-versions]
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
# No need to test Go autobuild on multiple OSes since
|
|
||||||
# we're testing Go custom tracing with a manual build on all OSes.
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: go
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
- uses: ./../action/autobuild
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
multi-language-repo_rubocop:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v1
|
||||||
- name: Move codeql-action
|
- name: npm run-script test
|
||||||
shell: bash
|
run: npm run-script test
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- name: Set up Ruby
|
|
||||||
uses: ruby/setup-ruby@v1
|
|
||||||
with:
|
|
||||||
ruby-version: 2.6
|
|
||||||
- name: Install Code Scanning integration
|
|
||||||
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
|
|
||||||
- name: Install dependencies
|
|
||||||
run: bundle install
|
|
||||||
- name: Rubocop run
|
|
||||||
run: |
|
|
||||||
bash -c "
|
|
||||||
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
|
|
||||||
[[ $? -ne 2 ]]
|
|
||||||
"
|
|
||||||
- uses: ./../action/upload-sarif
|
|
||||||
with:
|
|
||||||
sarif_file: rubocop.sarif
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
test-proxy:
|
|
||||||
needs: [check-js, check-node-modules, check-codeql-versions]
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
container:
|
|
||||||
image: ubuntu:18.04
|
|
||||||
options: --dns 127.0.0.1
|
|
||||||
services:
|
|
||||||
squid-proxy:
|
|
||||||
image: datadog/squid:latest
|
|
||||||
ports:
|
|
||||||
- 3128:3128
|
|
||||||
env:
|
|
||||||
https_proxy: http://squid-proxy:3128
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: javascript
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-javascript-ubuntu:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
# Pass --config-file here, but not for other jobs in this workflow.
|
|
||||||
# This means we're testing the config file parsing in the runner
|
|
||||||
# but not slowing down all jobs unnecessarily as it doesn't add much
|
|
||||||
# testing the parsing on different operating systems and languages.
|
|
||||||
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-javascript-windows:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: windows-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-javascript-macos:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: macos-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-csharp-ubuntu:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
run: |
|
|
||||||
. ./codeql-runner/codeql-env.sh
|
|
||||||
$CODEQL_RUNNER dotnet build
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-csharp-windows:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: windows-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
shell: powershell
|
|
||||||
run: |
|
|
||||||
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
|
|
||||||
& $Env:CODEQL_RUNNER dotnet build
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-csharp-macos:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: macos-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
. ./codeql-runner/codeql-env.sh
|
|
||||||
$CODEQL_RUNNER dotnet build
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
|
|
||||||
runner-analyze-csharp-autobuild-ubuntu:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-linux autobuild
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-csharp-autobuild-windows:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: windows-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
shell: powershell
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-win.exe autobuild
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-csharp-autobuild-macos:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: macos-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd ../action/runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Run init
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos autobuild
|
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-upload-sarif:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- name: Upload with runner
|
|
||||||
run: |
|
|
||||||
# Deliberately don't use TEST_MODE here. This is specifically testing
|
|
||||||
# the compatibility with the API.
|
|
||||||
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
133
.github/workflows/python-deps.yml
vendored
133
.github/workflows/python-deps.yml
vendored
@@ -1,133 +0,0 @@
|
|||||||
name: Test Python Package Installation on Linux and Mac
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main, v1]
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
test-setup-python-scripts:
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, macos-latest]
|
|
||||||
include:
|
|
||||||
- test_dir: python-setup/tests/pipenv/requests-2
|
|
||||||
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
|
|
||||||
- test_dir: python-setup/tests/pipenv/requests-3
|
|
||||||
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
|
|
||||||
|
|
||||||
- test_dir: python-setup/tests/poetry/requests-2
|
|
||||||
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
|
|
||||||
- test_dir: python-setup/tests/poetry/requests-3
|
|
||||||
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
|
|
||||||
|
|
||||||
- test_dir: python-setup/tests/requirements/requests-2
|
|
||||||
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
|
|
||||||
- test_dir: python-setup/tests/requirements/requests-3
|
|
||||||
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
|
|
||||||
|
|
||||||
- test_dir: python-setup/tests/setup_py/requests-2
|
|
||||||
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
|
|
||||||
- test_dir: python-setup/tests/setup_py/requests-3
|
|
||||||
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
|
|
||||||
|
|
||||||
# This one shouldn't fail, but also won't install packages
|
|
||||||
- test_dir: python-setup/tests/requirements/non-standard-location
|
|
||||||
test_script: test -z $LGTM_INDEX_IMPORT_PATH
|
|
||||||
|
|
||||||
steps:
|
|
||||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Initialize CodeQL
|
|
||||||
uses: ./init
|
|
||||||
id: init
|
|
||||||
with:
|
|
||||||
tools: latest
|
|
||||||
languages: python
|
|
||||||
setup-python-dependencies: false
|
|
||||||
|
|
||||||
- name: Test Auto Package Installation
|
|
||||||
run: |
|
|
||||||
set -x
|
|
||||||
$GITHUB_WORKSPACE/python-setup/install_tools.sh
|
|
||||||
|
|
||||||
cd $GITHUB_WORKSPACE/${{ matrix.test_dir }}
|
|
||||||
|
|
||||||
case ${{ matrix.os }} in
|
|
||||||
ubuntu-latest*) basePath="/opt";;
|
|
||||||
macos-latest*) basePath="/Users/runner";;
|
|
||||||
esac
|
|
||||||
echo ${basePath}
|
|
||||||
|
|
||||||
$GITHUB_WORKSPACE/python-setup/auto_install_packages.py "$(dirname ${{steps.init.outputs.codeql-path}})"
|
|
||||||
- name: Setup for extractor
|
|
||||||
run: |
|
|
||||||
echo $CODEQL_PYTHON
|
|
||||||
# only run if $CODEQL_PYTHON is set
|
|
||||||
if [ ! -z $CODEQL_PYTHON ]; then
|
|
||||||
$GITHUB_WORKSPACE/python-setup/tests/from_python_exe.py $CODEQL_PYTHON;
|
|
||||||
fi
|
|
||||||
- name: Verify packages installed
|
|
||||||
run: |
|
|
||||||
${{ matrix.test_script }}
|
|
||||||
|
|
||||||
test-setup-python-scripts-windows:
|
|
||||||
runs-on: windows-latest
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- test_dir: python-setup/tests/pipenv/requests-2
|
|
||||||
python_version: 2
|
|
||||||
- test_dir: python-setup/tests/pipenv/requests-3
|
|
||||||
python_version: 3
|
|
||||||
|
|
||||||
- test_dir: python-setup/tests/poetry/requests-2
|
|
||||||
python_version: 2
|
|
||||||
- test_dir: python-setup/tests/poetry/requests-3
|
|
||||||
python_version: 3
|
|
||||||
|
|
||||||
- test_dir: python-setup/tests/requirements/requests-2
|
|
||||||
python_version: 2
|
|
||||||
- test_dir: python-setup/tests/requirements/requests-3
|
|
||||||
python_version: 3
|
|
||||||
|
|
||||||
- test_dir: python-setup/tests/setup_py/requests-2
|
|
||||||
python_version: 2
|
|
||||||
- test_dir: python-setup/tests/setup_py/requests-3
|
|
||||||
python_version: 3
|
|
||||||
|
|
||||||
steps:
|
|
||||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Initialize CodeQL
|
|
||||||
uses: ./init
|
|
||||||
with:
|
|
||||||
tools: latest
|
|
||||||
languages: python
|
|
||||||
setup-python-dependencies: false
|
|
||||||
|
|
||||||
- name: Test Auto Package Installation
|
|
||||||
run: |
|
|
||||||
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\install_tools.ps1"
|
|
||||||
powershell -File $cmd
|
|
||||||
|
|
||||||
cd $Env:GITHUB_WORKSPACE\\${{ matrix.test_dir }}
|
|
||||||
$DefaultsPath = Join-Path (Join-Path $Env:GITHUB_WORKSPACE "src") "defaults.json"
|
|
||||||
$CodeQLBundleName = (Get-Content -Raw -Path $DefaultsPath | ConvertFrom-Json).bundleVersion
|
|
||||||
$CodeQLVersion = "0.0.0-" + $CodeQLBundleName.split("-")[-1]
|
|
||||||
py -3 $Env:GITHUB_WORKSPACE\\python-setup\\auto_install_packages.py C:\\hostedtoolcache\\windows\\CodeQL\\$CodeQLVersion\\x64\\codeql
|
|
||||||
- name: Setup for extractor
|
|
||||||
run: |
|
|
||||||
echo $Env:CODEQL_PYTHON
|
|
||||||
|
|
||||||
py -3 $Env:GITHUB_WORKSPACE\\python-setup\\tests\\from_python_exe.py $Env:CODEQL_PYTHON
|
|
||||||
- name: Verify packages installed
|
|
||||||
run: |
|
|
||||||
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\tests\\check_requests_123.ps1"
|
|
||||||
powershell -File $cmd ${{ matrix.python_version }}
|
|
||||||
54
.github/workflows/release-runner.yml
vendored
54
.github/workflows/release-runner.yml
vendored
@@ -1,54 +0,0 @@
|
|||||||
name: Release runner
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
bundle-tag:
|
|
||||||
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
|
|
||||||
required: false
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
release-runner:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
extension: ["linux", "macos", "win.exe"]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: codeql-runner-${{matrix.extension}}
|
|
||||||
path: runner/dist/codeql-runner-${{matrix.extension}}
|
|
||||||
|
|
||||||
- name: Resolve Upload URL for the release
|
|
||||||
if: ${{ github.event.inputs.bundle-tag != null }}
|
|
||||||
id: save_url
|
|
||||||
run: |
|
|
||||||
UPLOAD_URL=$(curl -sS \
|
|
||||||
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
|
|
||||||
-H "Accept: application/json" \
|
|
||||||
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
|
|
||||||
echo ${UPLOAD_URL}
|
|
||||||
echo "::set-output name=upload_url::${UPLOAD_URL}"
|
|
||||||
|
|
||||||
- name: Upload Platform Package
|
|
||||||
if: ${{ github.event.inputs.bundle-tag != null }}
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ steps.save_url.outputs.upload_url }}
|
|
||||||
asset_path: runner/dist/codeql-runner-${{matrix.extension}}
|
|
||||||
asset_name: codeql-runner-${{matrix.extension}}
|
|
||||||
asset_content_type: application/octet-stream
|
|
||||||
21
.github/workflows/script/check-js.sh
vendored
21
.github/workflows/script/check-js.sh
vendored
@@ -1,21 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
set -eu
|
|
||||||
|
|
||||||
# Sanity check that repo is clean to start with
|
|
||||||
if [ ! -z "$(git status --porcelain)" ]; then
|
|
||||||
# If we get a fail here then this workflow needs attention...
|
|
||||||
>&2 echo "Failed: Repo should be clean before testing!"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
# Wipe the lib directory incase there are extra unnecessary files in there
|
|
||||||
rm -rf lib
|
|
||||||
# Generate the JavaScript files
|
|
||||||
npm run-script build
|
|
||||||
# Check that repo is still clean
|
|
||||||
if [ ! -z "$(git status --porcelain)" ]; then
|
|
||||||
# If we get a fail here then the PR needs attention
|
|
||||||
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
|
|
||||||
git status
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
echo "Success: JavaScript files are up to date"
|
|
||||||
21
.github/workflows/script/check-node-modules.sh
vendored
21
.github/workflows/script/check-node-modules.sh
vendored
@@ -1,21 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
set -eu
|
|
||||||
|
|
||||||
# Sanity check that repo is clean to start with
|
|
||||||
if [ ! -z "$(git status --porcelain)" ]; then
|
|
||||||
# If we get a fail here then this workflow needs attention...
|
|
||||||
>&2 echo "Failed: Repo should be clean before testing!"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
# Reinstall modules and then clean to remove absolute paths
|
|
||||||
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
|
|
||||||
npm ci
|
|
||||||
npm run removeNPMAbsolutePaths
|
|
||||||
# Check that repo is still clean
|
|
||||||
if [ ! -z "$(git status --porcelain)" ]; then
|
|
||||||
# If we get a fail here then the PR needs attention
|
|
||||||
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
|
|
||||||
git status
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
echo "Success: node_modules are up to date"
|
|
||||||
73
.github/workflows/split.yml
vendored
73
.github/workflows/split.yml
vendored
@@ -1,73 +0,0 @@
|
|||||||
#
|
|
||||||
# Split the CodeQL Bundle into platform bundles
|
|
||||||
#
|
|
||||||
# Instructions:
|
|
||||||
# 1. Upload the new codeql-bundle (codeql-bundle.tar.gz) as an asset of the
|
|
||||||
# release (codeql-bundle-20200826)
|
|
||||||
# 2. Take note of the CLI Release used by the bundle (e.g., v2.2.5)
|
|
||||||
# 3. Manually launch this workflow file (via the Actions UI) specifying
|
|
||||||
# - The CLI Release (e.g., v2.2.5)
|
|
||||||
# - The release tag (e.g., codeql-bundle-20200826)
|
|
||||||
# 4. If everything succeeds you should see 3 new assets.
|
|
||||||
#
|
|
||||||
|
|
||||||
name: Split Bundle
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
cli-release:
|
|
||||||
description: 'CodeQL CLI Release (e.g., "v2.2.5")'
|
|
||||||
required: true
|
|
||||||
bundle-tag:
|
|
||||||
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
|
|
||||||
required: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
CLI_RELEASE: "${{ github.event.inputs.cli-release }}"
|
|
||||||
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
platform: ["linux64", "osx64", "win64"]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Resolve Upload URL for the release
|
|
||||||
id: save_url
|
|
||||||
run: |
|
|
||||||
UPLOAD_URL=$(curl -sS \
|
|
||||||
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
|
|
||||||
-H "Accept: application/json" \
|
|
||||||
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
|
|
||||||
echo ${UPLOAD_URL}
|
|
||||||
echo "::set-output name=upload_url::${UPLOAD_URL}"
|
|
||||||
|
|
||||||
- name: Download CodeQL CLI and Bundle
|
|
||||||
run: |
|
|
||||||
wget --no-verbose "https://github.com/${GITHUB_REPOSITORY}/releases/download/${RELEASE_TAG}/codeql-bundle.tar.gz"
|
|
||||||
wget --no-verbose "https://github.com/github/codeql-cli-binaries/releases/download/${CLI_RELEASE}/codeql-${{matrix.platform}}.zip"
|
|
||||||
|
|
||||||
- name: Create Platform Package
|
|
||||||
# Replace the codeql-binaries with the platform specific ones
|
|
||||||
run: |
|
|
||||||
gunzip codeql-bundle.tar.gz
|
|
||||||
tar -f codeql-bundle.tar --delete codeql
|
|
||||||
unzip -q codeql-${{matrix.platform}}.zip
|
|
||||||
tar -f codeql-bundle.tar --append codeql
|
|
||||||
gzip codeql-bundle.tar
|
|
||||||
mv codeql-bundle.tar.gz codeql-bundle-${{matrix.platform}}.tar.gz
|
|
||||||
du -sh codeql-bundle-${{matrix.platform}}.tar.gz
|
|
||||||
|
|
||||||
- name: Upload Platform Package
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ steps.save_url.outputs.upload_url }}
|
|
||||||
asset_path: ./codeql-bundle-${{matrix.platform}}.tar.gz
|
|
||||||
asset_name: codeql-bundle-${{matrix.platform}}.tar.gz
|
|
||||||
asset_content_type: application/tar+gzip
|
|
||||||
2
.github/workflows/update-release-branch.yml
vendored
2
.github/workflows/update-release-branch.yml
vendored
@@ -7,12 +7,10 @@ on:
|
|||||||
# curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
|
# curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
|
||||||
# Replace <token> with a personal access token from this page: https://github.com/settings/tokens
|
# Replace <token> with a personal access token from this page: https://github.com/settings/tokens
|
||||||
types: [update-release-branch]
|
types: [update-release-branch]
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
update:
|
update:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: ${{ github.repository == 'github/codeql-action' }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
with:
|
with:
|
||||||
|
|||||||
@@ -1,43 +0,0 @@
|
|||||||
name: Update Supported Enterprise Server Versions
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: "0 0 * * *"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
update-supported-enterprise-server-versions:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Setup Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: "3.7"
|
|
||||||
- name: Checkout CodeQL Action
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
- name: Checkout Enterprise Releases
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
repository: github/enterprise-releases
|
|
||||||
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}
|
|
||||||
path: ${{ github.workspace }}/enterprise-releases/
|
|
||||||
- name: Update Supported Enterprise Server Versions
|
|
||||||
run: |
|
|
||||||
cd ./.github/workflows/update-supported-enterprise-server-versions/
|
|
||||||
python3 -m pip install pipenv
|
|
||||||
pipenv install
|
|
||||||
pipenv run ./update.py
|
|
||||||
rm --recursive "$ENTERPRISE_RELEASES_PATH"
|
|
||||||
npm run build
|
|
||||||
env:
|
|
||||||
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
|
|
||||||
- name: Commit Changes
|
|
||||||
uses: peter-evans/create-pull-request@c7f493a8000b8aeb17a1332e326ba76b57cb83eb # v3.4.1
|
|
||||||
with:
|
|
||||||
commit-message: Update supported GitHub Enterprise Server versions.
|
|
||||||
title: Update supported GitHub Enterprise Server versions.
|
|
||||||
body: ""
|
|
||||||
author: GitHub <noreply@github.com>
|
|
||||||
branch: update-supported-enterprise-server-versions
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
[[source]]
|
|
||||||
name = "pypi"
|
|
||||||
url = "https://pypi.org/simple"
|
|
||||||
verify_ssl = true
|
|
||||||
|
|
||||||
[dev-packages]
|
|
||||||
|
|
||||||
[packages]
|
|
||||||
semver = "*"
|
|
||||||
27
.github/workflows/update-supported-enterprise-server-versions/Pipfile.lock
generated
vendored
27
.github/workflows/update-supported-enterprise-server-versions/Pipfile.lock
generated
vendored
@@ -1,27 +0,0 @@
|
|||||||
{
|
|
||||||
"_meta": {
|
|
||||||
"hash": {
|
|
||||||
"sha256": "e3ba923dcb4888e05de5448c18a732bf40197e80fabfa051a61c01b22c504879"
|
|
||||||
},
|
|
||||||
"pipfile-spec": 6,
|
|
||||||
"requires": {},
|
|
||||||
"sources": [
|
|
||||||
{
|
|
||||||
"name": "pypi",
|
|
||||||
"url": "https://pypi.org/simple",
|
|
||||||
"verify_ssl": true
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"default": {
|
|
||||||
"semver": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4",
|
|
||||||
"sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==2.13.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"develop": {}
|
|
||||||
}
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
import datetime
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import pathlib
|
|
||||||
|
|
||||||
import semver
|
|
||||||
|
|
||||||
_API_COMPATIBILITY_PATH = pathlib.Path(__file__).absolute().parents[3] / "src" / "api-compatibility.json"
|
|
||||||
_ENTERPRISE_RELEASES_PATH = pathlib.Path(os.environ["ENTERPRISE_RELEASES_PATH"])
|
|
||||||
_RELEASE_FILE_PATH = _ENTERPRISE_RELEASES_PATH / "releases.json"
|
|
||||||
_FIRST_SUPPORTED_RELEASE = semver.VersionInfo.parse("2.22.0") # Versions older than this did not include Code Scanning.
|
|
||||||
|
|
||||||
def main():
|
|
||||||
api_compatibility_data = json.loads(_API_COMPATIBILITY_PATH.read_text())
|
|
||||||
|
|
||||||
releases = json.loads(_RELEASE_FILE_PATH.read_text())
|
|
||||||
oldest_supported_release = None
|
|
||||||
newest_supported_release = semver.VersionInfo.parse(api_compatibility_data["maximumVersion"] + ".0")
|
|
||||||
|
|
||||||
for release_version_string, release_data in releases.items():
|
|
||||||
release_version = semver.VersionInfo.parse(release_version_string + ".0")
|
|
||||||
if release_version < _FIRST_SUPPORTED_RELEASE:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if release_version > newest_supported_release:
|
|
||||||
feature_freeze_date = datetime.date.fromisoformat(release_data["feature_freeze"])
|
|
||||||
if feature_freeze_date < datetime.date.today() + datetime.timedelta(weeks=2):
|
|
||||||
newest_supported_release = release_version
|
|
||||||
|
|
||||||
if oldest_supported_release is None or release_version < oldest_supported_release:
|
|
||||||
end_of_life_date = datetime.date.fromisoformat(release_data["end"])
|
|
||||||
if end_of_life_date > datetime.date.today():
|
|
||||||
oldest_supported_release = release_version
|
|
||||||
|
|
||||||
api_compatibility_data = {
|
|
||||||
"minimumVersion": f"{oldest_supported_release.major}.{oldest_supported_release.minor}",
|
|
||||||
"maximumVersion": f"{newest_supported_release.major}.{newest_supported_release.minor}",
|
|
||||||
}
|
|
||||||
_API_COMPATIBILITY_PATH.write_text(json.dumps(api_compatibility_data, sort_keys=True) + "\n")
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,2 +0,0 @@
|
|||||||
/runner/dist/
|
|
||||||
/runner/node_modules/
|
|
||||||
25
.vscode/launch.json
vendored
25
.vscode/launch.json
vendored
@@ -1,25 +0,0 @@
|
|||||||
{
|
|
||||||
// Use IntelliSense to learn about possible attributes.
|
|
||||||
// Hover to view descriptions of existing attributes.
|
|
||||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
|
||||||
"version": "0.2.0",
|
|
||||||
"configurations": [
|
|
||||||
{
|
|
||||||
"type": "node",
|
|
||||||
"request": "launch",
|
|
||||||
"name": "Debug AVA test file",
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/node_modules/.bin/ava",
|
|
||||||
"runtimeArgs": [
|
|
||||||
"${file}",
|
|
||||||
"--break",
|
|
||||||
"--serial",
|
|
||||||
"--timeout=20m"
|
|
||||||
],
|
|
||||||
"port": 9229,
|
|
||||||
"outputCapture": "std",
|
|
||||||
"skipFiles": [
|
|
||||||
"<node_internals>/**/*.js"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
10
.vscode/settings.json
vendored
10
.vscode/settings.json
vendored
@@ -1,10 +0,0 @@
|
|||||||
{
|
|
||||||
"files.exclude": {
|
|
||||||
// include the defaults from VS Code
|
|
||||||
"**/.git": true,
|
|
||||||
"**/.DS_Store": true,
|
|
||||||
|
|
||||||
// transpiled JavaScript
|
|
||||||
"lib": true,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
# Contributing
|
## Contributing
|
||||||
|
|
||||||
[fork]: https://github.com/github/codeql-action/fork
|
[fork]: https://github.com/github/codeql-action/fork
|
||||||
[pr]: https://github.com/github/codeql-action/compare
|
[pr]: https://github.com/github/codeql-action/compare
|
||||||
@@ -10,59 +10,13 @@ Contributions to this project are [released](https://help.github.com/articles/gi
|
|||||||
|
|
||||||
Please note that this project is released with a [Contributor Code of Conduct][code-of-conduct]. By participating in this project you agree to abide by its terms.
|
Please note that this project is released with a [Contributor Code of Conduct][code-of-conduct]. By participating in this project you agree to abide by its terms.
|
||||||
|
|
||||||
## Development and Testing
|
|
||||||
|
|
||||||
Before you start, ensure that you have a recent version of node installed. You can see which version of node is used by the action in `init/action.yml`.
|
|
||||||
|
|
||||||
### Common tasks
|
|
||||||
|
|
||||||
* Transpile the TypeScript to JavaScript: `npm run build`. Note that the JavaScript files are committed to git.
|
|
||||||
* Run tests: `npm run test`. You’ll need to ensure that the JavaScript files are up-to-date first by running the command above.
|
|
||||||
* Run the linter: `npm run lint`.
|
|
||||||
|
|
||||||
This project also includes configuration to run tests from VSCode (with support for breakpoints) - open the test file you wish to run and choose "Debug AVA test file" from the Run menu in the Run panel.
|
|
||||||
|
|
||||||
### Running the action
|
|
||||||
|
|
||||||
To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests.
|
|
||||||
|
|
||||||
### Running the action locally
|
|
||||||
|
|
||||||
It is possible to run this action locally via [act](https://github.com/nektos/act) via the following steps:
|
|
||||||
|
|
||||||
1. Create a GitHub [Personal Access Token](https://github.com/settings/tokens) (PAT).
|
|
||||||
1. Install [act](https://github.com/nektos/act) v0.2.10 or greater.
|
|
||||||
1. Add a `.env` file in the root of the project you are running:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
CODEQL_LOCAL_RUN=true
|
|
||||||
GITHUB_SERVER_URL=https://github.com
|
|
||||||
|
|
||||||
# Optional, for better logging
|
|
||||||
GITHUB_JOB=<ANY_JOB_NAME>
|
|
||||||
```
|
|
||||||
|
|
||||||
1. Run `act -j codeql -s GITHUB_TOKEN=<PAT>`
|
|
||||||
|
|
||||||
Running locally will generate the CodeQL database and run all the queries, but it will avoid uploading and reporting results to GitHub. Note that this must be done on a repository that _consumes_ this action, not this repository. The use case is to debug failures of this action on specific repositories.
|
|
||||||
|
|
||||||
### Integration tests
|
|
||||||
|
|
||||||
As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you’re making, you may want to add a test to this file or extend an existing one.
|
|
||||||
|
|
||||||
### Building the CodeQL runner
|
|
||||||
|
|
||||||
Navigate to the `runner` directory and run `npm install` to install dependencies needed only for compiling the CodeQL runner. Run `npm run build-runner` to output files to the `runner/dist` directory.
|
|
||||||
|
|
||||||
## Submitting a pull request
|
## Submitting a pull request
|
||||||
|
|
||||||
1. [Fork][fork] and clone the repository
|
1. [Fork][fork] and clone the repository
|
||||||
2. Create a new branch: `git checkout -b my-branch-name`
|
2. Create a new branch: `git checkout -b my-branch-name`
|
||||||
3. Make your change, add tests, and make sure the tests still pass
|
3. Make your change, add tests, and make sure the tests still pass
|
||||||
4. Push to your fork and [submit a pull request][pr]
|
4. Push to your fork and [submit a pull request][pr]
|
||||||
5. Pat yourself on the back and wait for your pull request to be reviewed and merged.
|
5. Pat your self on the back and wait for your pull request to be reviewed and merged.
|
||||||
|
|
||||||
If you're a GitHub staff member, you can merge your own PR once it's approved; for external contributors, GitHub staff will merge your PR once it's approved.
|
|
||||||
|
|
||||||
Here are a few things you can do that will increase the likelihood of your pull request being accepted:
|
Here are a few things you can do that will increase the likelihood of your pull request being accepted:
|
||||||
|
|
||||||
|
|||||||
53
README.md
53
README.md
@@ -22,25 +22,31 @@ on:
|
|||||||
push:
|
push:
|
||||||
pull_request:
|
pull_request:
|
||||||
schedule:
|
schedule:
|
||||||
# ┌───────────── minute (0 - 59)
|
- cron: '0 0 * * 0'
|
||||||
# │ ┌───────────── hour (0 - 23)
|
|
||||||
# │ │ ┌───────────── day of the month (1 - 31)
|
|
||||||
# │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
|
|
||||||
# │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
|
|
||||||
# │ │ │ │ │
|
|
||||||
# │ │ │ │ │
|
|
||||||
# │ │ │ │ │
|
|
||||||
# * * * * *
|
|
||||||
- cron: '30 1 * * 0'
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
CodeQL-Build:
|
CodeQL-Build:
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
|
||||||
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
|
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
# Must fetch at least the immediate parents so that if this is
|
||||||
|
# a pull request then we can checkout the head of the pull request.
|
||||||
|
# Only include this option if you are running this workflow on pull requests.
|
||||||
|
fetch-depth: 2
|
||||||
|
|
||||||
|
# If this run was triggered by a pull request event then checkout
|
||||||
|
# the head of the pull request instead of the merge commit.
|
||||||
|
# Only include this step if you are running this workflow on pull requests.
|
||||||
|
- run: git checkout HEAD^2
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
@@ -96,32 +102,7 @@ Use the `config-file` parameter of the `init` action to enable the configuration
|
|||||||
config-file: ./.github/codeql/codeql-config.yml
|
config-file: ./.github/codeql/codeql-config.yml
|
||||||
```
|
```
|
||||||
|
|
||||||
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
|
The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
|
||||||
|
|
||||||
```yaml
|
|
||||||
- uses: github/codeql-action/init@v1
|
|
||||||
with:
|
|
||||||
config-file: owner/repo/codeql-config.yml@branch
|
|
||||||
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
|
|
||||||
```
|
|
||||||
|
|
||||||
For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
|
|
||||||
|
|
||||||
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
- uses: github/codeql-action/init@v1
|
|
||||||
with:
|
|
||||||
queries: <local-or-remote-query>,<another-query>
|
|
||||||
```
|
|
||||||
|
|
||||||
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
- uses: github/codeql-action/init@v1
|
|
||||||
with:
|
|
||||||
queries: +<local-or-remote-query>,<another-query>
|
|
||||||
```
|
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ author: 'GitHub'
|
|||||||
inputs:
|
inputs:
|
||||||
check_name:
|
check_name:
|
||||||
description: The name of the check run to add text to.
|
description: The name of the check run to add text to.
|
||||||
required: false
|
|
||||||
output:
|
output:
|
||||||
description: The path of the directory in which to save the SARIF results
|
description: The path of the directory in which to save the SARIF results
|
||||||
required: false
|
required: false
|
||||||
@@ -12,25 +11,14 @@ inputs:
|
|||||||
upload:
|
upload:
|
||||||
description: Upload the SARIF file
|
description: Upload the SARIF file
|
||||||
required: false
|
required: false
|
||||||
default: "true"
|
default: true
|
||||||
ram:
|
ram:
|
||||||
description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
|
description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
|
||||||
required: false
|
required: false
|
||||||
add-snippets:
|
|
||||||
description: Specify whether or not to add code snippets to the output sarif file.
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
threads:
|
|
||||||
description: The number of threads to be used by CodeQL.
|
|
||||||
required: false
|
|
||||||
checkout_path:
|
|
||||||
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
|
|
||||||
required: false
|
|
||||||
default: ${{ github.workspace }}
|
|
||||||
token:
|
token:
|
||||||
default: ${{ github.token }}
|
default: ${{ github.token }}
|
||||||
matrix:
|
matrix:
|
||||||
default: ${{ toJson(matrix) }}
|
default: ${{ toJson(matrix) }}
|
||||||
runs:
|
runs:
|
||||||
using: 'node12'
|
using: 'node12'
|
||||||
main: '../lib/analyze-action.js'
|
main: '../lib/finalize-db.js'
|
||||||
|
|||||||
@@ -8,4 +8,4 @@ inputs:
|
|||||||
default: ${{ toJson(matrix) }}
|
default: ${{ toJson(matrix) }}
|
||||||
runs:
|
runs:
|
||||||
using: 'node12'
|
using: 'node12'
|
||||||
main: '../lib/autobuild-action.js'
|
main: '../lib/autobuild.js'
|
||||||
@@ -1,11 +1,11 @@
|
|||||||
name: 'CodeQL: Init'
|
name: 'CodeQL: Init'
|
||||||
description: 'Set up CodeQL'
|
description: 'Setup the CodeQL tracer'
|
||||||
author: 'GitHub'
|
author: 'GitHub'
|
||||||
inputs:
|
inputs:
|
||||||
tools:
|
tools:
|
||||||
description: URL of CodeQL tools
|
description: URL of CodeQL tools
|
||||||
required: false
|
required: false
|
||||||
# If not specified the Action will check in several places until it finds the CodeQL tools.
|
default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz
|
||||||
languages:
|
languages:
|
||||||
description: The languages to be analysed
|
description: The languages to be analysed
|
||||||
required: false
|
required: false
|
||||||
@@ -16,19 +16,6 @@ inputs:
|
|||||||
config-file:
|
config-file:
|
||||||
description: Path of the config file to use
|
description: Path of the config file to use
|
||||||
required: false
|
required: false
|
||||||
queries:
|
|
||||||
description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
|
|
||||||
required: false
|
|
||||||
external-repository-token:
|
|
||||||
description: A token for fetching external config files and queries if they reside in a private repository.
|
|
||||||
required: false
|
|
||||||
setup-python-dependencies:
|
|
||||||
description: Try to auto-install your python dependencies
|
|
||||||
required: true
|
|
||||||
default: 'true'
|
|
||||||
outputs:
|
|
||||||
codeql-path:
|
|
||||||
description: The path of the CodeQL binary used for analysis
|
|
||||||
runs:
|
runs:
|
||||||
using: 'node12'
|
using: 'node12'
|
||||||
main: '../lib/init-action.js'
|
main: '../lib/setup-tracer.js'
|
||||||
|
|||||||
542
lib/actions-util.js
generated
542
lib/actions-util.js
generated
@@ -1,542 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const fs = __importStar(require("fs"));
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const core = __importStar(require("@actions/core"));
|
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
|
||||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
|
||||||
const yaml = __importStar(require("js-yaml"));
|
|
||||||
const api = __importStar(require("./api-client"));
|
|
||||||
const sharedEnv = __importStar(require("./shared-environment"));
|
|
||||||
const util_1 = require("./util");
|
|
||||||
/**
|
|
||||||
* Wrapper around core.getInput for inputs that always have a value.
|
|
||||||
* Also see getOptionalInput.
|
|
||||||
*
|
|
||||||
* This allows us to get stronger type checking of required/optional inputs
|
|
||||||
* and make behaviour more consistent between actions and the runner.
|
|
||||||
*/
|
|
||||||
function getRequiredInput(name) {
|
|
||||||
return core.getInput(name, { required: true });
|
|
||||||
}
|
|
||||||
exports.getRequiredInput = getRequiredInput;
|
|
||||||
/**
|
|
||||||
* Wrapper around core.getInput that converts empty inputs to undefined.
|
|
||||||
* Also see getRequiredInput.
|
|
||||||
*
|
|
||||||
* This allows us to get stronger type checking of required/optional inputs
|
|
||||||
* and make behaviour more consistent between actions and the runner.
|
|
||||||
*/
|
|
||||||
function getOptionalInput(name) {
|
|
||||||
const value = core.getInput(name);
|
|
||||||
return value.length > 0 ? value : undefined;
|
|
||||||
}
|
|
||||||
exports.getOptionalInput = getOptionalInput;
|
|
||||||
/**
|
|
||||||
* Get an environment parameter, but throw an error if it is not set.
|
|
||||||
*/
|
|
||||||
function getRequiredEnvParam(paramName) {
|
|
||||||
const value = process.env[paramName];
|
|
||||||
if (value === undefined || value.length === 0) {
|
|
||||||
throw new Error(`${paramName} environment variable must be set`);
|
|
||||||
}
|
|
||||||
core.debug(`${paramName}=${value}`);
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
exports.getRequiredEnvParam = getRequiredEnvParam;
|
|
||||||
function getTemporaryDirectory() {
|
|
||||||
const value = process.env["CODEQL_ACTION_TEMP"];
|
|
||||||
return value !== undefined && value !== ""
|
|
||||||
? value
|
|
||||||
: getRequiredEnvParam("RUNNER_TEMP");
|
|
||||||
}
|
|
||||||
exports.getTemporaryDirectory = getTemporaryDirectory;
|
|
||||||
/**
|
|
||||||
* Ensures all required environment variables are set in the context of a local run.
|
|
||||||
*/
|
|
||||||
function prepareLocalRunEnvironment() {
|
|
||||||
if (!util_1.isLocalRun()) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
core.debug("Action is running locally.");
|
|
||||||
if (!process.env.GITHUB_JOB) {
|
|
||||||
core.exportVariable("GITHUB_JOB", "UNKNOWN-JOB");
|
|
||||||
}
|
|
||||||
if (!process.env.CODEQL_ACTION_ANALYSIS_KEY) {
|
|
||||||
core.exportVariable("CODEQL_ACTION_ANALYSIS_KEY", `LOCAL-RUN:${process.env.GITHUB_JOB}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
|
|
||||||
/**
|
|
||||||
* Gets the SHA of the commit that is currently checked out.
|
|
||||||
*/
|
|
||||||
exports.getCommitOid = async function (ref = "HEAD") {
|
|
||||||
// Try to use git to get the current commit SHA. If that fails then
|
|
||||||
// log but otherwise silently fall back to using the SHA from the environment.
|
|
||||||
// The only time these two values will differ is during analysis of a PR when
|
|
||||||
// the workflow has changed the current commit to the head commit instead of
|
|
||||||
// the merge commit, which must mean that git is available.
|
|
||||||
// Even if this does go wrong, it's not a huge problem for the alerts to
|
|
||||||
// reported on the merge commit.
|
|
||||||
try {
|
|
||||||
let commitOid = "";
|
|
||||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["rev-parse", ref], {
|
|
||||||
silent: true,
|
|
||||||
listeners: {
|
|
||||||
stdout: (data) => {
|
|
||||||
commitOid += data.toString();
|
|
||||||
},
|
|
||||||
stderr: (data) => {
|
|
||||||
process.stderr.write(data);
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}).exec();
|
|
||||||
return commitOid.trim();
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
|
|
||||||
return getRequiredEnvParam("GITHUB_SHA");
|
|
||||||
}
|
|
||||||
};
|
|
||||||
function isObject(o) {
|
|
||||||
return o !== null && typeof o === "object";
|
|
||||||
}
|
|
||||||
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
|
||||||
function escapeRegExp(string) {
|
|
||||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
|
||||||
}
|
|
||||||
function patternToRegExp(value) {
|
|
||||||
return new RegExp(`^${value
|
|
||||||
.toString()
|
|
||||||
.split(GLOB_PATTERN)
|
|
||||||
.reduce(function (arr, cur) {
|
|
||||||
if (cur === "**") {
|
|
||||||
arr.push(".*?");
|
|
||||||
}
|
|
||||||
else if (cur === "*") {
|
|
||||||
arr.push("[^/]*?");
|
|
||||||
}
|
|
||||||
else if (cur) {
|
|
||||||
arr.push(escapeRegExp(cur));
|
|
||||||
}
|
|
||||||
return arr;
|
|
||||||
}, [])
|
|
||||||
.join("")}$`);
|
|
||||||
}
|
|
||||||
// this function should return true if patternA is a superset of patternB
|
|
||||||
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
|
||||||
function patternIsSuperset(patternA, patternB) {
|
|
||||||
return patternToRegExp(patternA).test(patternB);
|
|
||||||
}
|
|
||||||
exports.patternIsSuperset = patternIsSuperset;
|
|
||||||
function branchesToArray(branches) {
|
|
||||||
if (typeof branches === "string") {
|
|
||||||
return [branches];
|
|
||||||
}
|
|
||||||
if (Array.isArray(branches)) {
|
|
||||||
if (branches.length === 0) {
|
|
||||||
return "**";
|
|
||||||
}
|
|
||||||
return branches;
|
|
||||||
}
|
|
||||||
return "**";
|
|
||||||
}
|
|
||||||
function toCodedErrors(errors) {
|
|
||||||
return Object.entries(errors).reduce((acc, [key, value]) => {
|
|
||||||
acc[key] = { message: value, code: key };
|
|
||||||
return acc;
|
|
||||||
}, {});
|
|
||||||
}
|
|
||||||
// code to send back via status report
|
|
||||||
// message to add as a warning annotation to the run
|
|
||||||
exports.WorkflowErrors = toCodedErrors({
|
|
||||||
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
|
||||||
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
|
||||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
|
||||||
});
|
|
||||||
function getWorkflowErrors(doc) {
|
|
||||||
var _a, _b, _c, _d, _e, _f, _g, _h;
|
|
||||||
const errors = [];
|
|
||||||
const jobName = process.env.GITHUB_JOB;
|
|
||||||
if (jobName) {
|
|
||||||
const job = (_b = (_a = doc) === null || _a === void 0 ? void 0 : _a.jobs) === null || _b === void 0 ? void 0 : _b[jobName];
|
|
||||||
const steps = (_c = job) === null || _c === void 0 ? void 0 : _c.steps;
|
|
||||||
if (Array.isArray(steps)) {
|
|
||||||
for (const step of steps) {
|
|
||||||
// this was advice that we used to give in the README
|
|
||||||
// we actually want to run the analysis on the merge commit
|
|
||||||
// to produce results that are more inline with expectations
|
|
||||||
// (i.e: this is what will happen if you merge this PR)
|
|
||||||
// and avoid some race conditions
|
|
||||||
if (((_d = step) === null || _d === void 0 ? void 0 : _d.run) === "git checkout HEAD^2") {
|
|
||||||
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let missingPush = false;
|
|
||||||
if (doc.on === undefined) {
|
|
||||||
// this is not a valid config
|
|
||||||
}
|
|
||||||
else if (typeof doc.on === "string") {
|
|
||||||
if (doc.on === "pull_request") {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (Array.isArray(doc.on)) {
|
|
||||||
const hasPush = doc.on.includes("push");
|
|
||||||
const hasPullRequest = doc.on.includes("pull_request");
|
|
||||||
if (hasPullRequest && !hasPush) {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (isObject(doc.on)) {
|
|
||||||
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
|
||||||
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
|
|
||||||
if (!hasPush && hasPullRequest) {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
if (hasPush && hasPullRequest) {
|
|
||||||
const paths = (_e = doc.on.push) === null || _e === void 0 ? void 0 : _e.paths;
|
|
||||||
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
|
||||||
// if they didn't change any files
|
|
||||||
// currently we cannot go back through the history and find the most recent baseline
|
|
||||||
if (Array.isArray(paths) && paths.length > 0) {
|
|
||||||
errors.push(exports.WorkflowErrors.PathsSpecified);
|
|
||||||
}
|
|
||||||
const pathsIgnore = (_f = doc.on.push) === null || _f === void 0 ? void 0 : _f["paths-ignore"];
|
|
||||||
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
|
||||||
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// if doc.on.pull_request is null that means 'all branches'
|
|
||||||
// if doc.on.pull_request is undefined that means 'off'
|
|
||||||
// we only want to check for mismatched branches if pull_request is on.
|
|
||||||
if (doc.on.pull_request !== undefined) {
|
|
||||||
const push = branchesToArray((_g = doc.on.push) === null || _g === void 0 ? void 0 : _g.branches);
|
|
||||||
if (push !== "**") {
|
|
||||||
const pull_request = branchesToArray((_h = doc.on.pull_request) === null || _h === void 0 ? void 0 : _h.branches);
|
|
||||||
if (pull_request !== "**") {
|
|
||||||
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
|
||||||
if (difference.length > 0) {
|
|
||||||
// there are branches in pull_request that may not have a baseline
|
|
||||||
// because we are not building them on push
|
|
||||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (push.length > 0) {
|
|
||||||
// push is set up to run on a subset of branches
|
|
||||||
// and you could open a PR against a branch with no baseline
|
|
||||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (missingPush) {
|
|
||||||
errors.push(exports.WorkflowErrors.MissingPushHook);
|
|
||||||
}
|
|
||||||
return errors;
|
|
||||||
}
|
|
||||||
exports.getWorkflowErrors = getWorkflowErrors;
|
|
||||||
async function validateWorkflow() {
|
|
||||||
let workflow;
|
|
||||||
try {
|
|
||||||
workflow = await getWorkflow();
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
return `error: getWorkflow() failed: ${e.toString()}`;
|
|
||||||
}
|
|
||||||
let workflowErrors;
|
|
||||||
try {
|
|
||||||
workflowErrors = getWorkflowErrors(workflow);
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
return `error: getWorkflowErrors() failed: ${e.toString()}`;
|
|
||||||
}
|
|
||||||
if (workflowErrors.length > 0) {
|
|
||||||
let message;
|
|
||||||
try {
|
|
||||||
message = formatWorkflowErrors(workflowErrors);
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
return `error: formatWorkflowErrors() failed: ${e.toString()}`;
|
|
||||||
}
|
|
||||||
core.warning(message);
|
|
||||||
}
|
|
||||||
return formatWorkflowCause(workflowErrors);
|
|
||||||
}
|
|
||||||
exports.validateWorkflow = validateWorkflow;
|
|
||||||
function formatWorkflowErrors(errors) {
|
|
||||||
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
|
||||||
const errorsList = errors.map((e) => e.message).join(" ");
|
|
||||||
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
|
||||||
}
|
|
||||||
exports.formatWorkflowErrors = formatWorkflowErrors;
|
|
||||||
function formatWorkflowCause(errors) {
|
|
||||||
if (errors.length === 0) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
return errors.map((e) => e.code).join(",");
|
|
||||||
}
|
|
||||||
exports.formatWorkflowCause = formatWorkflowCause;
|
|
||||||
async function getWorkflow() {
|
|
||||||
const relativePath = await getWorkflowPath();
|
|
||||||
const absolutePath = path.join(getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
|
|
||||||
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
|
|
||||||
}
|
|
||||||
exports.getWorkflow = getWorkflow;
|
|
||||||
/**
|
|
||||||
* Get the path of the currently executing workflow.
|
|
||||||
*/
|
|
||||||
async function getWorkflowPath() {
|
|
||||||
if (util_1.isLocalRun()) {
|
|
||||||
return getRequiredEnvParam("WORKFLOW_PATH");
|
|
||||||
}
|
|
||||||
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
|
||||||
const owner = repo_nwo[0];
|
|
||||||
const repo = repo_nwo[1];
|
|
||||||
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
|
|
||||||
const apiClient = api.getActionsApiClient();
|
|
||||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
run_id,
|
|
||||||
});
|
|
||||||
const workflowUrl = runsResponse.data.workflow_url;
|
|
||||||
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
|
||||||
return workflowResponse.data.path;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get the workflow run ID.
|
|
||||||
*/
|
|
||||||
function getWorkflowRunID() {
|
|
||||||
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
|
|
||||||
if (Number.isNaN(workflowRunID)) {
|
|
||||||
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
|
||||||
}
|
|
||||||
return workflowRunID;
|
|
||||||
}
|
|
||||||
exports.getWorkflowRunID = getWorkflowRunID;
|
|
||||||
/**
|
|
||||||
* Get the analysis key parameter for the current job.
|
|
||||||
*
|
|
||||||
* This will combine the workflow path and current job name.
|
|
||||||
* Computing this the first time requires making requests to
|
|
||||||
* the github API, but after that the result will be cached.
|
|
||||||
*/
|
|
||||||
async function getAnalysisKey() {
|
|
||||||
const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
|
|
||||||
let analysisKey = process.env[analysisKeyEnvVar];
|
|
||||||
if (analysisKey !== undefined) {
|
|
||||||
return analysisKey;
|
|
||||||
}
|
|
||||||
const workflowPath = await getWorkflowPath();
|
|
||||||
const jobName = getRequiredEnvParam("GITHUB_JOB");
|
|
||||||
analysisKey = `${workflowPath}:${jobName}`;
|
|
||||||
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
|
||||||
return analysisKey;
|
|
||||||
}
|
|
||||||
exports.getAnalysisKey = getAnalysisKey;
|
|
||||||
/**
|
|
||||||
* Get the ref currently being analyzed.
|
|
||||||
*/
|
|
||||||
async function getRef() {
|
|
||||||
// Will be in the form "refs/heads/master" on a push event
|
|
||||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
|
||||||
const ref = getRequiredEnvParam("GITHUB_REF");
|
|
||||||
const sha = getRequiredEnvParam("GITHUB_SHA");
|
|
||||||
// For pull request refs we want to detect whether the workflow
|
|
||||||
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
|
||||||
// than the 'merge' ref. If so, we want to convert the ref that
|
|
||||||
// we report back.
|
|
||||||
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
|
|
||||||
if (!pull_ref_regex.test(ref)) {
|
|
||||||
return ref;
|
|
||||||
}
|
|
||||||
const head = await exports.getCommitOid("HEAD");
|
|
||||||
// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
|
|
||||||
// in actions/checkout@v1 this may not be true as it checks out the repository
|
|
||||||
// using GITHUB_REF. There is a subtle race condition where
|
|
||||||
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
|
||||||
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
|
|
||||||
const hasChangedRef = sha !== head &&
|
|
||||||
(await exports.getCommitOid(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
|
|
||||||
head;
|
|
||||||
if (hasChangedRef) {
|
|
||||||
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
|
||||||
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
|
|
||||||
return newRef;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
return ref;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.getRef = getRef;
|
|
||||||
/**
|
|
||||||
* Compose a StatusReport.
|
|
||||||
*
|
|
||||||
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
|
|
||||||
* @param status The status. Must be 'success', 'failure', or 'starting'
|
|
||||||
* @param startedAt The time this action started executing.
|
|
||||||
* @param cause Cause of failure (only supply if status is 'failure')
|
|
||||||
* @param exception Exception (only supply if status is 'failure')
|
|
||||||
*/
|
|
||||||
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
|
|
||||||
const commitOid = process.env["GITHUB_SHA"] || "";
|
|
||||||
const ref = await getRef();
|
|
||||||
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
|
|
||||||
let workflowRunID = -1;
|
|
||||||
if (workflowRunIDStr) {
|
|
||||||
workflowRunID = parseInt(workflowRunIDStr, 10);
|
|
||||||
}
|
|
||||||
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
|
|
||||||
const jobName = process.env["GITHUB_JOB"] || "";
|
|
||||||
const analysis_key = await getAnalysisKey();
|
|
||||||
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
|
|
||||||
if (workflowStartedAt === undefined) {
|
|
||||||
workflowStartedAt = actionStartedAt.toISOString();
|
|
||||||
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
|
|
||||||
}
|
|
||||||
// If running locally then the GITHUB_ACTION_REF cannot be trusted as it may be for the previous action
|
|
||||||
// See https://github.com/actions/runner/issues/803
|
|
||||||
const actionRef = isRunningLocalAction()
|
|
||||||
? undefined
|
|
||||||
: process.env["GITHUB_ACTION_REF"];
|
|
||||||
const statusReport = {
|
|
||||||
workflow_run_id: workflowRunID,
|
|
||||||
workflow_name: workflowName,
|
|
||||||
job_name: jobName,
|
|
||||||
analysis_key,
|
|
||||||
commit_oid: commitOid,
|
|
||||||
ref,
|
|
||||||
action_name: actionName,
|
|
||||||
action_ref: actionRef,
|
|
||||||
action_oid: "unknown",
|
|
||||||
started_at: workflowStartedAt,
|
|
||||||
action_started_at: actionStartedAt.toISOString(),
|
|
||||||
status,
|
|
||||||
};
|
|
||||||
// Add optional parameters
|
|
||||||
if (cause) {
|
|
||||||
statusReport.cause = cause;
|
|
||||||
}
|
|
||||||
if (exception) {
|
|
||||||
statusReport.exception = exception;
|
|
||||||
}
|
|
||||||
if (status === "success" || status === "failure" || status === "aborted") {
|
|
||||||
statusReport.completed_at = new Date().toISOString();
|
|
||||||
}
|
|
||||||
const matrix = getRequiredInput("matrix");
|
|
||||||
if (matrix) {
|
|
||||||
statusReport.matrix_vars = matrix;
|
|
||||||
}
|
|
||||||
return statusReport;
|
|
||||||
}
|
|
||||||
exports.createStatusReportBase = createStatusReportBase;
|
|
||||||
function isHTTPError(arg) {
|
|
||||||
var _a;
|
|
||||||
return ((_a = arg) === null || _a === void 0 ? void 0 : _a.status) !== undefined && Number.isInteger(arg.status);
|
|
||||||
}
|
|
||||||
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
|
|
||||||
const GENERIC_404_MSG = "Not authorized to used the CodeQL code scanning feature on this repo.";
|
|
||||||
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
|
|
||||||
const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
|
|
||||||
/**
|
|
||||||
* Send a status report to the code_scanning/analysis/status endpoint.
|
|
||||||
*
|
|
||||||
* Optionally checks the response from the API endpoint and sets the action
|
|
||||||
* as failed if the status report failed. This is only expected to be used
|
|
||||||
* when sending a 'starting' report.
|
|
||||||
*
|
|
||||||
* Returns whether sending the status report was successful of not.
|
|
||||||
*/
|
|
||||||
async function sendStatusReport(statusReport) {
|
|
||||||
if (util_1.isLocalRun()) {
|
|
||||||
core.debug("Not sending status report because this is a local run");
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
const statusReportJSON = JSON.stringify(statusReport);
|
|
||||||
core.debug(`Sending status report: ${statusReportJSON}`);
|
|
||||||
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
|
|
||||||
const [owner, repo] = nwo.split("/");
|
|
||||||
const client = api.getActionsApiClient();
|
|
||||||
try {
|
|
||||||
await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
data: statusReportJSON,
|
|
||||||
});
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
console.log(e);
|
|
||||||
if (isHTTPError(e)) {
|
|
||||||
switch (e.status) {
|
|
||||||
case 403:
|
|
||||||
if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
|
|
||||||
core.setFailed('Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
|
|
||||||
"Uploading Code Scanning results requires write access. " +
|
|
||||||
'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
|
|
||||||
"See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events.");
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
core.setFailed(e.message || GENERIC_403_MSG);
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
case 404:
|
|
||||||
core.setFailed(GENERIC_404_MSG);
|
|
||||||
return false;
|
|
||||||
case 422:
|
|
||||||
// schema incompatibility when reporting status
|
|
||||||
// this means that this action version is no longer compatible with the API
|
|
||||||
// we still want to continue as it is likely the analysis endpoint will work
|
|
||||||
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
|
|
||||||
core.debug(INCOMPATIBLE_MSG);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
core.debug(OUT_OF_DATE_MSG);
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// something else has gone wrong and the request/response will be logged by octokit
|
|
||||||
// it's possible this is a transient error and we should continue scanning
|
|
||||||
core.error("An unexpected error occurred when sending code scanning status report.");
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.sendStatusReport = sendStatusReport;
|
|
||||||
// Was the workflow run triggered by a `push` event, for example as opposed to a `pull_request` event.
|
|
||||||
function workflowIsTriggeredByPushEvent() {
|
|
||||||
return process.env["GITHUB_EVENT_NAME"] === "push";
|
|
||||||
}
|
|
||||||
// Is dependabot the actor that triggered the current workflow run.
|
|
||||||
function isDependabotActor() {
|
|
||||||
return process.env["GITHUB_ACTOR"] === "dependabot[bot]";
|
|
||||||
}
|
|
||||||
// Is the current action executing a local copy (i.e. we're running a workflow on the codeql-action repo itself)
|
|
||||||
// as opposed to running a remote action (i.e. when another repo references us)
|
|
||||||
function isRunningLocalAction() {
|
|
||||||
const relativeScriptPath = getRelativeScriptPath();
|
|
||||||
return (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath));
|
|
||||||
}
|
|
||||||
exports.isRunningLocalAction = isRunningLocalAction;
|
|
||||||
// Get the location where the action is running from.
|
|
||||||
// This can be used to get the actions name or tell if we're running a local action.
|
|
||||||
function getRelativeScriptPath() {
|
|
||||||
const runnerTemp = getRequiredEnvParam("RUNNER_TEMP");
|
|
||||||
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
|
|
||||||
return path.relative(actionsDirectory, __filename);
|
|
||||||
}
|
|
||||||
exports.getRelativeScriptPath = getRelativeScriptPath;
|
|
||||||
//# sourceMappingURL=actions-util.js.map
|
|
||||||
File diff suppressed because one or more lines are too long
419
lib/actions-util.test.js
generated
419
lib/actions-util.test.js
generated
@@ -1,419 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
|
||||||
const yaml = __importStar(require("js-yaml"));
|
|
||||||
const sinon_1 = __importDefault(require("sinon"));
|
|
||||||
const actionsutil = __importStar(require("./actions-util"));
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
|
||||||
function errorCodes(actual, expected) {
|
|
||||||
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
|
||||||
}
|
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
|
||||||
ava_1.default("getRef() throws on the empty string", async (t) => {
|
|
||||||
process.env["GITHUB_REF"] = "";
|
|
||||||
await t.throwsAsync(actionsutil.getRef);
|
|
||||||
});
|
|
||||||
ava_1.default("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
|
||||||
const expectedRef = "refs/pull/1/merge";
|
|
||||||
const currentSha = "a".repeat(40);
|
|
||||||
process.env["GITHUB_REF"] = expectedRef;
|
|
||||||
process.env["GITHUB_SHA"] = currentSha;
|
|
||||||
const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
|
|
||||||
callback.withArgs("HEAD").resolves(currentSha);
|
|
||||||
const actualRef = await actionsutil.getRef();
|
|
||||||
t.deepEqual(actualRef, expectedRef);
|
|
||||||
callback.restore();
|
|
||||||
});
|
|
||||||
ava_1.default("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
|
||||||
const expectedRef = "refs/pull/1/merge";
|
|
||||||
process.env["GITHUB_REF"] = expectedRef;
|
|
||||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
|
||||||
const sha = "a".repeat(40);
|
|
||||||
const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
|
|
||||||
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
|
||||||
callback.withArgs("HEAD").resolves(sha);
|
|
||||||
const actualRef = await actionsutil.getRef();
|
|
||||||
t.deepEqual(actualRef, expectedRef);
|
|
||||||
callback.restore();
|
|
||||||
});
|
|
||||||
ava_1.default("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
|
||||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
|
||||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
|
||||||
const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
|
|
||||||
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
|
|
||||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
|
||||||
const actualRef = await actionsutil.getRef();
|
|
||||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
|
||||||
callback.restore();
|
|
||||||
});
|
|
||||||
ava_1.default("getAnalysisKey() when a local run", async (t) => {
|
|
||||||
process.env.CODEQL_LOCAL_RUN = "true";
|
|
||||||
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
|
|
||||||
process.env.GITHUB_JOB = "";
|
|
||||||
actionsutil.prepareLocalRunEnvironment();
|
|
||||||
const actualAnalysisKey = await actionsutil.getAnalysisKey();
|
|
||||||
t.deepEqual(actualAnalysisKey, "LOCAL-RUN:UNKNOWN-JOB");
|
|
||||||
});
|
|
||||||
ava_1.default("prepareEnvironment() when a local run", (t) => {
|
|
||||||
process.env.CODEQL_LOCAL_RUN = "false";
|
|
||||||
process.env.GITHUB_JOB = "YYY";
|
|
||||||
process.env.CODEQL_ACTION_ANALYSIS_KEY = "TEST";
|
|
||||||
actionsutil.prepareLocalRunEnvironment();
|
|
||||||
// unchanged
|
|
||||||
t.deepEqual(process.env.GITHUB_JOB, "YYY");
|
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "TEST");
|
|
||||||
process.env.CODEQL_LOCAL_RUN = "true";
|
|
||||||
actionsutil.prepareLocalRunEnvironment();
|
|
||||||
// unchanged
|
|
||||||
t.deepEqual(process.env.GITHUB_JOB, "YYY");
|
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "TEST");
|
|
||||||
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
|
|
||||||
actionsutil.prepareLocalRunEnvironment();
|
|
||||||
// updated
|
|
||||||
t.deepEqual(process.env.GITHUB_JOB, "YYY");
|
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:YYY");
|
|
||||||
process.env.GITHUB_JOB = "";
|
|
||||||
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
|
|
||||||
actionsutil.prepareLocalRunEnvironment();
|
|
||||||
// updated
|
|
||||||
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
|
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on is empty", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push is valid", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request"],
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request", "schedule"],
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push should not have a path", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"], paths: ["test/*"] },
|
|
||||||
pull_request: { branches: ["main"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push is a correct object", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
pull_request:
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push is mismatched", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"] },
|
|
||||||
pull_request: { branches: ["feature"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main", "feature"] },
|
|
||||||
pull_request: { branches: ["main"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"] },
|
|
||||||
pull_request: { branches: ["main", "feature"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: 1,
|
|
||||||
pull_request: 1,
|
|
||||||
},
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: 1,
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: [1],
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { 1: 1 },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: 1 },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: [1] },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: { steps: 1 } },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: [undefined] },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: {
|
|
||||||
branches: 1,
|
|
||||||
},
|
|
||||||
pull_request: {
|
|
||||||
branches: 1,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}), []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: ["main"]
|
|
||||||
pull_request:
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["feature/*"] },
|
|
||||||
pull_request: { branches: "feature/moose" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["feature/moose"] },
|
|
||||||
pull_request: { branches: "feature/*" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test";
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request"],
|
|
||||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
|
||||||
});
|
|
||||||
ava_1.default("formatWorkflowErrors() when there is one error", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
]);
|
|
||||||
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
|
||||||
});
|
|
||||||
ava_1.default("formatWorkflowErrors() when there are multiple errors", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
actionsutil.WorkflowErrors.PathsSpecified,
|
|
||||||
]);
|
|
||||||
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
|
||||||
});
|
|
||||||
ava_1.default("formatWorkflowCause() with no errors", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowCause([]);
|
|
||||||
t.deepEqual(message, undefined);
|
|
||||||
});
|
|
||||||
ava_1.default("formatWorkflowCause()", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowCause([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
actionsutil.WorkflowErrors.PathsSpecified,
|
|
||||||
]);
|
|
||||||
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
|
||||||
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
|
|
||||||
});
|
|
||||||
ava_1.default("patternIsSuperset()", (t) => {
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("*", "*"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("*", "main-*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("main", "main"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("*", "feature/*"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("**", "feature/*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("feature-*", "**"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [4.1, master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [4.1, master]
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master, ]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test";
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test2:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test3:
|
|
||||||
steps: []
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test3";
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test2:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test3:
|
|
||||||
steps: []
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on is missing", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
name: "CodeQL"
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() with a different on setup", (t) => {
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: "workflow_dispatch"
|
|
||||||
`)), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: [workflow_dispatch]
|
|
||||||
`)), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
workflow_dispatch: {}
|
|
||||||
`)), []));
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
`)), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: ["push"]
|
|
||||||
`)), []));
|
|
||||||
});
|
|
||||||
//# sourceMappingURL=actions-util.test.js.map
|
|
||||||
File diff suppressed because one or more lines are too long
64
lib/analysis-paths.js
generated
64
lib/analysis-paths.js
generated
@@ -7,63 +7,21 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const path = __importStar(require("path"));
|
const core = __importStar(require("@actions/core"));
|
||||||
function isInterpretedLanguage(language) {
|
function includeAndExcludeAnalysisPaths(config, languages) {
|
||||||
return language === "javascript" || language === "python";
|
|
||||||
}
|
|
||||||
// Matches a string containing only characters that are legal to include in paths on windows.
|
|
||||||
exports.legalWindowsPathCharactersRegex = /^[^<>:"|?]*$/;
|
|
||||||
// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
|
|
||||||
function buildIncludeExcludeEnvVar(paths) {
|
|
||||||
// Ignore anything containing a *
|
|
||||||
paths = paths.filter((p) => p.indexOf("*") === -1);
|
|
||||||
// Some characters are illegal in path names in windows
|
|
||||||
if (process.platform === "win32") {
|
|
||||||
paths = paths.filter((p) => p.match(exports.legalWindowsPathCharactersRegex));
|
|
||||||
}
|
|
||||||
return paths.join("\n");
|
|
||||||
}
|
|
||||||
function printPathFiltersWarning(config, logger) {
|
|
||||||
// Index include/exclude/filters only work in javascript and python.
|
|
||||||
// If any other languages are detected/configured then show a warning.
|
|
||||||
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
|
|
||||||
!config.languages.every(isInterpretedLanguage)) {
|
|
||||||
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.printPathFiltersWarning = printPathFiltersWarning;
|
|
||||||
function includeAndExcludeAnalysisPaths(config) {
|
|
||||||
// The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
|
|
||||||
// control which files/directories are traversed when scanning.
|
|
||||||
// This allows including files that otherwise would not be scanned, or
|
|
||||||
// excluding and not traversing entire file subtrees.
|
|
||||||
// It does not understand globs or double-globs because that would require it to
|
|
||||||
// traverse the entire file tree to determine which files are matched.
|
|
||||||
// Any paths containing "*" are not included in these.
|
|
||||||
if (config.paths.length !== 0) {
|
if (config.paths.length !== 0) {
|
||||||
process.env["LGTM_INDEX_INCLUDE"] = buildIncludeExcludeEnvVar(config.paths);
|
core.exportVariable('LGTM_INDEX_INCLUDE', config.paths.join('\n'));
|
||||||
}
|
}
|
||||||
// If the temporary or tools directory is in the working directory ignore that too.
|
if (config.pathsIgnore.length !== 0) {
|
||||||
const tempRelativeToWorking = path.relative(process.cwd(), config.tempDir);
|
core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
|
||||||
const toolsRelativeToWorking = path.relative(process.cwd(), config.toolCacheDir);
|
|
||||||
let pathsIgnore = config.pathsIgnore;
|
|
||||||
if (!tempRelativeToWorking.startsWith("..")) {
|
|
||||||
pathsIgnore = pathsIgnore.concat(tempRelativeToWorking);
|
|
||||||
}
|
}
|
||||||
if (!toolsRelativeToWorking.startsWith("..")) {
|
function isInterpretedLanguage(language) {
|
||||||
pathsIgnore = pathsIgnore.concat(toolsRelativeToWorking);
|
return language === 'javascript' || language === 'python';
|
||||||
}
|
}
|
||||||
if (pathsIgnore.length !== 0) {
|
// Index include/exclude only work in javascript and python
|
||||||
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(pathsIgnore);
|
// If some other language is detected/configured show a warning
|
||||||
}
|
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) && !languages.every(isInterpretedLanguage)) {
|
||||||
// The 'LGTM_INDEX_FILTERS' environment variable controls which files are
|
core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
|
||||||
// extracted or ignored. It does not control which directories are traversed.
|
|
||||||
// This does understand the glob and double-glob syntax.
|
|
||||||
const filters = [];
|
|
||||||
filters.push(...config.paths.map((p) => `include:${p}`));
|
|
||||||
filters.push(...config.pathsIgnore.map((p) => `exclude:${p}`));
|
|
||||||
if (filters.length !== 0) {
|
|
||||||
process.env["LGTM_INDEX_FILTERS"] = filters.join("\n");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
|
exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,
GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
|
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAgB,8BAA8B,CAAC,MAA0B,EAAE,SAAmB;IAC1F,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KACtE;IAED,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACjC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC5E;IAED,SAAS,qBAAqB,CAAC,QAAQ;QACnC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;IAC9D,CAAC;IAED,2DAA2D;IAC3D,+DAA+D;IAC/D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAC3G,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC9G;AACL,CAAC;AAlBD,wEAkBC"}
|
||||||
75
lib/analysis-paths.test.js
generated
75
lib/analysis-paths.test.js
generated
@@ -1,4 +1,7 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
if (mod && mod.__esModule) return mod;
|
if (mod && mod.__esModule) return mod;
|
||||||
var result = {};
|
var result = {};
|
||||||
@@ -6,72 +9,22 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
result["default"] = mod;
|
result["default"] = mod;
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const analysisPaths = __importStar(require("./analysis-paths"));
|
const analysisPaths = __importStar(require("./analysis-paths"));
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const configUtils = __importStar(require("./config-utils"));
|
||||||
const util = __importStar(require("./util"));
|
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
|
||||||
ava_1.default("emptyPaths", async (t) => {
|
ava_1.default("emptyPaths", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
let config = new configUtils.Config();
|
||||||
const config = {
|
analysisPaths.includeAndExcludeAnalysisPaths(config, []);
|
||||||
languages: [],
|
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
|
||||||
queries: {},
|
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
|
||||||
pathsIgnore: [],
|
|
||||||
paths: [],
|
|
||||||
originalUserInput: {},
|
|
||||||
tempDir: tmpDir,
|
|
||||||
toolCacheDir: tmpDir,
|
|
||||||
codeQLCmd: "",
|
|
||||||
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
|
||||||
};
|
|
||||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
|
||||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
|
||||||
t.is(process.env["LGTM_INDEX_EXCLUDE"], undefined);
|
|
||||||
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
ava_1.default("nonEmptyPaths", async (t) => {
|
ava_1.default("nonEmptyPaths", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
let config = new configUtils.Config();
|
||||||
const config = {
|
config.paths.push('path1', 'path2');
|
||||||
languages: [],
|
config.pathsIgnore.push('path3', 'path4');
|
||||||
queries: {},
|
analysisPaths.includeAndExcludeAnalysisPaths(config, []);
|
||||||
paths: ["path1", "path2", "**/path3"],
|
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
|
||||||
pathsIgnore: ["path4", "path5", "path6/**"],
|
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path3\npath4');
|
||||||
originalUserInput: {},
|
|
||||||
tempDir: tmpDir,
|
|
||||||
toolCacheDir: tmpDir,
|
|
||||||
codeQLCmd: "",
|
|
||||||
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
|
||||||
};
|
|
||||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
|
||||||
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
|
|
||||||
t.is(process.env["LGTM_INDEX_EXCLUDE"], "path4\npath5");
|
|
||||||
t.is(process.env["LGTM_INDEX_FILTERS"], "include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("exclude temp dir", async (t) => {
|
|
||||||
return await util.withTmpDir(async (toolCacheDir) => {
|
|
||||||
const tempDir = path.join(process.cwd(), "codeql-runner-temp");
|
|
||||||
const config = {
|
|
||||||
languages: [],
|
|
||||||
queries: {},
|
|
||||||
pathsIgnore: [],
|
|
||||||
paths: [],
|
|
||||||
originalUserInput: {},
|
|
||||||
tempDir,
|
|
||||||
toolCacheDir,
|
|
||||||
codeQLCmd: "",
|
|
||||||
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
|
||||||
};
|
|
||||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
|
||||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
|
||||||
t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
|
|
||||||
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
//# sourceMappingURL=analysis-paths.test.js.map
|
//# sourceMappingURL=analysis-paths.test.js.map
|
||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAA
O;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,4DAA8C;AAE9C,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACzB,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC5B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IACpC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC1C,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;AAC5D,CAAC,CAAC,CAAC"}
|
||||||
104
lib/analyze-action.js
generated
104
lib/analyze-action.js
generated
@@ -1,104 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const fs = __importStar(require("fs"));
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const core = __importStar(require("@actions/core"));
|
|
||||||
const actionsUtil = __importStar(require("./actions-util"));
|
|
||||||
const analyze_1 = require("./analyze");
|
|
||||||
const config_utils_1 = require("./config-utils");
|
|
||||||
const logging_1 = require("./logging");
|
|
||||||
const upload_lib = __importStar(require("./upload-lib"));
|
|
||||||
const util = __importStar(require("./util"));
|
|
||||||
async function sendStatusReport(startedAt, stats, error) {
|
|
||||||
var _a, _b, _c;
|
|
||||||
const status = ((_a = stats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined
|
|
||||||
? "failure"
|
|
||||||
: "success";
|
|
||||||
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
|
|
||||||
const statusReport = {
|
|
||||||
...statusReportBase,
|
|
||||||
...(stats || {}),
|
|
||||||
};
|
|
||||||
await actionsUtil.sendStatusReport(statusReport);
|
|
||||||
}
|
|
||||||
async function run() {
|
|
||||||
const startedAt = new Date();
|
|
||||||
let stats = undefined;
|
|
||||||
let config = undefined;
|
|
||||||
try {
|
|
||||||
actionsUtil.prepareLocalRunEnvironment();
|
|
||||||
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const logger = logging_1.getActionsLogger();
|
|
||||||
config = await config_utils_1.getConfig(actionsUtil.getTemporaryDirectory(), logger);
|
|
||||||
if (config === undefined) {
|
|
||||||
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
|
|
||||||
}
|
|
||||||
const apiDetails = {
|
|
||||||
auth: actionsUtil.getRequiredInput("token"),
|
|
||||||
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
|
|
||||||
};
|
|
||||||
const outputDir = actionsUtil.getRequiredInput("output");
|
|
||||||
const queriesStats = await analyze_1.runAnalyze(outputDir, util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
|
|
||||||
if (actionsUtil.getRequiredInput("upload") === "true") {
|
|
||||||
const uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
|
|
||||||
stats = { ...queriesStats, ...uploadStats };
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
logger.info("Not uploading results");
|
|
||||||
stats = { ...queriesStats };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
core.setFailed(error.message);
|
|
||||||
console.log(error);
|
|
||||||
if (error instanceof analyze_1.CodeQLAnalysisError) {
|
|
||||||
stats = { ...error.queriesStatusReport };
|
|
||||||
}
|
|
||||||
await sendStatusReport(startedAt, stats, error);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
if (core.isDebug() && config !== undefined) {
|
|
||||||
core.info("Debug mode is on. Printing CodeQL debug logs...");
|
|
||||||
for (const language of config.languages) {
|
|
||||||
const databaseDirectory = util.getCodeQLDatabasePath(config.tempDir, language);
|
|
||||||
const logsDirectory = path.join(databaseDirectory, "log");
|
|
||||||
const walkLogFiles = (dir) => {
|
|
||||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
||||||
for (const entry of entries) {
|
|
||||||
if (entry.isFile()) {
|
|
||||||
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
|
|
||||||
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
|
|
||||||
core.endGroup();
|
|
||||||
}
|
|
||||||
else if (entry.isDirectory()) {
|
|
||||||
walkLogFiles(path.resolve(dir, entry.name));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
walkLogFiles(logsDirectory);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
await sendStatusReport(startedAt, stats);
|
|
||||||
}
|
|
||||||
async function runWrapper() {
|
|
||||||
try {
|
|
||||||
await run();
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
core.setFailed(`analyze action failed: ${error}`);
|
|
||||||
console.log(error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
void runWrapper();
|
|
||||||
//# sourceMappingURL=analyze-action.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAImB;AACnB,iDAAmD;AACnD,uCAA6C;AAC7C,yDAA2C;AAC3C,6CAA+B;AAU/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI,MAAM,GAAuB,SAAS,CAAC;IAC3C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,GAAG,MAAM,wBAAS,CAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;QACtE,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,YAAY,GAAG,MAAM,oBAAU,CACnC,SAAS,EACT,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACpE,MAAM,EACN,MAAM,CACP,CAAC;QAEF,IAAI,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EAAE;YACrD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,SAAS,EACT,MAAM,CAAC,aAAa,EACpB,UAAU,EACV,MAAM,CACP,CAAC;YACF,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;SAC7C;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;YACrC,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,CAAC;SAC7B;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,C
AAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;SAC1C;QAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;YAAS;QACR,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,MAAM,KAAK,SAAS,EAAE;YAC1C,IAAI,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;YAC7D,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,MAAM,iBAAiB,GAAG,IAAI,CAAC,qBAAqB,CAClD,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;gBACF,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;gBAE1D,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE;oBACnC,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;oBAC7D,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;wBAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;4BAClB,IAAI,CAAC,UAAU,CACb,uBAAuB,QAAQ,MAAM,KAAK,CAAC,IAAI,EAAE,CAClD,CAAC;4BACF,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAC/C,CAAC;4BACF,IAAI,CAAC,QAAQ,EAAE,CAAC;yBACjB;6BAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;4BAC9B,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;yBAC7C;qBACF;gBACH,CAAC,CAAC;gBACF,YAAY,CAAC,aAAa,CAAC,CAAC;aAC7B;SACF;KACF;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
|
||||||
130
lib/analyze.js
generated
130
lib/analyze.js
generated
@@ -1,130 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const fs = __importStar(require("fs"));
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
|
||||||
const analysisPaths = __importStar(require("./analysis-paths"));
|
|
||||||
const codeql_1 = require("./codeql");
|
|
||||||
const languages_1 = require("./languages");
|
|
||||||
const sharedEnv = __importStar(require("./shared-environment"));
|
|
||||||
const util = __importStar(require("./util"));
|
|
||||||
class CodeQLAnalysisError extends Error {
|
|
||||||
constructor(queriesStatusReport, message) {
|
|
||||||
super(message);
|
|
||||||
this.name = "CodeQLAnalysisError";
|
|
||||||
this.queriesStatusReport = queriesStatusReport;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.CodeQLAnalysisError = CodeQLAnalysisError;
|
|
||||||
async function setupPythonExtractor(logger) {
|
|
||||||
const codeqlPython = process.env["CODEQL_PYTHON"];
|
|
||||||
if (codeqlPython === undefined || codeqlPython.length === 0) {
|
|
||||||
// If CODEQL_PYTHON is not set, no dependencies were installed, so we don't need to do anything
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let output = "";
|
|
||||||
const options = {
|
|
||||||
listeners: {
|
|
||||||
stdout: (data) => {
|
|
||||||
output += data.toString();
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
await new toolrunner.ToolRunner(codeqlPython, [
|
|
||||||
"-c",
|
|
||||||
"import os; import pip; print(os.path.dirname(os.path.dirname(pip.__file__)))",
|
|
||||||
], options).exec();
|
|
||||||
logger.info(`Setting LGTM_INDEX_IMPORT_PATH=${output}`);
|
|
||||||
process.env["LGTM_INDEX_IMPORT_PATH"] = output;
|
|
||||||
output = "";
|
|
||||||
await new toolrunner.ToolRunner(codeqlPython, ["-c", "import sys; print(sys.version_info[0])"], options).exec();
|
|
||||||
logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`);
|
|
||||||
process.env["LGTM_PYTHON_SETUP_VERSION"] = output;
|
|
||||||
}
|
|
||||||
async function createdDBForScannedLanguages(config, logger) {
|
|
||||||
// Insert the LGTM_INDEX_X env vars at this point so they are set when
|
|
||||||
// we extract any scanned languages.
|
|
||||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
|
||||||
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
|
|
||||||
for (const language of config.languages) {
|
|
||||||
if (languages_1.isScannedLanguage(language)) {
|
|
||||||
logger.startGroup(`Extracting ${language}`);
|
|
||||||
if (language === languages_1.Language.python) {
|
|
||||||
await setupPythonExtractor(logger);
|
|
||||||
}
|
|
||||||
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
|
|
||||||
logger.endGroup();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
async function finalizeDatabaseCreation(config, threadsFlag, logger) {
|
|
||||||
await createdDBForScannedLanguages(config, logger);
|
|
||||||
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
|
|
||||||
for (const language of config.languages) {
|
|
||||||
logger.startGroup(`Finalizing ${language}`);
|
|
||||||
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language), threadsFlag);
|
|
||||||
logger.endGroup();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Runs queries and creates sarif files in the given folder
|
|
||||||
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
|
|
||||||
const statusReport = {};
|
|
||||||
for (const language of config.languages) {
|
|
||||||
logger.startGroup(`Analyzing ${language}`);
|
|
||||||
const queries = config.queries[language];
|
|
||||||
if (queries.builtin.length === 0 && queries.custom.length === 0) {
|
|
||||||
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
for (const type of ["builtin", "custom"]) {
|
|
||||||
if (queries[type].length > 0) {
|
|
||||||
const startTime = new Date().getTime();
|
|
||||||
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
|
|
||||||
// Pass the queries to codeql using a file instead of using the command
|
|
||||||
// line to avoid command line length restrictions, particularly on windows.
|
|
||||||
const querySuitePath = `${databasePath}-queries-${type}.qls`;
|
|
||||||
const querySuiteContents = queries[type]
|
|
||||||
.map((q) => `- query: ${q}`)
|
|
||||||
.join("\n");
|
|
||||||
fs.writeFileSync(querySuitePath, querySuiteContents);
|
|
||||||
logger.debug(`Query suite file for ${language}...\n${querySuiteContents}`);
|
|
||||||
const sarifFile = path.join(sarifFolder, `${language}-${type}.sarif`);
|
|
||||||
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
|
|
||||||
await codeql.databaseAnalyze(databasePath, sarifFile, querySuitePath, memoryFlag, addSnippetsFlag, threadsFlag);
|
|
||||||
logger.debug(`SARIF results for database ${language} created at "${sarifFile}"`);
|
|
||||||
logger.endGroup();
|
|
||||||
// Record the performance
|
|
||||||
const endTime = new Date().getTime();
|
|
||||||
statusReport[`analyze_${type}_queries_${language}_duration_ms`] =
|
|
||||||
endTime - startTime;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
logger.info(e);
|
|
||||||
statusReport.analyze_failure_language = language;
|
|
||||||
throw new CodeQLAnalysisError(statusReport, `Error running analysis for ${language}: ${e}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return statusReport;
|
|
||||||
}
|
|
||||||
exports.runQueries = runQueries;
|
|
||||||
async function runAnalyze(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
|
|
||||||
// Delete the tracer config env var to avoid tracing ourselves
|
|
||||||
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
|
|
||||||
fs.mkdirSync(outputDir, { recursive: true });
|
|
||||||
logger.info("Finalizing database creation");
|
|
||||||
await finalizeDatabaseCreation(config, threadsFlag, logger);
|
|
||||||
logger.info("Analyzing database");
|
|
||||||
const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
|
|
||||||
return { ...queriesStats };
|
|
||||||
}
|
|
||||||
exports.runAnalyze = runAnalyze;
|
|
||||||
//# sourceMappingURL=analyze.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAE3D,gEAAkD;AAClD,qCAAqC;AAErC,2CAA0D;AAE1D,gEAAkD;AAClD,6CAA+B;AAE/B,MAAa,mBAAoB,SAAQ,KAAK;IAG5C,YAAY,mBAAwC,EAAE,OAAe;QACnE,KAAK,CAAC,OAAO,CAAC,CAAC;QAEf,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC;QAClC,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;CACF;AATD,kDASC;AA+BD,KAAK,UAAU,oBAAoB,CAAC,MAAc;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAClD,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3D,+FAA+F;QAC/F,OAAO;KACR;IAED,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,OAAO,GAAG;QACd,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC;IAEF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ;QACE,IAAI;QACJ,8EAA8E;KAC/E,EACD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,kCAAkC,MAAM,EAAE,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAE/C,MAAM,GAAG,EAAE,CAAC;IACZ,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ,CAAC,IAAI,EAAE,wCAAwC,CAAC,EAChD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,qCAAqC,MAAM,EAAE,CAAC,CAAC;IAC3D,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,GAAG,MAAM,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;YAE5C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,MAAM,EAAE;gBAChC,MAAM,oBAAoB,CAAC,MAAM,CAAC,CAAC;aACpC;YAED,MAAM,MAAM,CAAC,sBAAsB,CACjC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,CACT,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,WAAmB,EACnB,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAA
C,cAAc,QAAQ,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAC3B,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,WAAW,CACZ,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AACpD,KAAK,UAAU,UAAU,CAC9B,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,YAAY,GAAwB,EAAE,CAAC;IAE7C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,QAAQ,EAAE,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QACzC,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;YAC/D,MAAM,IAAI,KAAK,CACb,qBAAqB,QAAQ,gDAAgD,CAC9E,CAAC;SACH;QAED,IAAI;YACF,KAAK,MAAM,IAAI,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,EAAE;gBACxC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBAEvC,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAC7C,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;oBACF,uEAAuE;oBACvE,2EAA2E;oBAC3E,MAAM,cAAc,GAAG,GAAG,YAAY,YAAY,IAAI,MAAM,CAAC;oBAC7D,MAAM,kBAAkB,GAAG,OAAO,CAAC,IAAI,CAAC;yBACrC,GAAG,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC;yBACnC,IAAI,CAAC,IAAI,CAAC,CAAC;oBACd,EAAE,CAAC,aAAa,CAAC,cAAc,EAAE,kBAAkB,CAAC,CAAC;oBACrD,MAAM,CAAC,KAAK,CACV,wBAAwB,QAAQ,QAAQ,kBAAkB,EAAE,CAC7D,CAAC;oBAEF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,QAAQ,IAAI,IAAI,QAAQ,CAAC,CAAC;oBAEtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;oBAC3C,MAAM,MAAM,CAAC,eAAe,CAC1B,YAAY,EACZ,SAAS,EACT,cAAc,EACd,UAAU,EACV,eAAe,EACf,WAAW,CACZ,CAAC;oBAEF,MAAM,CAAC,KAAK,CACV,8BAA8B,QAAQ,gBAAgB,SAAS,GAAG,CACnE,CAAC;oBACF,MAAM,CAAC,QAAQ,EAAE,CAAC;oBAElB,yBAAyB;oBACzB,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBACrC,YAAY,CAAC,WAAW,IAAI,YAAY,QAAQ,cAAc,CAAC;wBAC7D,OAAO,GAAG,SAAS,CAAC;iBACvB;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACf,YAAY,CAAC,wBAAwB,GAAG,QAAQ,CAAC;YACjD,MAAM,IAAI,mBAAmB,CAC3B,YAAY,EACZ,8BAA8B,QAAQ,KAAK,CAAC,EAAE,CAC/C,CAAC;SACH;KACF;IAED,OAAO,YAAY,CAAC;AACtB,CAAC;AA1ED,gCA0EC;AAEM,KAAK,UAAU,UAAU,CAC9B,SAAiB,EACjB,UAAkB,EAClB,eAAuB
,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;IAE5D,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CACnC,SAAS,EACT,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,MAAM,CACP,CAAC;IAEF,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;AAC7B,CAAC;AA3BD,gCA2BC"}
|
|
||||||
66
lib/analyze.test.js
generated
66
lib/analyze.test.js
generated
@@ -1,66 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const fs = __importStar(require("fs"));
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
|
||||||
const analyze_1 = require("./analyze");
|
|
||||||
const codeql_1 = require("./codeql");
|
|
||||||
const languages_1 = require("./languages");
|
|
||||||
const logging_1 = require("./logging");
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
|
||||||
const util = __importStar(require("./util"));
|
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
|
||||||
// Checks that the duration fields are populated for the correct language
|
|
||||||
// and correct case of builtin or custom.
|
|
||||||
ava_1.default("status report fields", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
codeql_1.setCodeQL({
|
|
||||||
databaseAnalyze: async () => undefined,
|
|
||||||
});
|
|
||||||
const memoryFlag = "";
|
|
||||||
const addSnippetsFlag = "";
|
|
||||||
const threadsFlag = "";
|
|
||||||
for (const language of Object.values(languages_1.Language)) {
|
|
||||||
const config = {
|
|
||||||
languages: [language],
|
|
||||||
queries: {},
|
|
||||||
pathsIgnore: [],
|
|
||||||
paths: [],
|
|
||||||
originalUserInput: {},
|
|
||||||
tempDir: tmpDir,
|
|
||||||
toolCacheDir: tmpDir,
|
|
||||||
codeQLCmd: "",
|
|
||||||
gitHubVersion: {
|
|
||||||
type: util.GitHubVariant.DOTCOM,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
fs.mkdirSync(util.getCodeQLDatabasePath(config.tempDir, language), {
|
|
||||||
recursive: true,
|
|
||||||
});
|
|
||||||
config.queries[language] = {
|
|
||||||
builtin: ["foo.ql"],
|
|
||||||
custom: [],
|
|
||||||
};
|
|
||||||
const builtinStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logging_1.getRunnerLogger(true));
|
|
||||||
t.deepEqual(Object.keys(builtinStatusReport).length, 1);
|
|
||||||
t.true(`analyze_builtin_queries_${language}_duration_ms` in builtinStatusReport);
|
|
||||||
config.queries[language] = {
|
|
||||||
builtin: [],
|
|
||||||
custom: ["foo.ql"],
|
|
||||||
};
|
|
||||||
const customStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logging_1.getRunnerLogger(true));
|
|
||||||
t.deepEqual(Object.keys(customStatusReport).length, 1);
|
|
||||||
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
//# sourceMappingURL=analyze.test.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"analyze.test.js","sourceRoot":"","sources":["../src/analyze.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AAEzB,8CAAuB;AAEvB,uCAAuC;AACvC,qCAAqC;AAErC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,yEAAyE;AACzE,yCAAyC;AACzC,aAAI,CAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,kBAAS,CAAC;YACR,eAAe,EAAE,KAAK,IAAI,EAAE,CAAC,SAAS;SACvC,CAAC,CAAC;QAEH,MAAM,UAAU,GAAG,EAAE,CAAC;QACtB,MAAM,eAAe,GAAG,EAAE,CAAC;QAC3B,MAAM,WAAW,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAQ,CAAC,EAAE;YAC9C,MAAM,MAAM,GAAW;gBACrB,SAAS,EAAE,CAAC,QAAQ,CAAC;gBACrB,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,EAAE;gBACf,KAAK,EAAE,EAAE;gBACT,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,MAAM;gBACf,YAAY,EAAE,MAAM;gBACpB,SAAS,EAAE,EAAE;gBACb,aAAa,EAAE;oBACb,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;iBACV;aACxB,CAAC;YACF,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE;gBACjE,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,CAAC,QAAQ,CAAC;gBACnB,MAAM,EAAE,EAAE;aACX,CAAC;YACF,MAAM,mBAAmB,GAAG,MAAM,oBAAU,CAC1C,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACxD,CAAC,CAAC,IAAI,CACJ,2BAA2B,QAAQ,cAAc,IAAI,mBAAmB,CACzE,CAAC;YAEF,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,EAAE;gBACX,MAAM,EAAE,CAAC,QAAQ,CAAC;aACnB,CAAC;YACF,MAAM,kBAAkB,GAAG,MAAM,oBAAU,CACzC,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACvD,CAAC,CAAC,IAAI,CACJ,0BAA0B,QAAQ,cAAc,IAAI,kBAAkB,CACvE,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
|
||||||
56
lib/api-client.js
generated
56
lib/api-client.js
generated
@@ -1,56 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const githubUtils = __importStar(require("@actions/github/lib/utils"));
|
|
||||||
const console_log_level_1 = __importDefault(require("console-log-level"));
|
|
||||||
const actions_util_1 = require("./actions-util");
|
|
||||||
const util_1 = require("./util");
|
|
||||||
var DisallowedAPIVersionReason;
|
|
||||||
(function (DisallowedAPIVersionReason) {
|
|
||||||
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
|
|
||||||
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
|
|
||||||
})(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
|
|
||||||
exports.getApiClient = function (apiDetails, { allowLocalRun = false, allowExternal = false } = {}) {
|
|
||||||
if (util_1.isLocalRun() && !allowLocalRun) {
|
|
||||||
throw new Error("Invalid API call in local run");
|
|
||||||
}
|
|
||||||
const auth = (allowExternal && apiDetails.externalRepoAuth) || apiDetails.auth;
|
|
||||||
return new githubUtils.GitHub(githubUtils.getOctokitOptions(auth, {
|
|
||||||
baseUrl: getApiUrl(apiDetails.url),
|
|
||||||
userAgent: "CodeQL Action",
|
|
||||||
log: console_log_level_1.default({ level: "debug" }),
|
|
||||||
}));
|
|
||||||
};
|
|
||||||
function getApiUrl(githubUrl) {
|
|
||||||
const url = new URL(githubUrl);
|
|
||||||
// If we detect this is trying to connect to github.com
|
|
||||||
// then return with a fixed canonical URL.
|
|
||||||
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
|
|
||||||
return "https://api.github.com";
|
|
||||||
}
|
|
||||||
// Add the /api/v3 API prefix
|
|
||||||
url.pathname = path.join(url.pathname, "api", "v3");
|
|
||||||
return url.toString();
|
|
||||||
}
|
|
||||||
// Temporary function to aid in the transition to running on and off of github actions.
|
|
||||||
// Once all code has been converted this function should be removed or made canonical
|
|
||||||
// and called only from the action entrypoints.
|
|
||||||
function getActionsApiClient(allowLocalRun = false) {
|
|
||||||
const apiDetails = {
|
|
||||||
auth: actions_util_1.getRequiredInput("token"),
|
|
||||||
url: actions_util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
|
|
||||||
};
|
|
||||||
return exports.getApiClient(apiDetails, { allowLocalRun });
|
|
||||||
}
|
|
||||||
exports.getActionsApiClient = getActionsApiClient;
|
|
||||||
//# sourceMappingURL=api-client.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,0EAAgD;AAEhD,iDAAuE;AACvE,iCAAoC;AAEpC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeY,QAAA,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAErD,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IAED,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,OAAO,IAAI,WAAW,CAAC,MAAM,CAC3B,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,+BAAgB,CAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,kCAAmB,CAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,oBAAY,CAAC,UAAU,EAAE,EAAE,aAAa,EAAE,CAAC,CAAC;AACrD,CAAC;AAPD,kDAOC"}
|
|
||||||
72
lib/api-client.test.js
generated
72
lib/api-client.test.js
generated
@@ -1,72 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const githubUtils = __importStar(require("@actions/github/lib/utils"));
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
|
||||||
const sinon_1 = __importDefault(require("sinon"));
|
|
||||||
const api_client_1 = require("./api-client");
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
|
||||||
let githubStub;
|
|
||||||
ava_1.default.beforeEach(() => {
|
|
||||||
githubStub = sinon_1.default.stub(githubUtils, "GitHub");
|
|
||||||
});
|
|
||||||
ava_1.default("Get the client API", async (t) => {
|
|
||||||
doTest(t, {
|
|
||||||
auth: "xyz",
|
|
||||||
externalRepoAuth: "abc",
|
|
||||||
url: "http://hucairz",
|
|
||||||
}, undefined, {
|
|
||||||
auth: "token xyz",
|
|
||||||
baseUrl: "http://hucairz/api/v3",
|
|
||||||
userAgent: "CodeQL Action",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Get the client API external", async (t) => {
|
|
||||||
doTest(t, {
|
|
||||||
auth: "xyz",
|
|
||||||
externalRepoAuth: "abc",
|
|
||||||
url: "http://hucairz",
|
|
||||||
}, { allowExternal: true }, {
|
|
||||||
auth: "token abc",
|
|
||||||
baseUrl: "http://hucairz/api/v3",
|
|
||||||
userAgent: "CodeQL Action",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Get the client API external not present", async (t) => {
|
|
||||||
doTest(t, {
|
|
||||||
auth: "xyz",
|
|
||||||
url: "http://hucairz",
|
|
||||||
}, { allowExternal: true }, {
|
|
||||||
auth: "token xyz",
|
|
||||||
baseUrl: "http://hucairz/api/v3",
|
|
||||||
userAgent: "CodeQL Action",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Get the client API with github url", async (t) => {
|
|
||||||
doTest(t, {
|
|
||||||
auth: "xyz",
|
|
||||||
url: "https://github.com/some/invalid/url",
|
|
||||||
}, undefined, {
|
|
||||||
auth: "token xyz",
|
|
||||||
baseUrl: "https://api.github.com",
|
|
||||||
userAgent: "CodeQL Action",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
function doTest(t, clientArgs, clientOptions, expected) {
|
|
||||||
api_client_1.getApiClient(clientArgs, clientOptions);
|
|
||||||
const firstCallArgs = githubStub.args[0];
|
|
||||||
// log is a function, so we don't need to test for equality of it
|
|
||||||
delete firstCallArgs[0].log;
|
|
||||||
t.deepEqual(firstCallArgs, [expected]);
|
|
||||||
}
|
|
||||||
//# sourceMappingURL=api-client.test.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,kDAA0B;AAE1B,6CAA4C;AAC5C,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,eAAK,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;AACjD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,yBAAY,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"}
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{ "maximumVersion": "3.1", "minimumVersion": "2.22" }
|
|
||||||
64
lib/autobuild-action.js
generated
64
lib/autobuild-action.js
generated
@@ -1,64 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const core = __importStar(require("@actions/core"));
|
|
||||||
const actionsUtil = __importStar(require("./actions-util"));
|
|
||||||
const autobuild_1 = require("./autobuild");
|
|
||||||
const config_utils = __importStar(require("./config-utils"));
|
|
||||||
const logging_1 = require("./logging");
|
|
||||||
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
|
|
||||||
var _a, _b;
|
|
||||||
const status = failingLanguage !== undefined || cause !== undefined
|
|
||||||
? "failure"
|
|
||||||
: "success";
|
|
||||||
const statusReportBase = await actionsUtil.createStatusReportBase("autobuild", status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
|
|
||||||
const statusReport = {
|
|
||||||
...statusReportBase,
|
|
||||||
autobuild_languages: allLanguages.join(","),
|
|
||||||
autobuild_failure: failingLanguage,
|
|
||||||
};
|
|
||||||
await actionsUtil.sendStatusReport(statusReport);
|
|
||||||
}
|
|
||||||
async function run() {
|
|
||||||
const logger = logging_1.getActionsLogger();
|
|
||||||
const startedAt = new Date();
|
|
||||||
let language = undefined;
|
|
||||||
try {
|
|
||||||
actionsUtil.prepareLocalRunEnvironment();
|
|
||||||
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("autobuild", "starting", startedAt)))) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const config = await config_utils.getConfig(actionsUtil.getTemporaryDirectory(), logger);
|
|
||||||
if (config === undefined) {
|
|
||||||
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
|
|
||||||
}
|
|
||||||
language = autobuild_1.determineAutobuildLanguage(config, logger);
|
|
||||||
if (language !== undefined) {
|
|
||||||
await autobuild_1.runAutobuild(language, config, logger);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error.message}`);
|
|
||||||
console.log(error);
|
|
||||||
await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
await sendCompletedStatusReport(startedAt, language ? [language] : []);
|
|
||||||
}
|
|
||||||
async function runWrapper() {
|
|
||||||
try {
|
|
||||||
await run();
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
core.setFailed(`autobuild action failed. ${error}`);
|
|
||||||
console.log(error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
void runWrapper();
|
|
||||||
//# sourceMappingURL=autobuild-action.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAS7C,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,WAAW,EACX,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
|
||||||
81
lib/autobuild.js
generated
81
lib/autobuild.js
generated
@@ -1,32 +1,61 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||||
|
result["default"] = mod;
|
||||||
|
return result;
|
||||||
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const codeql_1 = require("./codeql");
|
const core = __importStar(require("@actions/core"));
|
||||||
const languages_1 = require("./languages");
|
const exec = __importStar(require("@actions/exec"));
|
||||||
function determineAutobuildLanguage(config, logger) {
|
const path = __importStar(require("path"));
|
||||||
// Attempt to find a language to autobuild
|
const sharedEnv = __importStar(require("./shared-environment"));
|
||||||
// We want pick the dominant language in the repo from the ones we're able to build
|
const util = __importStar(require("./util"));
|
||||||
// The languages are sorted in order specified by user or by lines of code if we got
|
async function run() {
|
||||||
// them from the GitHub API, so try to build the first language on the list.
|
var _a;
|
||||||
const autobuildLanguages = config.languages.filter(languages_1.isTracedLanguage);
|
try {
|
||||||
const language = autobuildLanguages[0];
|
if (util.should_abort('autobuild', true) || !await util.reportActionStarting('autobuild')) {
|
||||||
if (!language) {
|
return;
|
||||||
logger.info("None of the languages in this project require extra build steps");
|
}
|
||||||
return undefined;
|
// Attempt to find a language to autobuild
|
||||||
|
// We want pick the dominant language in the repo from the ones we're able to build
|
||||||
|
// The languages are sorted in order specified by user or by lines of code if we got
|
||||||
|
// them from the GitHub API, so try to build the first language on the list.
|
||||||
|
const autobuildLanguages = ((_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];
|
||||||
|
const language = autobuildLanguages[0];
|
||||||
|
if (!language) {
|
||||||
|
core.info("None of the languages in this project require extra build steps");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
core.debug(`Detected dominant traced language: ${language}`);
|
||||||
|
if (autobuildLanguages.length > 1) {
|
||||||
|
core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
|
||||||
|
}
|
||||||
|
core.startGroup(`Attempting to automatically build ${language} code`);
|
||||||
|
// TODO: share config accross actions better via env variables
|
||||||
|
const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
|
||||||
|
const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
|
||||||
|
const autobuildCmd = path.join(path.dirname(codeqlCmd), language, 'tools', cmdName);
|
||||||
|
// Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
|
||||||
|
// This is because of an issue with Azure pipelines timing out connections after 4 minutes
|
||||||
|
// and Maven not properly handling closed connections
|
||||||
|
// Otherwise long build processes will timeout when pulling down Java packages
|
||||||
|
// https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
|
||||||
|
let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
|
||||||
|
process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
|
||||||
|
await exec.exec(autobuildCmd);
|
||||||
|
core.endGroup();
|
||||||
}
|
}
|
||||||
logger.debug(`Detected dominant traced language: ${language}`);
|
catch (error) {
|
||||||
if (autobuildLanguages.length > 1) {
|
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
|
||||||
logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages
|
await util.reportActionFailed('autobuild', error.message, error.stack);
|
||||||
.slice(1)
|
return;
|
||||||
.join(" and ")}, you must replace this call with custom build steps.`);
|
|
||||||
}
|
}
|
||||||
return language;
|
await util.reportActionSucceeded('autobuild');
|
||||||
}
|
}
|
||||||
exports.determineAutobuildLanguage = determineAutobuildLanguage;
|
run().catch(e => {
|
||||||
async function runAutobuild(language, config, logger) {
|
core.setFailed("autobuild action failed. " + e);
|
||||||
logger.startGroup(`Attempting to automatically build ${language} code`);
|
console.log(e);
|
||||||
const codeQL = codeql_1.getCodeQL(config.codeQLCmd);
|
});
|
||||||
await codeQL.runAutobuild(language);
|
|
||||||
logger.endGroup();
|
|
||||||
}
|
|
||||||
exports.runAutobuild = runAutobuild;
|
|
||||||
//# sourceMappingURL=autobuild.js.map
|
//# sourceMappingURL=autobuild.js.map
|
||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
|
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,2CAA6B;AAE7B,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,8DAA8D;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC;QAExE,MAAM,OAAO,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,cAAc,CAAC;QAChF,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;QAGpF,+DAA+D;QAC/D,0FAA0F;QAC1F,qDAAqD;QACrD,8EAA8E;QAC9E,gHAAgH;QAChH,IAAI,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,wBAAwB,EAAE,+BAA+B,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAE1I,MAAM,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC9B,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||||
494
lib/codeql.js
generated
494
lib/codeql.js
generated
@@ -1,494 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const fs = __importStar(require("fs"));
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const stream = __importStar(require("stream"));
|
|
||||||
const globalutil = __importStar(require("util"));
|
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
|
||||||
const http = __importStar(require("@actions/http-client"));
|
|
||||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
|
||||||
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
|
|
||||||
const query_string_1 = __importDefault(require("query-string"));
|
|
||||||
const semver = __importStar(require("semver"));
|
|
||||||
const uuid_1 = require("uuid");
|
|
||||||
const actions_util_1 = require("./actions-util");
|
|
||||||
const api = __importStar(require("./api-client"));
|
|
||||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
|
||||||
const error_matcher_1 = require("./error-matcher");
|
|
||||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
|
||||||
const util = __importStar(require("./util"));
|
|
||||||
/**
|
|
||||||
* Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
|
|
||||||
* Can be overridden in tests using `setCodeQL`.
|
|
||||||
*/
|
|
||||||
let cachedCodeQL = undefined;
|
|
||||||
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
|
|
||||||
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
|
||||||
function getCodeQLBundleName() {
|
|
||||||
let platform;
|
|
||||||
if (process.platform === "win32") {
|
|
||||||
platform = "win64";
|
|
||||||
}
|
|
||||||
else if (process.platform === "linux") {
|
|
||||||
platform = "linux64";
|
|
||||||
}
|
|
||||||
else if (process.platform === "darwin") {
|
|
||||||
platform = "osx64";
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
return "codeql-bundle.tar.gz";
|
|
||||||
}
|
|
||||||
return `codeql-bundle-${platform}.tar.gz`;
|
|
||||||
}
|
|
||||||
function getCodeQLActionRepository(mode, logger) {
|
|
||||||
if (mode !== "actions") {
|
|
||||||
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
return getActionsCodeQLActionRepository(logger);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function getActionsCodeQLActionRepository(logger) {
|
|
||||||
if (process.env["GITHUB_ACTION_REPOSITORY"] !== undefined) {
|
|
||||||
return process.env["GITHUB_ACTION_REPOSITORY"];
|
|
||||||
}
|
|
||||||
// The Actions Runner used with GitHub Enterprise Server 2.22 did not set the GITHUB_ACTION_REPOSITORY variable.
|
|
||||||
// This fallback logic can be removed after the end-of-support for 2.22 on 2021-09-23.
|
|
||||||
if (actions_util_1.isRunningLocalAction()) {
|
|
||||||
// This handles the case where the Action does not come from an Action repository,
|
|
||||||
// e.g. our integration tests which use the Action code from the current checkout.
|
|
||||||
logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository.");
|
|
||||||
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
|
||||||
}
|
|
||||||
logger.info("GITHUB_ACTION_REPOSITORY environment variable was not set. Falling back to legacy method of finding the GitHub Action.");
|
|
||||||
const relativeScriptPathParts = actions_util_1.getRelativeScriptPath().split(path.sep);
|
|
||||||
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
|
|
||||||
}
|
|
||||||
async function getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger) {
|
|
||||||
const codeQLActionRepository = getCodeQLActionRepository(mode, logger);
|
|
||||||
const potentialDownloadSources = [
|
|
||||||
// This GitHub instance, and this Action.
|
|
||||||
[apiDetails.url, codeQLActionRepository],
|
|
||||||
// This GitHub instance, and the canonical Action.
|
|
||||||
[apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY],
|
|
||||||
// GitHub.com, and the canonical Action.
|
|
||||||
[util.GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
|
|
||||||
];
|
|
||||||
// We now filter out any duplicates.
|
|
||||||
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
|
|
||||||
const uniqueDownloadSources = potentialDownloadSources.filter((source, index, self) => {
|
|
||||||
return !self.slice(0, index).some((other) => fast_deep_equal_1.default(source, other));
|
|
||||||
});
|
|
||||||
const codeQLBundleName = getCodeQLBundleName();
|
|
||||||
if (variant === util.GitHubVariant.GHAE) {
|
|
||||||
try {
|
|
||||||
const release = await api
|
|
||||||
.getApiClient(apiDetails)
|
|
||||||
.request("GET /enterprise/code-scanning/codeql-bundle/find/{tag}", {
|
|
||||||
tag: CODEQL_BUNDLE_VERSION,
|
|
||||||
});
|
|
||||||
const assetID = release.data.assets[codeQLBundleName];
|
|
||||||
if (assetID !== undefined) {
|
|
||||||
const download = await api
|
|
||||||
.getApiClient(apiDetails)
|
|
||||||
.request("GET /enterprise/code-scanning/codeql-bundle/download/{asset_id}", { asset_id: assetID });
|
|
||||||
const downloadURL = download.data.url;
|
|
||||||
logger.info(`Found CodeQL bundle at GitHub AE endpoint with URL ${downloadURL}.`);
|
|
||||||
return downloadURL;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
logger.info(`Attempted to fetch bundle from GitHub AE endpoint but the bundle ${codeQLBundleName} was not found in the assets ${JSON.stringify(release.data.assets)}.`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
logger.info(`Attempted to fetch bundle from GitHub AE endpoint but got error ${e}.`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (const downloadSource of uniqueDownloadSources) {
|
|
||||||
const [apiURL, repository] = downloadSource;
|
|
||||||
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
|
|
||||||
if (apiURL === util.GITHUB_DOTCOM_URL &&
|
|
||||||
repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
const [repositoryOwner, repositoryName] = repository.split("/");
|
|
||||||
try {
|
|
||||||
const release = await api.getApiClient(apiDetails).repos.getReleaseByTag({
|
|
||||||
owner: repositoryOwner,
|
|
||||||
repo: repositoryName,
|
|
||||||
tag: CODEQL_BUNDLE_VERSION,
|
|
||||||
});
|
|
||||||
for (const asset of release.data.assets) {
|
|
||||||
if (asset.name === codeQLBundleName) {
|
|
||||||
logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
|
|
||||||
return asset.url;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
|
|
||||||
}
|
|
||||||
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
|
|
||||||
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
|
|
||||||
async function toolcacheDownloadTool(url, headers, tempDir, logger) {
|
|
||||||
const client = new http.HttpClient("CodeQL Action");
|
|
||||||
const dest = path.join(tempDir, uuid_1.v4());
|
|
||||||
const response = await client.get(url, headers);
|
|
||||||
if (response.message.statusCode !== 200) {
|
|
||||||
logger.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
|
|
||||||
throw new Error(`Unexpected HTTP response: ${response.message.statusCode}`);
|
|
||||||
}
|
|
||||||
const pipeline = globalutil.promisify(stream.pipeline);
|
|
||||||
fs.mkdirSync(path.dirname(dest), { recursive: true });
|
|
||||||
await pipeline(response.message, fs.createWriteStream(dest));
|
|
||||||
return dest;
|
|
||||||
}
|
|
||||||
async function setupCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger) {
|
|
||||||
try {
|
|
||||||
// We use the special value of 'latest' to prioritize the version in the
|
|
||||||
// defaults over any pinned cached version.
|
|
||||||
const forceLatest = codeqlURL === "latest";
|
|
||||||
if (forceLatest) {
|
|
||||||
codeqlURL = undefined;
|
|
||||||
}
|
|
||||||
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
|
|
||||||
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
|
|
||||||
// If we find the specified version, we always use that.
|
|
||||||
let codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer);
|
|
||||||
// If we don't find the requested version, in some cases we may allow a
|
|
||||||
// different version to save download time if the version hasn't been
|
|
||||||
// specified explicitly (in which case we always honor it).
|
|
||||||
if (!codeqlFolder && !codeqlURL && !forceLatest) {
|
|
||||||
const codeqlVersions = toolcache.findAllVersions("CodeQL");
|
|
||||||
if (codeqlVersions.length === 1) {
|
|
||||||
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]);
|
|
||||||
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
|
|
||||||
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
|
|
||||||
codeqlFolder = tmpCodeqlFolder;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (codeqlFolder) {
|
|
||||||
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
if (!codeqlURL) {
|
|
||||||
codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger);
|
|
||||||
}
|
|
||||||
const parsedCodeQLURL = new URL(codeqlURL);
|
|
||||||
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
|
|
||||||
const headers = { accept: "application/octet-stream" };
|
|
||||||
// We only want to provide an authorization header if we are downloading
|
|
||||||
// from the same GitHub instance the Action is running on.
|
|
||||||
// This avoids leaking Enterprise tokens to dotcom.
|
|
||||||
// We also don't want to send an authorization header if there's already a token provided in the URL.
|
|
||||||
if (codeqlURL.startsWith(`${apiDetails.url}/`) &&
|
|
||||||
parsedQueryString["token"] === undefined) {
|
|
||||||
logger.debug("Downloading CodeQL bundle with token.");
|
|
||||||
headers.authorization = `token ${apiDetails.auth}`;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
logger.debug("Downloading CodeQL bundle without token.");
|
|
||||||
}
|
|
||||||
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
|
|
||||||
const codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger);
|
|
||||||
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
|
||||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
|
|
||||||
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer);
|
|
||||||
}
|
|
||||||
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
|
|
||||||
if (process.platform === "win32") {
|
|
||||||
codeqlCmd += ".exe";
|
|
||||||
}
|
|
||||||
else if (process.platform !== "linux" && process.platform !== "darwin") {
|
|
||||||
throw new Error(`Unsupported platform: ${process.platform}`);
|
|
||||||
}
|
|
||||||
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
|
|
||||||
return { codeql: cachedCodeQL, toolsVersion: codeqlURLVersion };
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
logger.error(e);
|
|
||||||
throw new Error("Unable to download and extract CodeQL CLI");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.setupCodeQL = setupCodeQL;
|
|
||||||
function getCodeQLURLVersion(url) {
|
|
||||||
const match = url.match(/\/codeql-bundle-(.*)\//);
|
|
||||||
if (match === null || match.length < 2) {
|
|
||||||
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
|
|
||||||
}
|
|
||||||
return match[1];
|
|
||||||
}
|
|
||||||
exports.getCodeQLURLVersion = getCodeQLURLVersion;
|
|
||||||
function convertToSemVer(version, logger) {
|
|
||||||
if (!semver.valid(version)) {
|
|
||||||
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
|
|
||||||
version = `0.0.0-${version}`;
|
|
||||||
}
|
|
||||||
const s = semver.clean(version);
|
|
||||||
if (!s) {
|
|
||||||
throw new Error(`Bundle version ${version} is not in SemVer format.`);
|
|
||||||
}
|
|
||||||
return s;
|
|
||||||
}
|
|
||||||
exports.convertToSemVer = convertToSemVer;
|
|
||||||
/**
|
|
||||||
* Use the CodeQL executable located at the given path.
|
|
||||||
*/
|
|
||||||
function getCodeQL(cmd) {
|
|
||||||
if (cachedCodeQL === undefined) {
|
|
||||||
cachedCodeQL = getCodeQLForCmd(cmd);
|
|
||||||
}
|
|
||||||
return cachedCodeQL;
|
|
||||||
}
|
|
||||||
exports.getCodeQL = getCodeQL;
|
|
||||||
function resolveFunction(partialCodeql, methodName, defaultImplementation) {
|
|
||||||
if (typeof partialCodeql[methodName] !== "function") {
|
|
||||||
if (defaultImplementation !== undefined) {
|
|
||||||
return defaultImplementation;
|
|
||||||
}
|
|
||||||
const dummyMethod = () => {
|
|
||||||
throw new Error(`CodeQL ${methodName} method not correctly defined`);
|
|
||||||
};
|
|
||||||
return dummyMethod;
|
|
||||||
}
|
|
||||||
return partialCodeql[methodName];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set the functionality for CodeQL methods. Only for use in tests.
|
|
||||||
*
|
|
||||||
* Accepts a partial object and any undefined methods will be implemented
|
|
||||||
* to immediately throw an exception indicating which method is missing.
|
|
||||||
*/
|
|
||||||
function setCodeQL(partialCodeql) {
|
|
||||||
cachedCodeQL = {
|
|
||||||
getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"),
|
|
||||||
printVersion: resolveFunction(partialCodeql, "printVersion"),
|
|
||||||
getTracerEnv: resolveFunction(partialCodeql, "getTracerEnv"),
|
|
||||||
databaseInit: resolveFunction(partialCodeql, "databaseInit"),
|
|
||||||
runAutobuild: resolveFunction(partialCodeql, "runAutobuild"),
|
|
||||||
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
|
|
||||||
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
|
|
||||||
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
|
|
||||||
databaseAnalyze: resolveFunction(partialCodeql, "databaseAnalyze"),
|
|
||||||
};
|
|
||||||
return cachedCodeQL;
|
|
||||||
}
|
|
||||||
exports.setCodeQL = setCodeQL;
|
|
||||||
/**
|
|
||||||
* Get the cached CodeQL object. Should only be used from tests.
|
|
||||||
*
|
|
||||||
* TODO: Work out a good way for tests to get this from the test context
|
|
||||||
* instead of having to have this method.
|
|
||||||
*/
|
|
||||||
function getCachedCodeQL() {
|
|
||||||
if (cachedCodeQL === undefined) {
|
|
||||||
// Should never happen as setCodeQL is called by testing-utils.setupTests
|
|
||||||
throw new Error("cachedCodeQL undefined");
|
|
||||||
}
|
|
||||||
return cachedCodeQL;
|
|
||||||
}
|
|
||||||
exports.getCachedCodeQL = getCachedCodeQL;
|
|
||||||
function getCodeQLForCmd(cmd) {
|
|
||||||
return {
|
|
||||||
getPath() {
|
|
||||||
return cmd;
|
|
||||||
},
|
|
||||||
async printVersion() {
|
|
||||||
await new toolrunner.ToolRunner(cmd, ["version", "--format=json"]).exec();
|
|
||||||
},
|
|
||||||
async getTracerEnv(databasePath) {
|
|
||||||
// Write tracer-env.js to a temp location.
|
|
||||||
const tracerEnvJs = path.resolve(databasePath, "working", "tracer-env.js");
|
|
||||||
fs.mkdirSync(path.dirname(tracerEnvJs), { recursive: true });
|
|
||||||
fs.writeFileSync(tracerEnvJs, `
|
|
||||||
const fs = require('fs');
|
|
||||||
const env = {};
|
|
||||||
for (let entry of Object.entries(process.env)) {
|
|
||||||
const key = entry[0];
|
|
||||||
const value = entry[1];
|
|
||||||
if (typeof value !== 'undefined' && key !== '_' && !key.startsWith('JAVA_MAIN_CLASS_')) {
|
|
||||||
env[key] = value;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
process.stdout.write(process.argv[2]);
|
|
||||||
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');`);
|
|
||||||
const envFile = path.resolve(databasePath, "working", "env.tmp");
|
|
||||||
await new toolrunner.ToolRunner(cmd, [
|
|
||||||
"database",
|
|
||||||
"trace-command",
|
|
||||||
databasePath,
|
|
||||||
...getExtraOptionsFromEnv(["database", "trace-command"]),
|
|
||||||
process.execPath,
|
|
||||||
tracerEnvJs,
|
|
||||||
envFile,
|
|
||||||
]).exec();
|
|
||||||
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
|
|
||||||
},
|
|
||||||
async databaseInit(databasePath, language, sourceRoot) {
|
|
||||||
await new toolrunner.ToolRunner(cmd, [
|
|
||||||
"database",
|
|
||||||
"init",
|
|
||||||
databasePath,
|
|
||||||
`--language=${language}`,
|
|
||||||
`--source-root=${sourceRoot}`,
|
|
||||||
...getExtraOptionsFromEnv(["database", "init"]),
|
|
||||||
]).exec();
|
|
||||||
},
|
|
||||||
async runAutobuild(language) {
|
|
||||||
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
|
|
||||||
const autobuildCmd = path.join(path.dirname(cmd), language, "tools", cmdName);
|
|
||||||
// Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
|
|
||||||
// This is because of an issue with Azure pipelines timing out connections after 4 minutes
|
|
||||||
// and Maven not properly handling closed connections
|
|
||||||
// Otherwise long build processes will timeout when pulling down Java packages
|
|
||||||
// https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
|
|
||||||
const javaToolOptions = process.env["JAVA_TOOL_OPTIONS"] || "";
|
|
||||||
process.env["JAVA_TOOL_OPTIONS"] = [
|
|
||||||
...javaToolOptions.split(/\s+/),
|
|
||||||
"-Dhttp.keepAlive=false",
|
|
||||||
"-Dmaven.wagon.http.pool=false",
|
|
||||||
].join(" ");
|
|
||||||
await new toolrunner.ToolRunner(autobuildCmd).exec();
|
|
||||||
},
|
|
||||||
async extractScannedLanguage(databasePath, language) {
|
|
||||||
// Get extractor location
|
|
||||||
let extractorPath = "";
|
|
||||||
await new toolrunner.ToolRunner(cmd, [
|
|
||||||
"resolve",
|
|
||||||
"extractor",
|
|
||||||
"--format=json",
|
|
||||||
`--language=${language}`,
|
|
||||||
...getExtraOptionsFromEnv(["resolve", "extractor"]),
|
|
||||||
], {
|
|
||||||
silent: true,
|
|
||||||
listeners: {
|
|
||||||
stdout: (data) => {
|
|
||||||
extractorPath += data.toString();
|
|
||||||
},
|
|
||||||
stderr: (data) => {
|
|
||||||
process.stderr.write(data);
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}).exec();
|
|
||||||
// Set trace command
|
|
||||||
const ext = process.platform === "win32" ? ".cmd" : ".sh";
|
|
||||||
const traceCommand = path.resolve(JSON.parse(extractorPath), "tools", `autobuild${ext}`);
|
|
||||||
// Run trace command
|
|
||||||
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [
|
|
||||||
"database",
|
|
||||||
"trace-command",
|
|
||||||
...getExtraOptionsFromEnv(["database", "trace-command"]),
|
|
||||||
databasePath,
|
|
||||||
"--",
|
|
||||||
traceCommand,
|
|
||||||
], error_matcher_1.errorMatchers);
|
|
||||||
},
|
|
||||||
async finalizeDatabase(databasePath, threadsFlag) {
|
|
||||||
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [
|
|
||||||
"database",
|
|
||||||
"finalize",
|
|
||||||
threadsFlag,
|
|
||||||
...getExtraOptionsFromEnv(["database", "finalize"]),
|
|
||||||
databasePath,
|
|
||||||
], error_matcher_1.errorMatchers);
|
|
||||||
},
|
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
|
||||||
const codeqlArgs = [
|
|
||||||
"resolve",
|
|
||||||
"queries",
|
|
||||||
...queries,
|
|
||||||
"--format=bylanguage",
|
|
||||||
...getExtraOptionsFromEnv(["resolve", "queries"]),
|
|
||||||
];
|
|
||||||
if (extraSearchPath !== undefined) {
|
|
||||||
codeqlArgs.push("--search-path", extraSearchPath);
|
|
||||||
}
|
|
||||||
let output = "";
|
|
||||||
await new toolrunner.ToolRunner(cmd, codeqlArgs, {
|
|
||||||
listeners: {
|
|
||||||
stdout: (data) => {
|
|
||||||
output += data.toString();
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}).exec();
|
|
||||||
return JSON.parse(output);
|
|
||||||
},
|
|
||||||
async databaseAnalyze(databasePath, sarifFile, querySuite, memoryFlag, addSnippetsFlag, threadsFlag) {
|
|
||||||
await new toolrunner.ToolRunner(cmd, [
|
|
||||||
"database",
|
|
||||||
"analyze",
|
|
||||||
memoryFlag,
|
|
||||||
threadsFlag,
|
|
||||||
databasePath,
|
|
||||||
"--min-disk-free=1024",
|
|
||||||
"--format=sarif-latest",
|
|
||||||
"--sarif-multicause-markdown",
|
|
||||||
`--output=${sarifFile}`,
|
|
||||||
addSnippetsFlag,
|
|
||||||
...getExtraOptionsFromEnv(["database", "analyze"]),
|
|
||||||
querySuite,
|
|
||||||
]).exec();
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Gets the options for `path` of `options` as an array of extra option strings.
|
|
||||||
*/
|
|
||||||
function getExtraOptionsFromEnv(paths) {
|
|
||||||
const options = util.getExtraOptionsEnvParam();
|
|
||||||
return getExtraOptions(options, paths, []);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Gets `options` as an array of extra option strings.
|
|
||||||
*
|
|
||||||
* - throws an exception mentioning `pathInfo` if this conversion is impossible.
|
|
||||||
*/
|
|
||||||
function asExtraOptions(options, pathInfo) {
|
|
||||||
if (options === undefined) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
if (!Array.isArray(options)) {
|
|
||||||
const msg = `The extra options for '${pathInfo.join(".")}' ('${JSON.stringify(options)}') are not in an array.`;
|
|
||||||
throw new Error(msg);
|
|
||||||
}
|
|
||||||
return options.map((o) => {
|
|
||||||
const t = typeof o;
|
|
||||||
if (t !== "string" && t !== "number" && t !== "boolean") {
|
|
||||||
const msg = `The extra option for '${pathInfo.join(".")}' ('${JSON.stringify(o)}') is not a primitive value.`;
|
|
||||||
throw new Error(msg);
|
|
||||||
}
|
|
||||||
return `${o}`;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Gets the options for `path` of `options` as an array of extra option strings.
|
|
||||||
*
|
|
||||||
* - the special terminal step name '*' in `options` matches all path steps
|
|
||||||
* - throws an exception if this conversion is impossible.
|
|
||||||
*
|
|
||||||
* Exported for testing.
|
|
||||||
*/
|
|
||||||
function getExtraOptions(options, paths, pathInfo) {
|
|
||||||
var _a, _b, _c;
|
|
||||||
const all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a["*"], pathInfo.concat("*"));
|
|
||||||
const specific = paths.length === 0
|
|
||||||
? asExtraOptions(options, pathInfo)
|
|
||||||
: getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[paths[0]], (_c = paths) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(paths[0]));
|
|
||||||
return all.concat(specific);
|
|
||||||
}
|
|
||||||
exports.getExtraOptions = getExtraOptions;
|
|
||||||
//# sourceMappingURL=codeql.js.map
|
|
||||||
File diff suppressed because one or more lines are too long
189
lib/codeql.test.js
generated
189
lib/codeql.test.js
generated
@@ -1,189 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
|
||||||
const nock_1 = __importDefault(require("nock"));
|
|
||||||
const codeql = __importStar(require("./codeql"));
|
|
||||||
const defaults = __importStar(require("./defaults.json"));
|
|
||||||
const logging_1 = require("./logging");
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
|
||||||
const util = __importStar(require("./util"));
|
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
|
||||||
const sampleApiDetails = {
|
|
||||||
auth: "token",
|
|
||||||
url: "https://github.com",
|
|
||||||
};
|
|
||||||
const sampleGHAEApiDetails = {
|
|
||||||
auth: "token",
|
|
||||||
url: "https://example.githubenterprise.com",
|
|
||||||
};
|
|
||||||
ava_1.default("download codeql bundle cache", async (t) => {
|
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
|
||||||
util.setupActionsVars(tmpDir, tmpDir);
|
|
||||||
const versions = ["20200601", "20200610"];
|
|
||||||
for (let i = 0; i < versions.length; i++) {
|
|
||||||
const version = versions[i];
|
|
||||||
nock_1.default("https://example.com")
|
|
||||||
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
|
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
|
||||||
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
|
||||||
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
|
|
||||||
}
|
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
|
||||||
t.is(cachedVersions.length, 2);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
|
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
|
||||||
util.setupActionsVars(tmpDir, tmpDir);
|
|
||||||
nock_1.default("https://example.com")
|
|
||||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
|
||||||
nock_1.default("https://example.com")
|
|
||||||
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
|
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("don't download codeql bundle cache with pinned different version cached", async (t) => {
|
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
|
||||||
util.setupActionsVars(tmpDir, tmpDir);
|
|
||||||
nock_1.default("https://example.com")
|
|
||||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
|
||||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
|
||||||
t.is(cachedVersions.length, 1);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("download codeql bundle cache with different version cached (not pinned)", async (t) => {
|
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
|
||||||
util.setupActionsVars(tmpDir, tmpDir);
|
|
||||||
nock_1.default("https://example.com")
|
|
||||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
|
||||||
const platform = process.platform === "win32"
|
|
||||||
? "win64"
|
|
||||||
: process.platform === "linux"
|
|
||||||
? "linux64"
|
|
||||||
: "osx64";
|
|
||||||
nock_1.default("https://github.com")
|
|
||||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
|
||||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
|
||||||
t.is(cachedVersions.length, 2);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
|
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
|
||||||
util.setupActionsVars(tmpDir, tmpDir);
|
|
||||||
nock_1.default("https://example.com")
|
|
||||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
|
||||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
|
||||||
const platform = process.platform === "win32"
|
|
||||||
? "win64"
|
|
||||||
: process.platform === "linux"
|
|
||||||
? "linux64"
|
|
||||||
: "osx64";
|
|
||||||
nock_1.default("https://github.com")
|
|
||||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
|
||||||
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
|
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
|
||||||
t.is(cachedVersions.length, 2);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("download codeql bundle from github ae endpoint", async (t) => {
|
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
|
||||||
util.setupActionsVars(tmpDir, tmpDir);
|
|
||||||
const bundleAssetID = 10;
|
|
||||||
const platform = process.platform === "win32"
|
|
||||||
? "win64"
|
|
||||||
: process.platform === "linux"
|
|
||||||
? "linux64"
|
|
||||||
: "osx64";
|
|
||||||
const codeQLBundleName = `codeql-bundle-${platform}.tar.gz`;
|
|
||||||
nock_1.default("https://example.githubenterprise.com")
|
|
||||||
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/find/${defaults.bundleVersion}`)
|
|
||||||
.reply(200, {
|
|
||||||
assets: { [codeQLBundleName]: bundleAssetID },
|
|
||||||
});
|
|
||||||
nock_1.default("https://example.githubenterprise.com")
|
|
||||||
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/download/${bundleAssetID}`)
|
|
||||||
.reply(200, {
|
|
||||||
url: `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`,
|
|
||||||
});
|
|
||||||
nock_1.default("https://example.githubenterprise.com")
|
|
||||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
|
||||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
|
||||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, "runner", util.GitHubVariant.GHAE, logging_1.getRunnerLogger(true));
|
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
|
||||||
t.is(cachedVersions.length, 1);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("parse codeql bundle url version", (t) => {
|
|
||||||
t.deepEqual(codeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
|
|
||||||
});
|
|
||||||
ava_1.default("convert to semver", (t) => {
|
|
||||||
const tests = {
|
|
||||||
"20200601": "0.0.0-20200601",
|
|
||||||
"20200601.0": "0.0.0-20200601.0",
|
|
||||||
"20200601.0.0": "20200601.0.0",
|
|
||||||
"1.2.3": "1.2.3",
|
|
||||||
"1.2.3-alpha": "1.2.3-alpha",
|
|
||||||
"1.2.3-beta.1": "1.2.3-beta.1",
|
|
||||||
};
|
|
||||||
for (const [version, expectedVersion] of Object.entries(tests)) {
|
|
||||||
try {
|
|
||||||
const parsedVersion = codeql.convertToSemVer(version, logging_1.getRunnerLogger(true));
|
|
||||||
t.deepEqual(parsedVersion, expectedVersion);
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
t.fail(e.message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
ava_1.default("getExtraOptions works for explicit paths", (t) => {
|
|
||||||
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
|
|
||||||
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
|
|
||||||
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ["foo", "bar"], []), ["42"]);
|
|
||||||
});
|
|
||||||
ava_1.default("getExtraOptions works for wildcards", (t) => {
|
|
||||||
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
|
|
||||||
});
|
|
||||||
ava_1.default("getExtraOptions works for wildcards and explicit paths", (t) => {
|
|
||||||
const o1 = { "*": [42], foo: [87] };
|
|
||||||
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
|
|
||||||
const o2 = { "*": [42], foo: [87] };
|
|
||||||
t.deepEqual(codeql.getExtraOptions(o2, ["foo", "bar"], []), ["42"]);
|
|
||||||
const o3 = { "*": [42], foo: { "*": [87], bar: [99] } };
|
|
||||||
const p = ["foo", "bar"];
|
|
||||||
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
|
|
||||||
});
|
|
||||||
ava_1.default("getExtraOptions throws for bad content", (t) => {
|
|
||||||
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
|
|
||||||
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
|
|
||||||
t.throws(() => codeql.getExtraOptions({ "*": [42], foo: { "*": 87, bar: [99] } }, ["foo", "bar"], []));
|
|
||||||
});
|
|
||||||
//# sourceMappingURL=codeql.test.js.map
|
|
||||||
File diff suppressed because one or more lines are too long
681
lib/config-utils.js
generated
681
lib/config-utils.js
generated
@@ -7,422 +7,166 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const io = __importStar(require("@actions/io"));
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const yaml = __importStar(require("js-yaml"));
|
const yaml = __importStar(require("js-yaml"));
|
||||||
const api = __importStar(require("./api-client"));
|
const path = __importStar(require("path"));
|
||||||
const externalQueries = __importStar(require("./external-queries"));
|
const util = __importStar(require("./util"));
|
||||||
const languages_1 = require("./languages");
|
const NAME_PROPERTY = 'name';
|
||||||
// Property names from the user-supplied config file.
|
const DISPLAY_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
|
||||||
const NAME_PROPERTY = "name";
|
const QUERIES_PROPERTY = 'queries';
|
||||||
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
|
const QUERIES_USES_PROPERTY = 'uses';
|
||||||
const QUERIES_PROPERTY = "queries";
|
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
|
||||||
const QUERIES_USES_PROPERTY = "uses";
|
const PATHS_PROPERTY = 'paths';
|
||||||
const PATHS_IGNORE_PROPERTY = "paths-ignore";
|
class ExternalQuery {
|
||||||
const PATHS_PROPERTY = "paths";
|
constructor(repository, ref) {
|
||||||
/**
|
this.path = '';
|
||||||
* A list of queries from https://github.com/github/codeql that
|
this.repository = repository;
|
||||||
* we don't want to run. Disabling them here is a quicker alternative to
|
this.ref = ref;
|
||||||
* disabling them in the code scanning query suites. Queries should also
|
|
||||||
* be disabled in the suites, and removed from this list here once the
|
|
||||||
* bundle is updated to make those suite changes live.
|
|
||||||
*
|
|
||||||
* Format is a map from language to an array of path suffixes of .ql files.
|
|
||||||
*/
|
|
||||||
const DISABLED_BUILTIN_QUERIES = {
|
|
||||||
csharp: [
|
|
||||||
"ql/src/Security Features/CWE-937/VulnerablePackage.ql",
|
|
||||||
"ql/src/Security Features/CWE-451/MissingXFrameOptions.ql",
|
|
||||||
],
|
|
||||||
};
|
|
||||||
function queryIsDisabled(language, query) {
|
|
||||||
return (DISABLED_BUILTIN_QUERIES[language] || []).some((disabledQuery) => query.endsWith(disabledQuery));
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are
|
|
||||||
* both empty and errors if they are not.
|
|
||||||
*/
|
|
||||||
function validateQueries(resolvedQueries) {
|
|
||||||
const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
|
|
||||||
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
|
|
||||||
if (noDeclaredLanguageQueries.length !== 0) {
|
|
||||||
throw new Error(`${"The following queries do not declare a language. " +
|
|
||||||
"Their qlpack.yml files are either missing or is invalid.\n"}${noDeclaredLanguageQueries.join("\n")}`);
|
|
||||||
}
|
|
||||||
const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
|
|
||||||
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
|
|
||||||
if (multipleDeclaredLanguagesQueries.length !== 0) {
|
|
||||||
throw new Error(`${"The following queries declare multiple languages. " +
|
|
||||||
"Their qlpack.yml files are either missing or is invalid.\n"}${multipleDeclaredLanguagesQueries.join("\n")}`);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
/**
|
exports.ExternalQuery = ExternalQuery;
|
||||||
* Run 'codeql resolve queries' and add the results to resultMap
|
|
||||||
*
|
|
||||||
* If a checkout path is given then the queries are assumed to be custom queries
|
|
||||||
* and an error will be thrown if there is anything invalid about the queries.
|
|
||||||
* If a checkout path is not given then the queries are assumed to be builtin
|
|
||||||
* queries, and error checking will be suppressed.
|
|
||||||
*/
|
|
||||||
async function runResolveQueries(codeQL, resultMap, toResolve, extraSearchPath) {
|
|
||||||
const resolvedQueries = await codeQL.resolveQueries(toResolve, extraSearchPath);
|
|
||||||
if (extraSearchPath !== undefined) {
|
|
||||||
validateQueries(resolvedQueries);
|
|
||||||
}
|
|
||||||
for (const [language, queryPaths] of Object.entries(resolvedQueries.byLanguage)) {
|
|
||||||
if (resultMap[language] === undefined) {
|
|
||||||
resultMap[language] = {
|
|
||||||
builtin: [],
|
|
||||||
custom: [],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const queries = Object.keys(queryPaths).filter((q) => !queryIsDisabled(language, q));
|
|
||||||
if (extraSearchPath !== undefined) {
|
|
||||||
resultMap[language].custom.push(...queries);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
resultMap[language].builtin.push(...queries);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get the set of queries included by default.
|
|
||||||
*/
|
|
||||||
async function addDefaultQueries(codeQL, languages, resultMap) {
|
|
||||||
const suites = languages.map((l) => `${l}-code-scanning.qls`);
|
|
||||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
|
||||||
}
|
|
||||||
// The set of acceptable values for built-in suites from the codeql bundle
|
// The set of acceptable values for built-in suites from the codeql bundle
|
||||||
const builtinSuites = ["security-extended", "security-and-quality"];
|
const builtinSuites = ['security-extended', 'security-and-quality'];
|
||||||
/**
|
class Config {
|
||||||
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
|
constructor() {
|
||||||
* Throws an error if suiteName is not a valid builtin suite.
|
this.name = "";
|
||||||
*/
|
this.disableDefaultQueries = false;
|
||||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
|
this.additionalQueries = [];
|
||||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
this.externalQueries = [];
|
||||||
if (!found) {
|
this.additionalSuites = [];
|
||||||
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
this.pathsIgnore = [];
|
||||||
|
this.paths = [];
|
||||||
|
}
|
||||||
|
addQuery(configFile, queryUses) {
|
||||||
|
// The logic for parsing the string is based on what actions does for
|
||||||
|
// parsing the 'uses' actions in the workflow file
|
||||||
|
queryUses = queryUses.trim();
|
||||||
|
if (queryUses === "") {
|
||||||
|
throw new Error(getQueryUsesInvalid(configFile));
|
||||||
|
}
|
||||||
|
// Check for the local path case before we start trying to parse the repository name
|
||||||
|
if (queryUses.startsWith("./")) {
|
||||||
|
const localQueryPath = queryUses.slice(2);
|
||||||
|
// Resolve the local path against the workspace so that when this is
|
||||||
|
// passed to codeql it resolves to exactly the path we expect it to resolve to.
|
||||||
|
const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
|
||||||
|
const absoluteQueryPath = path.join(workspacePath, localQueryPath);
|
||||||
|
// Check the file exists
|
||||||
|
if (!fs.existsSync(absoluteQueryPath)) {
|
||||||
|
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
|
||||||
|
}
|
||||||
|
// Check the local path doesn't jump outside the repo using '..' or symlinks
|
||||||
|
if (!(fs.realpathSync(absoluteQueryPath) + path.sep).startsWith(workspacePath + path.sep)) {
|
||||||
|
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
|
||||||
|
}
|
||||||
|
this.additionalQueries.push(absoluteQueryPath);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Check for one of the builtin suites
|
||||||
|
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
|
||||||
|
const suite = builtinSuites.find((suite) => suite === queryUses);
|
||||||
|
if (suite) {
|
||||||
|
this.additionalSuites.push(suite);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let tok = queryUses.split('@');
|
||||||
|
if (tok.length !== 2) {
|
||||||
|
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||||
|
}
|
||||||
|
const ref = tok[1];
|
||||||
|
tok = tok[0].split('/');
|
||||||
|
// The first token is the owner
|
||||||
|
// The second token is the repo
|
||||||
|
// The rest is a path, if there is more than one token combine them to form the full path
|
||||||
|
if (tok.length < 2) {
|
||||||
|
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||||
|
}
|
||||||
|
if (tok.length > 3) {
|
||||||
|
tok = [tok[0], tok[1], tok.slice(2).join('/')];
|
||||||
|
}
|
||||||
|
// Check none of the parts of the repository name are empty
|
||||||
|
if (tok[0].trim() === '' || tok[1].trim() === '') {
|
||||||
|
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||||
|
}
|
||||||
|
let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
|
||||||
|
if (tok.length === 3) {
|
||||||
|
external.path = tok[2];
|
||||||
|
}
|
||||||
|
this.externalQueries.push(external);
|
||||||
}
|
}
|
||||||
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
|
|
||||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
|
||||||
}
|
}
|
||||||
/**
|
exports.Config = Config;
|
||||||
* Retrieve the set of queries at localQueryPath and add them to resultMap.
|
|
||||||
*/
|
|
||||||
async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath, configFile) {
|
|
||||||
// Resolve the local path against the workspace so that when this is
|
|
||||||
// passed to codeql it resolves to exactly the path we expect it to resolve to.
|
|
||||||
let absoluteQueryPath = path.join(checkoutPath, localQueryPath);
|
|
||||||
// Check the file exists
|
|
||||||
if (!fs.existsSync(absoluteQueryPath)) {
|
|
||||||
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
|
|
||||||
}
|
|
||||||
// Call this after checking file exists, because it'll fail if file doesn't exist
|
|
||||||
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
|
|
||||||
// Check the local path doesn't jump outside the repo using '..' or symlinks
|
|
||||||
if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(checkoutPath) + path.sep)) {
|
|
||||||
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
|
|
||||||
}
|
|
||||||
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Retrieve the set of queries at the referenced remote repo and add them to resultMap.
|
|
||||||
*/
|
|
||||||
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile) {
|
|
||||||
let tok = queryUses.split("@");
|
|
||||||
if (tok.length !== 2) {
|
|
||||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
|
||||||
}
|
|
||||||
const ref = tok[1];
|
|
||||||
tok = tok[0].split("/");
|
|
||||||
// The first token is the owner
|
|
||||||
// The second token is the repo
|
|
||||||
// The rest is a path, if there is more than one token combine them to form the full path
|
|
||||||
if (tok.length < 2) {
|
|
||||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
|
||||||
}
|
|
||||||
// Check none of the parts of the repository name are empty
|
|
||||||
if (tok[0].trim() === "" || tok[1].trim() === "") {
|
|
||||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
|
||||||
}
|
|
||||||
const nwo = `${tok[0]}/${tok[1]}`;
|
|
||||||
// Checkout the external repository
|
|
||||||
const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, apiDetails, tempDir, logger);
|
|
||||||
const queryPath = tok.length > 2
|
|
||||||
? path.join(checkoutPath, tok.slice(2).join("/"))
|
|
||||||
: checkoutPath;
|
|
||||||
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Parse a query 'uses' field to a discrete set of query files and update resultMap.
|
|
||||||
*
|
|
||||||
* The logic for parsing the string is based on what actions does for
|
|
||||||
* parsing the 'uses' actions in the workflow file. So it can handle
|
|
||||||
* local paths starting with './', or references to remote repos, or
|
|
||||||
* a finite set of hardcoded terms for builtin suites.
|
|
||||||
*/
|
|
||||||
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, apiDetails, logger, configFile) {
|
|
||||||
queryUses = queryUses.trim();
|
|
||||||
if (queryUses === "") {
|
|
||||||
throw new Error(getQueryUsesInvalid(configFile));
|
|
||||||
}
|
|
||||||
// Check for the local path case before we start trying to parse the repository name
|
|
||||||
if (queryUses.startsWith("./")) {
|
|
||||||
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), checkoutPath, configFile);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
// Check for one of the builtin suites
|
|
||||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
|
||||||
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
// Otherwise, must be a reference to another repo
|
|
||||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
|
||||||
}
|
|
||||||
// Regex validating stars in paths or paths-ignore entries.
|
|
||||||
// The intention is to only allow ** to appear when immediately
|
|
||||||
// preceded and followed by a slash.
|
|
||||||
const pathStarsRegex = /.*(?:\*\*[^/].*|\*\*$|[^/]\*\*.*)/;
|
|
||||||
// Characters that are supported by filters in workflows, but not by us.
|
|
||||||
// See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
|
|
||||||
const filterPatternCharactersRegex = /.*[?+[\]!].*/;
|
|
||||||
// Checks that a paths of paths-ignore entry is valid, possibly modifying it
|
|
||||||
// to make it valid, or if not possible then throws an error.
|
|
||||||
function validateAndSanitisePath(originalPath, propertyName, configFile, logger) {
|
|
||||||
// Take a copy so we don't modify the original path, so we can still construct error messages
|
|
||||||
let newPath = originalPath;
|
|
||||||
// All paths are relative to the src root, so strip off leading slashes.
|
|
||||||
while (newPath.charAt(0) === "/") {
|
|
||||||
newPath = newPath.substring(1);
|
|
||||||
}
|
|
||||||
// Trailing ** are redundant, so strip them off
|
|
||||||
if (newPath.endsWith("/**")) {
|
|
||||||
newPath = newPath.substring(0, newPath.length - 2);
|
|
||||||
}
|
|
||||||
// An empty path is not allowed as it's meaningless
|
|
||||||
if (newPath === "") {
|
|
||||||
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" is not an invalid path. ` +
|
|
||||||
`It is not necessary to include it, and it is not allowed to exclude it.`));
|
|
||||||
}
|
|
||||||
// Check for illegal uses of **
|
|
||||||
if (newPath.match(pathStarsRegex)) {
|
|
||||||
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an invalid "**" wildcard. ` +
|
|
||||||
`They must be immediately preceded and followed by a slash as in "/**/", or come at the start or end.`));
|
|
||||||
}
|
|
||||||
// Check for other regex characters that we don't support.
|
|
||||||
// Output a warning so the user knows, but otherwise continue normally.
|
|
||||||
if (newPath.match(filterPatternCharactersRegex)) {
|
|
||||||
logger.warning(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an unsupported character. ` +
|
|
||||||
`The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.`));
|
|
||||||
}
|
|
||||||
// Ban any uses of backslash for now.
|
|
||||||
// This may not play nicely with project layouts.
|
|
||||||
// This restriction can be lifted later if we determine they are ok.
|
|
||||||
if (newPath.indexOf("\\") !== -1) {
|
|
||||||
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an "\\" character. These are not allowed in filters. ` +
|
|
||||||
`If running on windows we recommend using "/" instead for path filters.`));
|
|
||||||
}
|
|
||||||
return newPath;
|
|
||||||
}
|
|
||||||
exports.validateAndSanitisePath = validateAndSanitisePath;
|
|
||||||
// An undefined configFile in some of these functions indicates that
|
|
||||||
// the property was in a workflow file, not a config file
|
|
||||||
function getNameInvalid(configFile) {
|
function getNameInvalid(configFile) {
|
||||||
return getConfigFilePropertyError(configFile, NAME_PROPERTY, "must be a non-empty string");
|
return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
|
||||||
}
|
}
|
||||||
exports.getNameInvalid = getNameInvalid;
|
exports.getNameInvalid = getNameInvalid;
|
||||||
function getDisableDefaultQueriesInvalid(configFile) {
|
function getDisableDefaultQueriesInvalid(configFile) {
|
||||||
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, "must be a boolean");
|
return getConfigFilePropertyError(configFile, DISPLAY_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
|
||||||
}
|
}
|
||||||
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
|
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
|
||||||
function getQueriesInvalid(configFile) {
|
function getQueriesInvalid(configFile) {
|
||||||
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array");
|
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
|
||||||
}
|
}
|
||||||
exports.getQueriesInvalid = getQueriesInvalid;
|
exports.getQueriesInvalid = getQueriesInvalid;
|
||||||
function getQueryUsesInvalid(configFile, queryUses) {
|
function getQueryUsesInvalid(configFile, queryUses) {
|
||||||
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `must be a built-in suite (${builtinSuites.join(" or ")}), a relative path, or be of the form "owner/repo[/path]@ref"${queryUses !== undefined ? `\n Found: ${queryUses}` : ""}`);
|
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
|
||||||
|
'), a relative path, or be of the form "owner/repo[/path]@ref"' +
|
||||||
|
(queryUses !== undefined ? '\n Found: ' + queryUses : ''));
|
||||||
}
|
}
|
||||||
exports.getQueryUsesInvalid = getQueryUsesInvalid;
|
exports.getQueryUsesInvalid = getQueryUsesInvalid;
|
||||||
function getPathsIgnoreInvalid(configFile) {
|
function getPathsIgnoreInvalid(configFile) {
|
||||||
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, "must be an array of non-empty strings");
|
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
|
||||||
}
|
}
|
||||||
exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
|
exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
|
||||||
function getPathsInvalid(configFile) {
|
function getPathsInvalid(configFile) {
|
||||||
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, "must be an array of non-empty strings");
|
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
|
||||||
}
|
}
|
||||||
exports.getPathsInvalid = getPathsInvalid;
|
exports.getPathsInvalid = getPathsInvalid;
|
||||||
function getLocalPathOutsideOfRepository(configFile, localPath) {
|
function getLocalPathOutsideOfRepository(configFile, localPath) {
|
||||||
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" is outside of the repository`);
|
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
|
||||||
}
|
}
|
||||||
exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
|
exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
|
||||||
function getLocalPathDoesNotExist(configFile, localPath) {
|
function getLocalPathDoesNotExist(configFile, localPath) {
|
||||||
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" does not exist in the repository`);
|
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
|
||||||
}
|
}
|
||||||
exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
|
exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
|
||||||
function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
|
function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
|
||||||
return `The configuration file "${configFile}" is outside of the workspace`;
|
return 'The configuration file "' + configFile + '" is outside of the workspace';
|
||||||
}
|
}
|
||||||
exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
|
exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
|
||||||
function getConfigFileDoesNotExistErrorMessage(configFile) {
|
function getConfigFileDoesNotExistErrorMessage(configFile) {
|
||||||
return `The configuration file "${configFile}" does not exist`;
|
return 'The configuration file "' + configFile + '" does not exist';
|
||||||
}
|
}
|
||||||
exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
|
exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
|
||||||
function getConfigFileRepoFormatInvalidMessage(configFile) {
|
|
||||||
let error = `The configuration file "${configFile}" is not a supported remote file reference.`;
|
|
||||||
error += " Expected format <owner>/<repository>/<file-path>@<ref>";
|
|
||||||
return error;
|
|
||||||
}
|
|
||||||
exports.getConfigFileRepoFormatInvalidMessage = getConfigFileRepoFormatInvalidMessage;
|
|
||||||
function getConfigFileFormatInvalidMessage(configFile) {
|
|
||||||
return `The configuration file "${configFile}" could not be read`;
|
|
||||||
}
|
|
||||||
exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage;
|
|
||||||
function getConfigFileDirectoryGivenMessage(configFile) {
|
|
||||||
return `The configuration file "${configFile}" looks like a directory, not a file`;
|
|
||||||
}
|
|
||||||
exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
|
|
||||||
function getConfigFilePropertyError(configFile, property, error) {
|
function getConfigFilePropertyError(configFile, property, error) {
|
||||||
if (configFile === undefined) {
|
return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
|
||||||
return `The workflow property "${property}" is invalid: ${error}`;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
return `The configuration file "${configFile}" is invalid: property "${property}" ${error}`;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
function getNoLanguagesError() {
|
function initConfig() {
|
||||||
return ("Did not detect any languages to analyze. " +
|
let configFile = core.getInput('config-file');
|
||||||
"Please update input in workflow or check that GitHub detects the correct languages in your repository.");
|
const config = new Config();
|
||||||
}
|
// If no config file was provided create an empty one
|
||||||
exports.getNoLanguagesError = getNoLanguagesError;
|
if (configFile === '') {
|
||||||
function getUnknownLanguagesError(languages) {
|
core.debug('No configuration file was provided');
|
||||||
return `Did not recognise the following languages: ${languages.join(", ")}`;
|
return config;
|
||||||
}
|
|
||||||
exports.getUnknownLanguagesError = getUnknownLanguagesError;
|
|
||||||
/**
|
|
||||||
* Gets the set of languages in the current repository
|
|
||||||
*/
|
|
||||||
async function getLanguagesInRepo(repository, apiDetails, logger) {
|
|
||||||
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
|
|
||||||
const response = await api
|
|
||||||
.getApiClient(apiDetails, { allowLocalRun: true })
|
|
||||||
.repos.listLanguages({
|
|
||||||
owner: repository.owner,
|
|
||||||
repo: repository.repo,
|
|
||||||
});
|
|
||||||
logger.debug(`Languages API response: ${JSON.stringify(response)}`);
|
|
||||||
// The GitHub API is going to return languages in order of popularity,
|
|
||||||
// When we pick a language to autobuild we want to pick the most popular traced language
|
|
||||||
// Since sets in javascript maintain insertion order, using a set here and then splatting it
|
|
||||||
// into an array gives us an array of languages ordered by popularity
|
|
||||||
const languages = new Set();
|
|
||||||
for (const lang of Object.keys(response.data)) {
|
|
||||||
const parsedLang = languages_1.parseLanguage(lang);
|
|
||||||
if (parsedLang !== undefined) {
|
|
||||||
languages.add(parsedLang);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return [...languages];
|
// Treat the config file as relative to the workspace
|
||||||
}
|
const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
|
||||||
/**
|
configFile = path.resolve(workspacePath, configFile);
|
||||||
* Get the languages to analyse.
|
// Error if the config file is now outside of the workspace
|
||||||
*
|
if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
|
||||||
* The result is obtained from the action input parameter 'languages' if that
|
throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
|
||||||
* has been set, otherwise it is deduced as all languages in the repo that
|
|
||||||
* can be analysed.
|
|
||||||
*
|
|
||||||
* If no languages could be detected from either the workflow or the repository
|
|
||||||
* then throw an error.
|
|
||||||
*/
|
|
||||||
async function getLanguages(languagesInput, repository, apiDetails, logger) {
|
|
||||||
// Obtain from action input 'languages' if set
|
|
||||||
let languages = (languagesInput || "")
|
|
||||||
.split(",")
|
|
||||||
.map((x) => x.trim())
|
|
||||||
.filter((x) => x.length > 0);
|
|
||||||
logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
|
|
||||||
if (languages.length === 0) {
|
|
||||||
// Obtain languages as all languages in the repo that can be analysed
|
|
||||||
languages = await getLanguagesInRepo(repository, apiDetails, logger);
|
|
||||||
logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
|
|
||||||
}
|
}
|
||||||
// If the languages parameter was not given and no languages were
|
// Error if the file does not exist
|
||||||
// detected then fail here as this is a workflow configuration error.
|
if (!fs.existsSync(configFile)) {
|
||||||
if (languages.length === 0) {
|
throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
|
||||||
throw new Error(getNoLanguagesError());
|
|
||||||
}
|
}
|
||||||
// Make sure they are supported
|
const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
|
||||||
const parsedLanguages = [];
|
|
||||||
const unknownLanguages = [];
|
|
||||||
for (const language of languages) {
|
|
||||||
const parsedLanguage = languages_1.parseLanguage(language);
|
|
||||||
if (parsedLanguage === undefined) {
|
|
||||||
unknownLanguages.push(language);
|
|
||||||
}
|
|
||||||
else if (parsedLanguages.indexOf(parsedLanguage) === -1) {
|
|
||||||
parsedLanguages.push(parsedLanguage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (unknownLanguages.length > 0) {
|
|
||||||
throw new Error(getUnknownLanguagesError(unknownLanguages));
|
|
||||||
}
|
|
||||||
return parsedLanguages;
|
|
||||||
}
|
|
||||||
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, apiDetails, logger) {
|
|
||||||
queriesInput = queriesInput.trim();
|
|
||||||
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
|
||||||
queriesInput = queriesInput.replace(/^\+/, "");
|
|
||||||
for (const query of queriesInput.split(",")) {
|
|
||||||
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, apiDetails, logger);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Returns true if either no queries were provided in the workflow.
|
|
||||||
// or if the queries in the workflow were provided in "additive" mode,
|
|
||||||
// indicating that they shouldn't override the config queries but
|
|
||||||
// should instead be added in addition
|
|
||||||
function shouldAddConfigFileQueries(queriesInput) {
|
|
||||||
if (queriesInput) {
|
|
||||||
return queriesInput.trimStart().substr(0, 1) === "+";
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get the default config for when the user has not supplied one.
|
|
||||||
*/
|
|
||||||
async function getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
|
|
||||||
const languages = await getLanguages(languagesInput, repository, apiDetails, logger);
|
|
||||||
const queries = {};
|
|
||||||
await addDefaultQueries(codeQL, languages, queries);
|
|
||||||
if (queriesInput) {
|
|
||||||
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
languages,
|
|
||||||
queries,
|
|
||||||
pathsIgnore: [],
|
|
||||||
paths: [],
|
|
||||||
originalUserInput: {},
|
|
||||||
tempDir,
|
|
||||||
toolCacheDir,
|
|
||||||
codeQLCmd: codeQL.getPath(),
|
|
||||||
gitHubVersion,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
exports.getDefaultConfig = getDefaultConfig;
|
|
||||||
/**
|
|
||||||
* Load the config from the given file.
|
|
||||||
*/
|
|
||||||
async function loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
|
|
||||||
let parsedYAML;
|
|
||||||
if (isLocal(configFile)) {
|
|
||||||
// Treat the config file as relative to the workspace
|
|
||||||
configFile = path.resolve(checkoutPath, configFile);
|
|
||||||
parsedYAML = getLocalConfig(configFile, checkoutPath);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
parsedYAML = await getRemoteConfig(configFile, apiDetails);
|
|
||||||
}
|
|
||||||
// Validate that the 'name' property is syntactically correct,
|
|
||||||
// even though we don't use the value yet.
|
|
||||||
if (NAME_PROPERTY in parsedYAML) {
|
if (NAME_PROPERTY in parsedYAML) {
|
||||||
if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
|
if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
|
||||||
throw new Error(getNameInvalid(configFile));
|
throw new Error(getNameInvalid(configFile));
|
||||||
@@ -430,183 +174,78 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
|
|||||||
if (parsedYAML[NAME_PROPERTY].length === 0) {
|
if (parsedYAML[NAME_PROPERTY].length === 0) {
|
||||||
throw new Error(getNameInvalid(configFile));
|
throw new Error(getNameInvalid(configFile));
|
||||||
}
|
}
|
||||||
|
config.name = parsedYAML[NAME_PROPERTY];
|
||||||
}
|
}
|
||||||
const languages = await getLanguages(languagesInput, repository, apiDetails, logger);
|
if (DISPLAY_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
|
||||||
const queries = {};
|
if (typeof parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
|
||||||
const pathsIgnore = [];
|
|
||||||
const paths = [];
|
|
||||||
let disableDefaultQueries = false;
|
|
||||||
if (DISABLE_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
|
|
||||||
if (typeof parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
|
|
||||||
throw new Error(getDisableDefaultQueriesInvalid(configFile));
|
throw new Error(getDisableDefaultQueriesInvalid(configFile));
|
||||||
}
|
}
|
||||||
disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
|
config.disableDefaultQueries = parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY];
|
||||||
}
|
}
|
||||||
if (!disableDefaultQueries) {
|
if (QUERIES_PROPERTY in parsedYAML) {
|
||||||
await addDefaultQueries(codeQL, languages, queries);
|
|
||||||
}
|
|
||||||
// If queries were provided using `with` in the action configuration,
|
|
||||||
// they should take precedence over the queries in the config file
|
|
||||||
// unless they're prefixed with "+", in which case they supplement those
|
|
||||||
// in the config file.
|
|
||||||
if (queriesInput) {
|
|
||||||
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
|
|
||||||
}
|
|
||||||
if (shouldAddConfigFileQueries(queriesInput) &&
|
|
||||||
QUERIES_PROPERTY in parsedYAML) {
|
|
||||||
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
|
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
|
||||||
throw new Error(getQueriesInvalid(configFile));
|
throw new Error(getQueriesInvalid(configFile));
|
||||||
}
|
}
|
||||||
for (const query of parsedYAML[QUERIES_PROPERTY]) {
|
parsedYAML[QUERIES_PROPERTY].forEach(query => {
|
||||||
if (!(QUERIES_USES_PROPERTY in query) ||
|
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||||
typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
|
||||||
throw new Error(getQueryUsesInvalid(configFile));
|
throw new Error(getQueryUsesInvalid(configFile));
|
||||||
}
|
}
|
||||||
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, apiDetails, logger, configFile);
|
config.addQuery(configFile, query[QUERIES_USES_PROPERTY]);
|
||||||
}
|
});
|
||||||
}
|
}
|
||||||
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
|
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
|
||||||
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
|
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
|
||||||
throw new Error(getPathsIgnoreInvalid(configFile));
|
throw new Error(getPathsIgnoreInvalid(configFile));
|
||||||
}
|
}
|
||||||
for (const ignorePath of parsedYAML[PATHS_IGNORE_PROPERTY]) {
|
parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
|
||||||
if (typeof ignorePath !== "string" || ignorePath === "") {
|
if (typeof path !== "string" || path === '') {
|
||||||
throw new Error(getPathsIgnoreInvalid(configFile));
|
throw new Error(getPathsIgnoreInvalid(configFile));
|
||||||
}
|
}
|
||||||
pathsIgnore.push(validateAndSanitisePath(ignorePath, PATHS_IGNORE_PROPERTY, configFile, logger));
|
config.pathsIgnore.push(path);
|
||||||
}
|
});
|
||||||
}
|
}
|
||||||
if (PATHS_PROPERTY in parsedYAML) {
|
if (PATHS_PROPERTY in parsedYAML) {
|
||||||
if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
|
if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
|
||||||
throw new Error(getPathsInvalid(configFile));
|
throw new Error(getPathsInvalid(configFile));
|
||||||
}
|
}
|
||||||
for (const includePath of parsedYAML[PATHS_PROPERTY]) {
|
parsedYAML[PATHS_PROPERTY].forEach(path => {
|
||||||
if (typeof includePath !== "string" || includePath === "") {
|
if (typeof path !== "string" || path === '') {
|
||||||
throw new Error(getPathsInvalid(configFile));
|
throw new Error(getPathsInvalid(configFile));
|
||||||
}
|
}
|
||||||
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
|
config.paths.push(path);
|
||||||
}
|
});
|
||||||
}
|
}
|
||||||
// The list of queries should not be empty for any language. If it is then
|
|
||||||
// it is a user configuration error.
|
|
||||||
for (const language of languages) {
|
|
||||||
if (queries[language] === undefined ||
|
|
||||||
(queries[language].builtin.length === 0 &&
|
|
||||||
queries[language].custom.length === 0)) {
|
|
||||||
throw new Error(`Did not detect any queries to run for ${language}. ` +
|
|
||||||
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
languages,
|
|
||||||
queries,
|
|
||||||
pathsIgnore,
|
|
||||||
paths,
|
|
||||||
originalUserInput: parsedYAML,
|
|
||||||
tempDir,
|
|
||||||
toolCacheDir,
|
|
||||||
codeQLCmd: codeQL.getPath(),
|
|
||||||
gitHubVersion,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Load and return the config.
|
|
||||||
*
|
|
||||||
* This will parse the config from the user input if present, or generate
|
|
||||||
* a default config. The parsed config is then stored to a known location.
|
|
||||||
*/
|
|
||||||
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
|
|
||||||
let config;
|
|
||||||
// If no config file was provided create an empty one
|
|
||||||
if (!configFile) {
|
|
||||||
logger.debug("No configuration file was provided");
|
|
||||||
config = await getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
config = await loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
|
|
||||||
}
|
|
||||||
// Save the config so we can easily access it again in the future
|
|
||||||
await saveConfig(config, logger);
|
|
||||||
return config;
|
return config;
|
||||||
}
|
}
|
||||||
exports.initConfig = initConfig;
|
function getConfigFolder() {
|
||||||
function isLocal(configPath) {
|
return util.getRequiredEnvParam('RUNNER_TEMP');
|
||||||
// If the path starts with ./, look locally
|
|
||||||
if (configPath.indexOf("./") === 0) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
return configPath.indexOf("@") === -1;
|
|
||||||
}
|
}
|
||||||
function getLocalConfig(configFile, checkoutPath) {
|
function getConfigFile() {
|
||||||
// Error if the config file is now outside of the workspace
|
return path.join(getConfigFolder(), 'config');
|
||||||
if (!(configFile + path.sep).startsWith(checkoutPath + path.sep)) {
|
|
||||||
throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
|
|
||||||
}
|
|
||||||
// Error if the file does not exist
|
|
||||||
if (!fs.existsSync(configFile)) {
|
|
||||||
throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
|
|
||||||
}
|
|
||||||
return yaml.safeLoad(fs.readFileSync(configFile, "utf8"));
|
|
||||||
}
|
}
|
||||||
async function getRemoteConfig(configFile, apiDetails) {
|
exports.getConfigFile = getConfigFile;
|
||||||
// retrieve the various parts of the config location, and ensure they're present
|
async function saveConfig(config) {
|
||||||
const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)");
|
const configString = JSON.stringify(config);
|
||||||
const pieces = format.exec(configFile);
|
await io.mkdirP(getConfigFolder());
|
||||||
// 5 = 4 groups + the whole expression
|
fs.writeFileSync(getConfigFile(), configString, 'utf8');
|
||||||
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
|
core.debug('Saved config:');
|
||||||
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
|
core.debug(configString);
|
||||||
}
|
}
|
||||||
const response = await api
|
async function loadConfig() {
|
||||||
.getApiClient(apiDetails, { allowLocalRun: true, allowExternal: true })
|
const configFile = getConfigFile();
|
||||||
.repos.getContent({
|
if (fs.existsSync(configFile)) {
|
||||||
owner: pieces.groups.owner,
|
const configString = fs.readFileSync(configFile, 'utf8');
|
||||||
repo: pieces.groups.repo,
|
core.debug('Loaded config:');
|
||||||
path: pieces.groups.path,
|
core.debug(configString);
|
||||||
ref: pieces.groups.ref,
|
return JSON.parse(configString);
|
||||||
});
|
|
||||||
let fileContents;
|
|
||||||
if ("content" in response.data && response.data.content !== undefined) {
|
|
||||||
fileContents = response.data.content;
|
|
||||||
}
|
|
||||||
else if (Array.isArray(response.data)) {
|
|
||||||
throw new Error(getConfigFileDirectoryGivenMessage(configFile));
|
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
throw new Error(getConfigFileFormatInvalidMessage(configFile));
|
const config = initConfig();
|
||||||
|
core.debug('Initialized config:');
|
||||||
|
core.debug(JSON.stringify(config));
|
||||||
|
await saveConfig(config);
|
||||||
|
return config;
|
||||||
}
|
}
|
||||||
return yaml.safeLoad(Buffer.from(fileContents, "base64").toString("binary"));
|
|
||||||
}
|
}
|
||||||
/**
|
exports.loadConfig = loadConfig;
|
||||||
* Get the file path where the parsed config will be stored.
|
|
||||||
*/
|
|
||||||
function getPathToParsedConfigFile(tempDir) {
|
|
||||||
return path.join(tempDir, "config");
|
|
||||||
}
|
|
||||||
exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
|
|
||||||
/**
|
|
||||||
* Store the given config to the path returned from getPathToParsedConfigFile.
|
|
||||||
*/
|
|
||||||
async function saveConfig(config, logger) {
|
|
||||||
const configString = JSON.stringify(config);
|
|
||||||
const configFile = getPathToParsedConfigFile(config.tempDir);
|
|
||||||
fs.mkdirSync(path.dirname(configFile), { recursive: true });
|
|
||||||
fs.writeFileSync(configFile, configString, "utf8");
|
|
||||||
logger.debug("Saved config:");
|
|
||||||
logger.debug(configString);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get the config that has been saved to the given temp dir.
|
|
||||||
* If the config could not be found then returns undefined.
|
|
||||||
*/
|
|
||||||
async function getConfig(tempDir, logger) {
|
|
||||||
const configFile = getPathToParsedConfigFile(tempDir);
|
|
||||||
if (!fs.existsSync(configFile)) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
const configString = fs.readFileSync(configFile, "utf8");
|
|
||||||
logger.debug("Loaded config:");
|
|
||||||
logger.debug(configString);
|
|
||||||
return JSON.parse(configString);
|
|
||||||
}
|
|
||||||
exports.getConfig = getConfig;
|
|
||||||
//# sourceMappingURL=config-utils.js.map
|
//# sourceMappingURL=config-utils.js.map
|
||||||
File diff suppressed because one or more lines are too long
626
lib/config-utils.test.js
generated
626
lib/config-utils.test.js
generated
@@ -1,4 +1,7 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
if (mod && mod.__esModule) return mod;
|
if (mod && mod.__esModule) return mod;
|
||||||
var result = {};
|
var result = {};
|
||||||
@@ -6,465 +9,83 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
result["default"] = mod;
|
result["default"] = mod;
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const github = __importStar(require("@actions/github"));
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
|
||||||
const sinon_1 = __importDefault(require("sinon"));
|
|
||||||
const api = __importStar(require("./api-client"));
|
|
||||||
const codeql_1 = require("./codeql");
|
|
||||||
const configUtils = __importStar(require("./config-utils"));
|
const configUtils = __importStar(require("./config-utils"));
|
||||||
const languages_1 = require("./languages");
|
|
||||||
const logging_1 = require("./logging");
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
function setInput(name, value) {
|
||||||
const sampleApiDetails = {
|
// Transformation copied from
|
||||||
auth: "token",
|
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
|
||||||
externalRepoAuth: "token",
|
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
|
||||||
url: "https://github.example.com",
|
if (value !== undefined) {
|
||||||
};
|
process.env[envVar] = value;
|
||||||
const gitHubVersion = { type: util.GitHubVariant.DOTCOM };
|
}
|
||||||
// Returns the filepath of the newly-created file
|
else {
|
||||||
function createConfigFile(inputFileContents, tmpDir) {
|
delete process.env[envVar];
|
||||||
const configFilePath = path.join(tmpDir, "input");
|
|
||||||
fs.writeFileSync(configFilePath, inputFileContents, "utf8");
|
|
||||||
return configFilePath;
|
|
||||||
}
|
|
||||||
function mockGetContents(content) {
|
|
||||||
// Passing an auth token is required, so we just use a dummy value
|
|
||||||
const client = github.getOctokit("123");
|
|
||||||
const response = {
|
|
||||||
data: content,
|
|
||||||
};
|
|
||||||
const spyGetContents = sinon_1.default
|
|
||||||
.stub(client.repos, "getContent")
|
|
||||||
.resolves(response);
|
|
||||||
sinon_1.default.stub(api, "getApiClient").value(() => client);
|
|
||||||
return spyGetContents;
|
|
||||||
}
|
|
||||||
function mockListLanguages(languages) {
|
|
||||||
// Passing an auth token is required, so we just use a dummy value
|
|
||||||
const client = github.getOctokit("123");
|
|
||||||
const response = {
|
|
||||||
data: {},
|
|
||||||
};
|
|
||||||
for (const language of languages) {
|
|
||||||
response.data[language] = 123;
|
|
||||||
}
|
}
|
||||||
sinon_1.default.stub(client.repos, "listLanguages").resolves(response);
|
|
||||||
sinon_1.default.stub(api, "getApiClient").value(() => client);
|
|
||||||
}
|
}
|
||||||
ava_1.default("load empty config", async (t) => {
|
ava_1.default("load empty config", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const logger = logging_1.getRunnerLogger(true);
|
process.env['RUNNER_TEMP'] = tmpDir;
|
||||||
const languages = "javascript,python";
|
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||||
const codeQL = codeql_1.setCodeQL({
|
setInput('config-file', undefined);
|
||||||
async resolveQueries() {
|
const config = await configUtils.loadConfig();
|
||||||
return {
|
t.deepEqual(config, new configUtils.Config());
|
||||||
byLanguage: {},
|
|
||||||
noDeclaredLanguage: {},
|
|
||||||
multipleDeclaredLanguages: {},
|
|
||||||
};
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const config = await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
|
|
||||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("loading config saves config", async (t) => {
|
ava_1.default("loading config saves config", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const logger = logging_1.getRunnerLogger(true);
|
process.env['RUNNER_TEMP'] = tmpDir;
|
||||||
const codeQL = codeql_1.setCodeQL({
|
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||||
async resolveQueries() {
|
const configFile = configUtils.getConfigFile();
|
||||||
return {
|
|
||||||
byLanguage: {},
|
|
||||||
noDeclaredLanguage: {},
|
|
||||||
multipleDeclaredLanguages: {},
|
|
||||||
};
|
|
||||||
},
|
|
||||||
});
|
|
||||||
// Sanity check the saved config file does not already exist
|
// Sanity check the saved config file does not already exist
|
||||||
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
t.false(fs.existsSync(configFile));
|
||||||
// Sanity check that getConfig returns undefined before we have called initConfig
|
const config = await configUtils.loadConfig();
|
||||||
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
|
|
||||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
|
|
||||||
// The saved config file should now exist
|
// The saved config file should now exist
|
||||||
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
t.true(fs.existsSync(configFile));
|
||||||
// And that same newly-initialised config should now be returned by getConfig
|
// And the contents should parse correctly to the config that was returned
|
||||||
const config2 = await configUtils.getConfig(tmpDir, logger);
|
t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
|
||||||
t.deepEqual(config1, config2);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("load input outside of workspace", async (t) => {
|
ava_1.default("load input outside of workspace", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
process.env['RUNNER_TEMP'] = tmpDir;
|
||||||
|
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||||
|
setInput('config-file', '../input');
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(undefined, undefined, "../input", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.loadConfig();
|
||||||
throw new Error("initConfig did not throw error");
|
throw new Error('loadConfig did not throw error');
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, "../input"))));
|
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("load non-local input with invalid repo syntax", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
// no filename given, just a repo
|
|
||||||
const configFile = "octo-org/codeql-config@main";
|
|
||||||
try {
|
|
||||||
await configUtils.initConfig(undefined, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
throw new Error("initConfig did not throw error");
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage("octo-org/codeql-config@main")));
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("load non-existent input", async (t) => {
|
ava_1.default("load non-existent input", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const languages = "javascript";
|
process.env['RUNNER_TEMP'] = tmpDir;
|
||||||
const configFile = "input";
|
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||||
t.false(fs.existsSync(path.join(tmpDir, configFile)));
|
t.false(fs.existsSync(path.join(tmpDir, 'input')));
|
||||||
|
setInput('config-file', 'input');
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.loadConfig();
|
||||||
throw new Error("initConfig did not throw error");
|
throw new Error('loadConfig did not throw error');
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, "input"))));
|
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("load non-empty input", async (t) => {
|
ava_1.default("load non-empty input", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const codeQL = codeql_1.setCodeQL({
|
process.env['RUNNER_TEMP'] = tmpDir;
|
||||||
async resolveQueries() {
|
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||||
return {
|
|
||||||
byLanguage: {
|
|
||||||
javascript: {
|
|
||||||
"/foo/a.ql": {},
|
|
||||||
"/bar/b.ql": {},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
noDeclaredLanguage: {},
|
|
||||||
multipleDeclaredLanguages: {},
|
|
||||||
};
|
|
||||||
},
|
|
||||||
});
|
|
||||||
// Just create a generic config object with non-default values for all fields
|
// Just create a generic config object with non-default values for all fields
|
||||||
const inputFileContents = `
|
const inputFileContents = `
|
||||||
name: my config
|
name: my config
|
||||||
disable-default-queries: true
|
disable-default-queries: true
|
||||||
queries:
|
|
||||||
- uses: ./foo
|
|
||||||
paths-ignore:
|
|
||||||
- a
|
|
||||||
- b
|
|
||||||
paths:
|
|
||||||
- c/d`;
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
|
||||||
// And the config we expect it to parse to
|
|
||||||
const expectedConfig = {
|
|
||||||
languages: [languages_1.Language.javascript],
|
|
||||||
queries: {
|
|
||||||
javascript: {
|
|
||||||
builtin: [],
|
|
||||||
custom: ["/foo/a.ql", "/bar/b.ql"],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pathsIgnore: ["a", "b"],
|
|
||||||
paths: ["c/d"],
|
|
||||||
originalUserInput: {
|
|
||||||
name: "my config",
|
|
||||||
"disable-default-queries": true,
|
|
||||||
queries: [{ uses: "./foo" }],
|
|
||||||
"paths-ignore": ["a", "b"],
|
|
||||||
paths: ["c/d"],
|
|
||||||
},
|
|
||||||
tempDir: tmpDir,
|
|
||||||
toolCacheDir: tmpDir,
|
|
||||||
codeQLCmd: codeQL.getPath(),
|
|
||||||
gitHubVersion,
|
|
||||||
};
|
|
||||||
const languages = "javascript";
|
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
|
||||||
const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
// Should exactly equal the object we constructed earlier
|
|
||||||
t.deepEqual(actualConfig, expectedConfig);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Default queries are used", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
// Check that the default behaviour is to add the default queries.
|
|
||||||
// In this case if a config file is specified but does not include
|
|
||||||
// the disable-default-queries field.
|
|
||||||
// We determine this by whether CodeQL.resolveQueries is called
|
|
||||||
// with the correct arguments.
|
|
||||||
const resolveQueriesArgs = [];
|
|
||||||
const codeQL = codeql_1.setCodeQL({
|
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
|
||||||
return {
|
|
||||||
byLanguage: {
|
|
||||||
javascript: {
|
|
||||||
"foo.ql": {},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
noDeclaredLanguage: {},
|
|
||||||
multipleDeclaredLanguages: {},
|
|
||||||
};
|
|
||||||
},
|
|
||||||
});
|
|
||||||
// The important point of this config is that it doesn't specify
|
|
||||||
// the disable-default-queries field.
|
|
||||||
// Any other details are hopefully irrelevant for this test.
|
|
||||||
const inputFileContents = `
|
|
||||||
paths:
|
|
||||||
- foo`;
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
|
||||||
const languages = "javascript";
|
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
|
||||||
await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
// Check resolve queries was called correctly
|
|
||||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
|
||||||
t.deepEqual(resolveQueriesArgs[0].queries, [
|
|
||||||
"javascript-code-scanning.qls",
|
|
||||||
]);
|
|
||||||
t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
/**
|
|
||||||
* Returns the provided queries, just in the right format for a resolved query
|
|
||||||
* This way we can test by seeing which returned items are in the final
|
|
||||||
* configuration.
|
|
||||||
*/
|
|
||||||
function queriesToResolvedQueryForm(queries) {
|
|
||||||
const dummyResolvedQueries = {};
|
|
||||||
for (const q of queries) {
|
|
||||||
dummyResolvedQueries[q] = {};
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
byLanguage: {
|
|
||||||
javascript: dummyResolvedQueries,
|
|
||||||
},
|
|
||||||
noDeclaredLanguage: {},
|
|
||||||
multipleDeclaredLanguages: {},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
ava_1.default("Queries can be specified in config file", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
const inputFileContents = `
|
|
||||||
name: my config
|
|
||||||
queries:
|
|
||||||
- uses: ./foo`;
|
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
|
||||||
const resolveQueriesArgs = [];
|
|
||||||
const codeQL = codeql_1.setCodeQL({
|
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
|
||||||
return queriesToResolvedQueryForm(queries);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const languages = "javascript";
|
|
||||||
const config = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
// Check resolveQueries was called correctly
|
|
||||||
// It'll be called once for the default queries
|
|
||||||
// and once for `./foo` from the config file.
|
|
||||||
t.deepEqual(resolveQueriesArgs.length, 2);
|
|
||||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
|
||||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/foo$/);
|
|
||||||
// Now check that the end result contains the default queries and the query from config
|
|
||||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
|
||||||
t.deepEqual(config.queries["javascript"].custom.length, 1);
|
|
||||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
|
||||||
t.regex(config.queries["javascript"].custom[0], /.*\/foo$/);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Queries from config file can be overridden in workflow file", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
const inputFileContents = `
|
|
||||||
name: my config
|
|
||||||
queries:
|
|
||||||
- uses: ./foo`;
|
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
|
||||||
// This config item should take precedence over the config file but shouldn't affect the default queries.
|
|
||||||
const testQueries = "./override";
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "override"));
|
|
||||||
const resolveQueriesArgs = [];
|
|
||||||
const codeQL = codeql_1.setCodeQL({
|
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
|
||||||
return queriesToResolvedQueryForm(queries);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const languages = "javascript";
|
|
||||||
const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
// Check resolveQueries was called correctly
|
|
||||||
// It'll be called once for the default queries and once for `./override`,
|
|
||||||
// but won't be called for './foo' from the config file.
|
|
||||||
t.deepEqual(resolveQueriesArgs.length, 2);
|
|
||||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
|
||||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override$/);
|
|
||||||
// Now check that the end result contains only the default queries and the override query
|
|
||||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
|
||||||
t.deepEqual(config.queries["javascript"].custom.length, 1);
|
|
||||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
|
||||||
t.regex(config.queries["javascript"].custom[0], /.*\/override$/);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
process.env["RUNNER_TEMP"] = tmpDir;
|
|
||||||
process.env["GITHUB_WORKSPACE"] = tmpDir;
|
|
||||||
const inputFileContents = `
|
|
||||||
name: my config
|
|
||||||
disable-default-queries: true`;
|
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
|
||||||
const testQueries = "./workflow-query";
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "workflow-query"));
|
|
||||||
const resolveQueriesArgs = [];
|
|
||||||
const codeQL = codeql_1.setCodeQL({
|
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
|
||||||
return queriesToResolvedQueryForm(queries);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const languages = "javascript";
|
|
||||||
const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
// Check resolveQueries was called correctly
|
|
||||||
// It'll be called once for `./workflow-query`,
|
|
||||||
// but won't be called for the default one since that was disabled
|
|
||||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
|
||||||
t.deepEqual(resolveQueriesArgs[0].queries.length, 1);
|
|
||||||
t.regex(resolveQueriesArgs[0].queries[0], /.*\/workflow-query$/);
|
|
||||||
// Now check that the end result contains only the workflow query, and not the default one
|
|
||||||
t.deepEqual(config.queries["javascript"].builtin.length, 0);
|
|
||||||
t.deepEqual(config.queries["javascript"].custom.length, 1);
|
|
||||||
t.regex(config.queries["javascript"].custom[0], /.*\/workflow-query$/);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Multiple queries can be specified in workflow file, no config file required", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "override1"));
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "override2"));
|
|
||||||
const testQueries = "./override1,./override2";
|
|
||||||
const resolveQueriesArgs = [];
|
|
||||||
const codeQL = codeql_1.setCodeQL({
|
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
|
||||||
return queriesToResolvedQueryForm(queries);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const languages = "javascript";
|
|
||||||
const config = await configUtils.initConfig(languages, testQueries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
// Check resolveQueries was called correctly:
|
|
||||||
// It'll be called once for the default queries,
|
|
||||||
// and then once for each of the two queries from the workflow
|
|
||||||
t.deepEqual(resolveQueriesArgs.length, 3);
|
|
||||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
|
||||||
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
|
|
||||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override1$/);
|
|
||||||
t.regex(resolveQueriesArgs[2].queries[0], /.*\/override2$/);
|
|
||||||
// Now check that the end result contains both the queries from the workflow, as well as the defaults
|
|
||||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
|
||||||
t.deepEqual(config.queries["javascript"].custom.length, 2);
|
|
||||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
|
||||||
t.regex(config.queries["javascript"].custom[0], /.*\/override1$/);
|
|
||||||
t.regex(config.queries["javascript"].custom[1], /.*\/override2$/);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
process.env["RUNNER_TEMP"] = tmpDir;
|
|
||||||
process.env["GITHUB_WORKSPACE"] = tmpDir;
|
|
||||||
const inputFileContents = `
|
|
||||||
name: my config
|
|
||||||
queries:
|
|
||||||
- uses: ./foo`;
|
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
|
||||||
// These queries shouldn't override anything, because the value is prefixed with "+"
|
|
||||||
const testQueries = "+./additional1,./additional2";
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "additional1"));
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "additional2"));
|
|
||||||
const resolveQueriesArgs = [];
|
|
||||||
const codeQL = codeql_1.setCodeQL({
|
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
|
||||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
|
||||||
return queriesToResolvedQueryForm(queries);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const languages = "javascript";
|
|
||||||
const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
// Check resolveQueries was called correctly
|
|
||||||
// It'll be called once for the default queries,
|
|
||||||
// once for each of additional1 and additional2,
|
|
||||||
// and once for './foo' from the config file
|
|
||||||
t.deepEqual(resolveQueriesArgs.length, 4);
|
|
||||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
|
||||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/additional1$/);
|
|
||||||
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
|
|
||||||
t.regex(resolveQueriesArgs[2].queries[0], /.*\/additional2$/);
|
|
||||||
t.deepEqual(resolveQueriesArgs[3].queries.length, 1);
|
|
||||||
t.regex(resolveQueriesArgs[3].queries[0], /.*\/foo$/);
|
|
||||||
// Now check that the end result contains all the queries
|
|
||||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
|
||||||
t.deepEqual(config.queries["javascript"].custom.length, 3);
|
|
||||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
|
||||||
t.regex(config.queries["javascript"].custom[0], /.*\/additional1$/);
|
|
||||||
t.regex(config.queries["javascript"].custom[1], /.*\/additional2$/);
|
|
||||||
t.regex(config.queries["javascript"].custom[2], /.*\/foo$/);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Invalid queries in workflow file handled correctly", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
const queries = "foo/bar@v1@v3";
|
|
||||||
const languages = "javascript";
|
|
||||||
// This function just needs to be type-correct; it doesn't need to do anything,
|
|
||||||
// since we're deliberately passing in invalid data
|
|
||||||
const codeQL = codeql_1.setCodeQL({
|
|
||||||
async resolveQueries() {
|
|
||||||
return {
|
|
||||||
byLanguage: {
|
|
||||||
javascript: {},
|
|
||||||
},
|
|
||||||
noDeclaredLanguage: {},
|
|
||||||
multipleDeclaredLanguages: {},
|
|
||||||
};
|
|
||||||
},
|
|
||||||
});
|
|
||||||
try {
|
|
||||||
await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
t.fail("initConfig did not throw error");
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
t.deepEqual(err, new Error(configUtils.getQueryUsesInvalid(undefined, "foo/bar@v1@v3")));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("API client used when reading remote config", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
const codeQL = codeql_1.setCodeQL({
|
|
||||||
async resolveQueries() {
|
|
||||||
return {
|
|
||||||
byLanguage: {
|
|
||||||
javascript: {
|
|
||||||
"foo.ql": {},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
noDeclaredLanguage: {},
|
|
||||||
multipleDeclaredLanguages: {},
|
|
||||||
};
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const inputFileContents = `
|
|
||||||
name: my config
|
|
||||||
disable-default-queries: true
|
|
||||||
queries:
|
queries:
|
||||||
- uses: ./
|
- uses: ./
|
||||||
- uses: ./foo
|
- uses: ./foo
|
||||||
@@ -474,91 +95,34 @@ ava_1.default("API client used when reading remote config", async (t) => {
|
|||||||
- b
|
- b
|
||||||
paths:
|
paths:
|
||||||
- c/d`;
|
- c/d`;
|
||||||
const dummyResponse = {
|
// And the config we expect it to parse to
|
||||||
content: Buffer.from(inputFileContents).toString("base64"),
|
const expectedConfig = new configUtils.Config();
|
||||||
};
|
expectedConfig.name = 'my config';
|
||||||
const spyGetContents = mockGetContents(dummyResponse);
|
expectedConfig.disableDefaultQueries = true;
|
||||||
// Create checkout directory for remote queries repository
|
expectedConfig.additionalQueries.push(tmpDir);
|
||||||
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
|
expectedConfig.additionalQueries.push(path.join(tmpDir, 'foo'));
|
||||||
const configFile = "octo-org/codeql-config/config.yaml@main";
|
expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
|
||||||
const languages = "javascript";
|
expectedConfig.pathsIgnore = ['a', 'b'];
|
||||||
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
expectedConfig.paths = ['c/d'];
|
||||||
t.assert(spyGetContents.called);
|
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
|
||||||
});
|
setInput('config-file', 'input');
|
||||||
});
|
fs.mkdirSync(path.join(tmpDir, 'foo'));
|
||||||
ava_1.default("Remote config handles the case where a directory is provided", async (t) => {
|
const actualConfig = await configUtils.loadConfig();
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
// Should exactly equal the object we constructed earlier
|
||||||
const dummyResponse = []; // directories are returned as arrays
|
t.deepEqual(actualConfig, expectedConfig);
|
||||||
mockGetContents(dummyResponse);
|
|
||||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
|
||||||
try {
|
|
||||||
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
throw new Error("initConfig did not throw error");
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Invalid format of remote config handled correctly", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
const dummyResponse = {
|
|
||||||
// note no "content" property here
|
|
||||||
};
|
|
||||||
mockGetContents(dummyResponse);
|
|
||||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
|
||||||
try {
|
|
||||||
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
throw new Error("initConfig did not throw error");
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("No detected languages", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
mockListLanguages([]);
|
|
||||||
try {
|
|
||||||
await configUtils.initConfig(undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
throw new Error("initConfig did not throw error");
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("Unknown languages", async (t) => {
|
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
|
||||||
const languages = "ruby,english";
|
|
||||||
try {
|
|
||||||
await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
|
||||||
throw new Error("initConfig did not throw error");
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(["ruby", "english"])));
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
|
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
|
||||||
ava_1.default(`load invalid input - ${testName}`, async (t) => {
|
ava_1.default("load invalid input - " + testName, async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const codeQL = codeql_1.setCodeQL({
|
process.env['RUNNER_TEMP'] = tmpDir;
|
||||||
async resolveQueries() {
|
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||||
return {
|
const inputFile = path.join(tmpDir, 'input');
|
||||||
byLanguage: {},
|
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
|
||||||
noDeclaredLanguage: {},
|
setInput('config-file', 'input');
|
||||||
multipleDeclaredLanguages: {},
|
|
||||||
};
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const languages = "javascript";
|
|
||||||
const configFile = "input";
|
|
||||||
const inputFile = path.join(tmpDir, configFile);
|
|
||||||
fs.writeFileSync(inputFile, inputFileContents, "utf8");
|
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.loadConfig();
|
||||||
throw new Error("initConfig did not throw error");
|
throw new Error('loadConfig did not throw error');
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
|
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
|
||||||
@@ -566,14 +130,14 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
doInvalidInputTest("name invalid type", `
|
doInvalidInputTest('name invalid type', `
|
||||||
name:
|
name:
|
||||||
- foo: bar`, configUtils.getNameInvalid);
|
- foo: bar`, configUtils.getNameInvalid);
|
||||||
doInvalidInputTest("disable-default-queries invalid type", `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
|
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
|
||||||
doInvalidInputTest("queries invalid type", `queries: foo`, configUtils.getQueriesInvalid);
|
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
|
||||||
doInvalidInputTest("paths-ignore invalid type", `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
|
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
|
||||||
doInvalidInputTest("paths invalid type", `paths: 17`, configUtils.getPathsInvalid);
|
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
|
||||||
doInvalidInputTest("queries uses invalid type", `
|
doInvalidInputTest('queries uses invalid type', `
|
||||||
queries:
|
queries:
|
||||||
- uses:
|
- uses:
|
||||||
- hello: world`, configUtils.getQueryUsesInvalid);
|
- hello: world`, configUtils.getQueryUsesInvalid);
|
||||||
@@ -584,47 +148,15 @@ function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
|
|||||||
name: my config
|
name: my config
|
||||||
queries:
|
queries:
|
||||||
- name: foo
|
- name: foo
|
||||||
uses: ${input}`;
|
uses: ` + input;
|
||||||
doInvalidInputTest(`queries uses "${input}"`, inputFileContents, expectedErrorMessageGenerator);
|
doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
|
||||||
}
|
}
|
||||||
// Various "uses" fields, and the errors they should produce
|
// Various "uses" fields, and the errors they should produce
|
||||||
doInvalidQueryUsesTest("''", (c) => configUtils.getQueryUsesInvalid(c, undefined));
|
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
|
||||||
doInvalidQueryUsesTest("foo/bar", (c) => configUtils.getQueryUsesInvalid(c, "foo/bar"));
|
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
|
||||||
doInvalidQueryUsesTest("foo/bar@v1@v2", (c) => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
|
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
|
||||||
doInvalidQueryUsesTest("foo@master", (c) => configUtils.getQueryUsesInvalid(c, "foo@master"));
|
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
|
||||||
doInvalidQueryUsesTest("https://github.com/foo/bar@master", (c) => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
|
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
|
||||||
doInvalidQueryUsesTest("./foo", (c) => configUtils.getLocalPathDoesNotExist(c, "foo"));
|
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
|
||||||
doInvalidQueryUsesTest("./..", (c) => configUtils.getLocalPathOutsideOfRepository(c, ".."));
|
doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
|
||||||
const validPaths = [
|
|
||||||
"foo",
|
|
||||||
"foo/",
|
|
||||||
"foo/**",
|
|
||||||
"foo/**/",
|
|
||||||
"foo/**/**",
|
|
||||||
"foo/**/bar/**/baz",
|
|
||||||
"**/",
|
|
||||||
"**/foo",
|
|
||||||
"/foo",
|
|
||||||
];
|
|
||||||
const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
|
|
||||||
ava_1.default("path validations", (t) => {
|
|
||||||
// Dummy values to pass to validateAndSanitisePath
|
|
||||||
const propertyName = "paths";
|
|
||||||
const configFile = "./.github/codeql/config.yml";
|
|
||||||
for (const validPath of validPaths) {
|
|
||||||
t.truthy(configUtils.validateAndSanitisePath(validPath, propertyName, configFile, logging_1.getRunnerLogger(true)));
|
|
||||||
}
|
|
||||||
for (const invalidPath of invalidPaths) {
|
|
||||||
t.throws(() => configUtils.validateAndSanitisePath(invalidPath, propertyName, configFile, logging_1.getRunnerLogger(true)));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
ava_1.default("path sanitisation", (t) => {
|
|
||||||
// Dummy values to pass to validateAndSanitisePath
|
|
||||||
const propertyName = "paths";
|
|
||||||
const configFile = "./.github/codeql/config.yml";
|
|
||||||
// Valid paths are not modified
|
|
||||||
t.deepEqual(configUtils.validateAndSanitisePath("foo/bar", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/bar");
|
|
||||||
// Trailing stars are stripped
|
|
||||||
t.deepEqual(configUtils.validateAndSanitisePath("foo/**", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/");
|
|
||||||
});
|
|
||||||
//# sourceMappingURL=config-utils.test.js.map
|
//# sourceMappingURL=config-utils.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
@@ -1,3 +0,0 @@
|
|||||||
{
|
|
||||||
"bundleVersion": "codeql-bundle-20210326"
|
|
||||||
}
|
|
||||||
17
lib/error-matcher.js
generated
17
lib/error-matcher.js
generated
@@ -1,17 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
// exported only for testing purposes
|
|
||||||
exports.namedMatchersForTesting = {
|
|
||||||
/*
|
|
||||||
In due course it may be possible to remove the regex, if/when javascript also exits with code 32.
|
|
||||||
*/
|
|
||||||
noSourceCodeFound: {
|
|
||||||
exitCode: 32,
|
|
||||||
outputRegex: new RegExp("No JavaScript or TypeScript code found\\."),
|
|
||||||
message: "No code found during the build. Please see:\n" +
|
|
||||||
"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
|
|
||||||
},
|
|
||||||
};
|
|
||||||
// we collapse the matches into an array for use in execErrorCatcher
|
|
||||||
exports.errorMatchers = Object.values(exports.namedMatchersForTesting);
|
|
||||||
//# sourceMappingURL=error-matcher.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
|
|
||||||
29
lib/error-matcher.test.js
generated
29
lib/error-matcher.test.js
generated
@@ -1,29 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
|
||||||
const error_matcher_1 = require("./error-matcher");
|
|
||||||
/*
|
|
||||||
NB We test the regexes for all the matchers against example log output snippets.
|
|
||||||
*/
|
|
||||||
ava_1.default("noSourceCodeFound matches against example javascript output", async (t) => {
|
|
||||||
t.assert(testErrorMatcher("noSourceCodeFound", `
|
|
||||||
2020-09-07T17:39:53.9050522Z [2020-09-07 17:39:53] [build] Done extracting /opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/data/externs/web/ie_vml.js (3 ms)
|
|
||||||
2020-09-07T17:39:53.9051849Z [2020-09-07 17:39:53] [build-err] No JavaScript or TypeScript code found.
|
|
||||||
2020-09-07T17:39:53.9052444Z [2020-09-07 17:39:53] [build-err] No JavaScript or TypeScript code found.
|
|
||||||
2020-09-07T17:39:53.9251124Z [2020-09-07 17:39:53] [ERROR] Spawned process exited abnormally (code 255; tried to run: [/opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/autobuild.sh])
|
|
||||||
`));
|
|
||||||
});
|
|
||||||
function testErrorMatcher(matcherName, logSample) {
|
|
||||||
if (!(matcherName in error_matcher_1.namedMatchersForTesting)) {
|
|
||||||
throw new Error(`Unknown matcher ${matcherName}`);
|
|
||||||
}
|
|
||||||
const regex = error_matcher_1.namedMatchersForTesting[matcherName].outputRegex;
|
|
||||||
if (regex === undefined) {
|
|
||||||
throw new Error(`Cannot test matcher ${matcherName} with null regex`);
|
|
||||||
}
|
|
||||||
return regex.test(logSample);
|
|
||||||
}
|
|
||||||
//# sourceMappingURL=error-matcher.test.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,aAAI,CAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}
|
|
||||||
59
lib/external-queries.js
generated
59
lib/external-queries.js
generated
@@ -7,48 +7,27 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const exec = __importStar(require("@actions/exec"));
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const util = __importStar(require("./util"));
|
||||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
async function checkoutExternalQueries(config) {
|
||||||
/**
|
const folder = util.getRequiredEnvParam('RUNNER_TEMP');
|
||||||
* Check out repository at the given ref, and return the directory of the checkout.
|
for (const externalQuery of config.externalQueries) {
|
||||||
*/
|
core.info('Checking out ' + externalQuery.repository);
|
||||||
async function checkoutExternalRepository(repository, ref, apiDetails, tempDir, logger) {
|
const checkoutLocation = path.join(folder, externalQuery.repository);
|
||||||
logger.info(`Checking out ${repository}`);
|
if (!fs.existsSync(checkoutLocation)) {
|
||||||
const checkoutLocation = path.join(tempDir, repository, ref);
|
const repoURL = 'https://github.com/' + externalQuery.repository + '.git';
|
||||||
if (!checkoutLocation.startsWith(tempDir)) {
|
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
|
||||||
// this still permits locations that mess with sibling repositories in `tempDir`, but that is acceptable
|
await exec.exec('git', [
|
||||||
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
|
'--work-tree=' + checkoutLocation,
|
||||||
|
'--git-dir=' + checkoutLocation + '/.git',
|
||||||
|
'checkout', externalQuery.ref,
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
config.additionalQueries.push(path.join(checkoutLocation, externalQuery.path));
|
||||||
}
|
}
|
||||||
if (!fs.existsSync(checkoutLocation)) {
|
|
||||||
const repoCloneURL = buildCheckoutURL(repository, apiDetails);
|
|
||||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
|
|
||||||
"clone",
|
|
||||||
repoCloneURL,
|
|
||||||
checkoutLocation,
|
|
||||||
]).exec();
|
|
||||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
|
|
||||||
`--work-tree=${checkoutLocation}`,
|
|
||||||
`--git-dir=${checkoutLocation}/.git`,
|
|
||||||
"checkout",
|
|
||||||
ref,
|
|
||||||
]).exec();
|
|
||||||
}
|
|
||||||
return checkoutLocation;
|
|
||||||
}
|
}
|
||||||
exports.checkoutExternalRepository = checkoutExternalRepository;
|
exports.checkoutExternalQueries = checkoutExternalQueries;
|
||||||
function buildCheckoutURL(repository, apiDetails) {
|
|
||||||
const repoCloneURL = new URL(apiDetails.url);
|
|
||||||
if (apiDetails.externalRepoAuth !== undefined) {
|
|
||||||
repoCloneURL.username = "x-access-token";
|
|
||||||
repoCloneURL.password = apiDetails.externalRepoAuth;
|
|
||||||
}
|
|
||||||
if (!repoCloneURL.pathname.endsWith("/")) {
|
|
||||||
repoCloneURL.pathname += "/";
|
|
||||||
}
|
|
||||||
repoCloneURL.pathname += `${repository}`;
|
|
||||||
return repoCloneURL.toString();
|
|
||||||
}
|
|
||||||
exports.buildCheckoutURL = buildCheckoutURL;
|
|
||||||
//# sourceMappingURL=external-queries.js.map
|
//# sourceMappingURL=external-queries.js.map
|
||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAKpD;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,UAAwC,EACxC,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,YAAY,GAAG,gBAAgB,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAC9D,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,OAAO;YACP,YAAY;YACZ,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAlCD,gEAkCC;AAED,SAAgB,gBAAgB,CAC9B,UAAkB,EAClB,UAAwC;IAExC,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC7C,IAAI,UAAU,CAAC,gBAAgB,KAAK,SAAS,EAAE;QAC7C,YAAY,CAAC,QAAQ,GAAG,gBAAgB,CAAC;QACzC,YAAY,CAAC,QAAQ,GAAG,UAAU,CAAC,gBAAgB,CAAC;KACrD;IACD,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;QACxC,YAAY,CAAC,QAAQ,IAAI,GAAG,CAAC;KAC9B;IACD,YAAY,CAAC,QAAQ,IAAI,GAAG,UAAU,EAAE,CAAC;IACzC,OAAO,YAAY,CAAC,QAAQ,EAAE,CAAC;AACjC,CAAC;AAdD,4CAcC"}
|
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}
|
||||||
110
lib/external-queries.test.js
generated
110
lib/external-queries.test.js
generated
@@ -1,4 +1,7 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
if (mod && mod.__esModule) return mod;
|
if (mod && mod.__esModule) return mod;
|
||||||
var result = {};
|
var result = {};
|
||||||
@@ -6,110 +9,23 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
result["default"] = mod;
|
result["default"] = mod;
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const configUtils = __importStar(require("./config-utils"));
|
||||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
|
||||||
const externalQueries = __importStar(require("./external-queries"));
|
const externalQueries = __importStar(require("./external-queries"));
|
||||||
const logging_1 = require("./logging");
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
|
||||||
ava_1.default("checkoutExternalQueries", async (t) => {
|
ava_1.default("checkoutExternalQueries", async (t) => {
|
||||||
|
let config = new configUtils.Config();
|
||||||
|
config.externalQueries = [
|
||||||
|
new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
|
||||||
|
];
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
// Create a test repo in a subdir of the temp dir.
|
process.env["RUNNER_TEMP"] = tmpDir;
|
||||||
// It should have a default branch with two commits after the initial commit, where
|
await externalQueries.checkoutExternalQueries(config);
|
||||||
// - the first commit contains files 'a' and 'b'
|
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
|
||||||
// - the second commit contains only 'a'
|
t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
|
||||||
// Place the repo in a subdir because we're going to checkout a copy in tmpDir
|
|
||||||
const testRepoBaseDir = path.join(tmpDir, "test-repo-dir");
|
|
||||||
const repoName = "some/repo";
|
|
||||||
const repoPath = path.join(testRepoBaseDir, repoName);
|
|
||||||
const repoGitDir = path.join(repoPath, ".git");
|
|
||||||
// Run the given git command, and return the output.
|
|
||||||
// Passes --git-dir and --work-tree.
|
|
||||||
// Any stderr output is suppressed until the command fails.
|
|
||||||
const runGit = async function (command) {
|
|
||||||
let stdout = "";
|
|
||||||
let stderr = "";
|
|
||||||
command = [
|
|
||||||
`--git-dir=${repoGitDir}`,
|
|
||||||
`--work-tree=${repoPath}`,
|
|
||||||
...command,
|
|
||||||
];
|
|
||||||
console.log(`Running: git ${command.join(" ")}`);
|
|
||||||
try {
|
|
||||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), command, {
|
|
||||||
silent: true,
|
|
||||||
listeners: {
|
|
||||||
stdout: (data) => {
|
|
||||||
stdout += data.toString();
|
|
||||||
},
|
|
||||||
stderr: (data) => {
|
|
||||||
stderr += data.toString();
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}).exec();
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
console.log(`Command failed: git ${command.join(" ")}`);
|
|
||||||
process.stderr.write(stderr);
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
return stdout.trim();
|
|
||||||
};
|
|
||||||
fs.mkdirSync(repoPath, { recursive: true });
|
|
||||||
await runGit(["init", repoPath]);
|
|
||||||
await runGit(["config", "user.email", "test@github.com"]);
|
|
||||||
await runGit(["config", "user.name", "Test Test"]);
|
|
||||||
await runGit(["config", "commit.gpgsign", "false"]);
|
|
||||||
fs.writeFileSync(path.join(repoPath, "a"), "a content");
|
|
||||||
await runGit(["add", "a"]);
|
|
||||||
await runGit(["commit", "-m", "commit1"]);
|
|
||||||
fs.writeFileSync(path.join(repoPath, "b"), "b content");
|
|
||||||
await runGit(["add", "b"]);
|
|
||||||
await runGit(["commit", "-m", "commit1"]);
|
|
||||||
const commit1Sha = await runGit(["rev-parse", "HEAD"]);
|
|
||||||
fs.unlinkSync(path.join(repoPath, "b"));
|
|
||||||
await runGit(["add", "b"]);
|
|
||||||
await runGit(["commit", "-m", "commit2"]);
|
|
||||||
const commit2Sha = await runGit(["rev-parse", "HEAD"]);
|
|
||||||
// Checkout the first commit, which should contain 'a' and 'b'
|
|
||||||
t.false(fs.existsSync(path.join(tmpDir, repoName)));
|
|
||||||
await externalQueries.checkoutExternalRepository(repoName, commit1Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
|
|
||||||
t.true(fs.existsSync(path.join(tmpDir, repoName)));
|
|
||||||
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
|
|
||||||
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
|
|
||||||
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
|
|
||||||
// Checkout the second commit as well, which should only contain 'a'
|
|
||||||
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
|
|
||||||
await externalQueries.checkoutExternalRepository(repoName, commit2Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
|
|
||||||
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
|
|
||||||
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
|
|
||||||
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
ava_1.default("buildCheckoutURL", (t) => {
|
|
||||||
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
|
|
||||||
url: "https://github.com",
|
|
||||||
externalRepoAuth: undefined,
|
|
||||||
}), "https://github.com/foo/bar");
|
|
||||||
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
|
|
||||||
url: "https://github.example.com/",
|
|
||||||
externalRepoAuth: undefined,
|
|
||||||
}), "https://github.example.com/foo/bar");
|
|
||||||
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
|
|
||||||
url: "https://github.com",
|
|
||||||
externalRepoAuth: "abc",
|
|
||||||
}), "https://x-access-token:abc@github.com/foo/bar");
|
|
||||||
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
|
|
||||||
url: "https://github.example.com/",
|
|
||||||
externalRepoAuth: "abc",
|
|
||||||
}), "https://x-access-token:abc@github.example.com/foo/bar");
|
|
||||||
});
|
|
||||||
//# sourceMappingURL=external-queries.test.js.map
|
//# sourceMappingURL=external-queries.test.js.map
|
||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,
MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,
GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,EAAE;IAC7B,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,4BAA4B,CAC7B,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,oCAAoC,CACrC,CAAC;IAEF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,+CAA+C,CAChD,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,uDAAuD,CACxD,CAAC;AACJ,CAAC,CAAC,CAAC"}
|
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,6CAA+B;AAE/B,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACtC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACrB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAChG,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACjC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IACjF,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"}
|
||||||
212
lib/finalize-db.js
generated
Normal file
212
lib/finalize-db.js
generated
Normal file
@@ -0,0 +1,212 @@
|
|||||||
|
"use strict";
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||||
|
result["default"] = mod;
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const exec = __importStar(require("@actions/exec"));
|
||||||
|
const io = __importStar(require("@actions/io"));
|
||||||
|
const fs = __importStar(require("fs"));
|
||||||
|
const os = __importStar(require("os"));
|
||||||
|
const path = __importStar(require("path"));
|
||||||
|
const configUtils = __importStar(require("./config-utils"));
|
||||||
|
const externalQueries = __importStar(require("./external-queries"));
|
||||||
|
const sharedEnv = __importStar(require("./shared-environment"));
|
||||||
|
const upload_lib = __importStar(require("./upload-lib"));
|
||||||
|
const util = __importStar(require("./util"));
|
||||||
|
/**
|
||||||
|
* A list of queries from https://github.com/github/codeql that
|
||||||
|
* we don't want to run. Disabling them here is a quicker alternative to
|
||||||
|
* disabling them in the code scanning query suites. Queries should also
|
||||||
|
* be disabled in the suites, and removed from this list here once the
|
||||||
|
* bundle is updated to make those suite changes live.
|
||||||
|
*
|
||||||
|
* Format is a map from language to an array of path suffixes of .ql files.
|
||||||
|
*/
|
||||||
|
const DISABLED_BUILTIN_QUERIES = {
|
||||||
|
'csharp': [
|
||||||
|
'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
|
||||||
|
'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
|
||||||
|
]
|
||||||
|
};
|
||||||
|
function queryIsDisabled(language, query) {
|
||||||
|
return (DISABLED_BUILTIN_QUERIES[language] || [])
|
||||||
|
.some(disabledQuery => query.endsWith(disabledQuery));
|
||||||
|
}
|
||||||
|
function getMemoryFlag() {
|
||||||
|
let memoryToUseMegaBytes;
|
||||||
|
const memoryToUseString = core.getInput("ram");
|
||||||
|
if (memoryToUseString) {
|
||||||
|
memoryToUseMegaBytes = Number(memoryToUseString);
|
||||||
|
if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
|
||||||
|
throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
const totalMemoryBytes = os.totalmem();
|
||||||
|
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
|
||||||
|
const systemReservedMemoryMegaBytes = 256;
|
||||||
|
memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
|
||||||
|
}
|
||||||
|
return "--ram=" + Math.floor(memoryToUseMegaBytes);
|
||||||
|
}
|
||||||
|
async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
|
||||||
|
const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
|
||||||
|
if (scannedLanguages) {
|
||||||
|
for (const language of scannedLanguages.split(',')) {
|
||||||
|
core.startGroup('Extracting ' + language);
|
||||||
|
// Get extractor location
|
||||||
|
let extractorPath = '';
|
||||||
|
await exec.exec(codeqlCmd, ['resolve', 'extractor', '--format=json', '--language=' + language], {
|
||||||
|
silent: true,
|
||||||
|
listeners: {
|
||||||
|
stdout: (data) => { extractorPath += data.toString(); },
|
||||||
|
stderr: (data) => { process.stderr.write(data); }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// Set trace command
|
||||||
|
const ext = process.platform === 'win32' ? '.cmd' : '.sh';
|
||||||
|
const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
|
||||||
|
// Run trace command
|
||||||
|
await exec.exec(codeqlCmd, ['database', 'trace-command', path.join(databaseFolder, language), '--', traceCommand]);
|
||||||
|
core.endGroup();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async function finalizeDatabaseCreation(codeqlCmd, databaseFolder) {
|
||||||
|
await createdDBForScannedLanguages(codeqlCmd, databaseFolder);
|
||||||
|
const languages = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES] || '';
|
||||||
|
for (const language of languages.split(',')) {
|
||||||
|
core.startGroup('Finalizing ' + language);
|
||||||
|
await exec.exec(codeqlCmd, ['database', 'finalize', path.join(databaseFolder, language)]);
|
||||||
|
core.endGroup();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async function runResolveQueries(codeqlCmd, queries) {
|
||||||
|
let output = '';
|
||||||
|
const options = {
|
||||||
|
listeners: {
|
||||||
|
stdout: (data) => {
|
||||||
|
output += data.toString();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
await exec.exec(codeqlCmd, [
|
||||||
|
'resolve',
|
||||||
|
'queries',
|
||||||
|
...queries,
|
||||||
|
'--format=bylanguage'
|
||||||
|
], options);
|
||||||
|
return JSON.parse(output);
|
||||||
|
}
|
||||||
|
async function resolveQueryLanguages(codeqlCmd, config) {
|
||||||
|
let res = new Map();
|
||||||
|
if (!config.disableDefaultQueries || config.additionalSuites.length !== 0) {
|
||||||
|
const suites = [];
|
||||||
|
for (const language of await util.getLanguages()) {
|
||||||
|
if (!config.disableDefaultQueries) {
|
||||||
|
suites.push(language + '-code-scanning.qls');
|
||||||
|
}
|
||||||
|
for (const additionalSuite of config.additionalSuites) {
|
||||||
|
suites.push(language + '-' + additionalSuite + '.qls');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, suites);
|
||||||
|
for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
|
||||||
|
if (res[language] === undefined) {
|
||||||
|
res[language] = [];
|
||||||
|
}
|
||||||
|
res[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (config.additionalQueries.length !== 0) {
|
||||||
|
const resolveQueriesOutputObject = await runResolveQueries(codeqlCmd, config.additionalQueries);
|
||||||
|
for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
|
||||||
|
if (res[language] === undefined) {
|
||||||
|
res[language] = [];
|
||||||
|
}
|
||||||
|
res[language].push(...Object.keys(queries));
|
||||||
|
}
|
||||||
|
const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
|
||||||
|
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
|
||||||
|
if (noDeclaredLanguageQueries.length !== 0) {
|
||||||
|
throw new Error('Some queries do not declare a language, their qlpack.yml file is missing or is invalid');
|
||||||
|
}
|
||||||
|
const multipleDeclaredLanguages = resolveQueriesOutputObject.multipleDeclaredLanguages;
|
||||||
|
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
|
||||||
|
if (multipleDeclaredLanguagesQueries.length !== 0) {
|
||||||
|
throw new Error('Some queries declare multiple languages, their qlpack.yml file is missing or is invalid');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
// Runs queries and creates sarif files in the given folder
|
||||||
|
async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
|
||||||
|
const queriesPerLanguage = await resolveQueryLanguages(codeqlCmd, config);
|
||||||
|
for (let database of fs.readdirSync(databaseFolder)) {
|
||||||
|
core.startGroup('Analyzing ' + database);
|
||||||
|
const queries = queriesPerLanguage[database] || [];
|
||||||
|
if (queries.length === 0) {
|
||||||
|
throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
|
||||||
|
}
|
||||||
|
// Pass the queries to codeql using a file instead of using the command
|
||||||
|
// line to avoid command line length restrictions, particularly on windows.
|
||||||
|
const querySuite = path.join(databaseFolder, database + '-queries.qls');
|
||||||
|
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
|
||||||
|
fs.writeFileSync(querySuite, querySuiteContents);
|
||||||
|
core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
|
||||||
|
const sarifFile = path.join(sarifFolder, database + '.sarif');
|
||||||
|
await exec.exec(codeqlCmd, [
|
||||||
|
'database',
|
||||||
|
'analyze',
|
||||||
|
getMemoryFlag(),
|
||||||
|
path.join(databaseFolder, database),
|
||||||
|
'--format=sarif-latest',
|
||||||
|
'--output=' + sarifFile,
|
||||||
|
'--no-sarif-add-snippets',
|
||||||
|
querySuite
|
||||||
|
]);
|
||||||
|
core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
|
||||||
|
core.endGroup();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async function run() {
|
||||||
|
try {
|
||||||
|
if (util.should_abort('finish', true) || !await util.reportActionStarting('finish')) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const config = await configUtils.loadConfig();
|
||||||
|
core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
|
||||||
|
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
|
||||||
|
const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
|
||||||
|
const databaseFolder = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_DATABASE_DIR);
|
||||||
|
const sarifFolder = core.getInput('output');
|
||||||
|
await io.mkdirP(sarifFolder);
|
||||||
|
core.info('Finalizing database creation');
|
||||||
|
await finalizeDatabaseCreation(codeqlCmd, databaseFolder);
|
||||||
|
await externalQueries.checkoutExternalQueries(config);
|
||||||
|
core.info('Analyzing database');
|
||||||
|
await runQueries(codeqlCmd, databaseFolder, sarifFolder, config);
|
||||||
|
if ('true' === core.getInput('upload')) {
|
||||||
|
if (!await upload_lib.upload(sarifFolder)) {
|
||||||
|
await util.reportActionFailed('finish', 'upload');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
core.setFailed(error.message);
|
||||||
|
await util.reportActionFailed('finish', error.message, error.stack);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
await util.reportActionSucceeded('finish');
|
||||||
|
}
|
||||||
|
run().catch(e => {
|
||||||
|
core.setFailed("analyze action failed: " + e);
|
||||||
|
console.log(e);
|
||||||
|
});
|
||||||
|
//# sourceMappingURL=finalize-db.js.map
|
||||||
1
lib/finalize-db.js.map
Normal file
1
lib/finalize-db.js.map
Normal file
File diff suppressed because one or more lines are too long
89
lib/fingerprints.js
generated
89
lib/fingerprints.js
generated
@@ -10,12 +10,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const long_1 = __importDefault(require("long"));
|
const long_1 = __importDefault(require("long"));
|
||||||
const tab = "\t".charCodeAt(0);
|
const tab = '\t'.charCodeAt(0);
|
||||||
const space = " ".charCodeAt(0);
|
const space = ' '.charCodeAt(0);
|
||||||
const lf = "\n".charCodeAt(0);
|
const lf = '\n'.charCodeAt(0);
|
||||||
const cr = "\r".charCodeAt(0);
|
const cr = '\r'.charCodeAt(0);
|
||||||
const BLOCK_SIZE = 100;
|
const BLOCK_SIZE = 100;
|
||||||
const MOD = long_1.default.fromInt(37); // L
|
const MOD = long_1.default.fromInt(37); // L
|
||||||
// Compute the starting point for the hash mod
|
// Compute the starting point for the hash mod
|
||||||
@@ -46,8 +47,8 @@ function hash(callback, input) {
|
|||||||
// Indexes match up with those from the window variable.
|
// Indexes match up with those from the window variable.
|
||||||
const lineNumbers = Array(BLOCK_SIZE).fill(-1);
|
const lineNumbers = Array(BLOCK_SIZE).fill(-1);
|
||||||
// The current hash value, updated as we read each character
|
// The current hash value, updated as we read each character
|
||||||
let hashRaw = long_1.default.ZERO;
|
let hash = long_1.default.ZERO;
|
||||||
const firstMod = computeFirstMod();
|
let firstMod = computeFirstMod();
|
||||||
// The current index in the window, will wrap around to zero when we reach BLOCK_SIZE
|
// The current index in the window, will wrap around to zero when we reach BLOCK_SIZE
|
||||||
let index = 0;
|
let index = 0;
|
||||||
// The line number of the character we are currently processing from the input
|
// The line number of the character we are currently processing from the input
|
||||||
@@ -61,19 +62,19 @@ function hash(callback, input) {
|
|||||||
const hashCounts = {};
|
const hashCounts = {};
|
||||||
// Output the current hash and line number to the callback function
|
// Output the current hash and line number to the callback function
|
||||||
const outputHash = function () {
|
const outputHash = function () {
|
||||||
const hashValue = hashRaw.toUnsigned().toString(16);
|
let hashValue = hash.toUnsigned().toString(16);
|
||||||
if (!hashCounts[hashValue]) {
|
if (!hashCounts[hashValue]) {
|
||||||
hashCounts[hashValue] = 0;
|
hashCounts[hashValue] = 0;
|
||||||
}
|
}
|
||||||
hashCounts[hashValue]++;
|
hashCounts[hashValue]++;
|
||||||
callback(lineNumbers[index], `${hashValue}:${hashCounts[hashValue]}`);
|
callback(lineNumbers[index], hashValue + ":" + hashCounts[hashValue]);
|
||||||
lineNumbers[index] = -1;
|
lineNumbers[index] = -1;
|
||||||
};
|
};
|
||||||
// Update the current hash value and increment the index in the window
|
// Update the current hash value and increment the index in the window
|
||||||
const updateHash = function (current) {
|
const updateHash = function (current) {
|
||||||
const begin = window[index];
|
const begin = window[index];
|
||||||
window[index] = current;
|
window[index] = current;
|
||||||
hashRaw = MOD.multiply(hashRaw)
|
hash = MOD.multiply(hash)
|
||||||
.add(long_1.default.fromInt(current))
|
.add(long_1.default.fromInt(current))
|
||||||
.subtract(firstMod.multiply(long_1.default.fromInt(begin)));
|
.subtract(firstMod.multiply(long_1.default.fromInt(begin)));
|
||||||
index = (index + 1) % BLOCK_SIZE;
|
index = (index + 1) % BLOCK_SIZE;
|
||||||
@@ -120,8 +121,8 @@ function hash(callback, input) {
|
|||||||
}
|
}
|
||||||
exports.hash = hash;
|
exports.hash = hash;
|
||||||
// Generate a hash callback function that updates the given result in-place
|
// Generate a hash callback function that updates the given result in-place
|
||||||
// when it receives a hash for the correct line number. Ignores hashes for other lines.
|
// when it recieves a hash for the correct line number. Ignores hashes for other lines.
|
||||||
function locationUpdateCallback(result, location, logger) {
|
function locationUpdateCallback(result, location) {
|
||||||
var _a, _b;
|
var _a, _b;
|
||||||
let locationStartLine = (_b = (_a = location.physicalLocation) === null || _a === void 0 ? void 0 : _a.region) === null || _b === void 0 ? void 0 : _b.startLine;
|
let locationStartLine = (_b = (_a = location.physicalLocation) === null || _a === void 0 ? void 0 : _a.region) === null || _b === void 0 ? void 0 : _b.startLine;
|
||||||
if (locationStartLine === undefined) {
|
if (locationStartLine === undefined) {
|
||||||
@@ -130,7 +131,7 @@ function locationUpdateCallback(result, location, logger) {
|
|||||||
// using the hash of the first line of the file.
|
// using the hash of the first line of the file.
|
||||||
locationStartLine = 1;
|
locationStartLine = 1;
|
||||||
}
|
}
|
||||||
return function (lineNumber, hashValue) {
|
return function (lineNumber, hash) {
|
||||||
// Ignore hashes for lines that don't concern us
|
// Ignore hashes for lines that don't concern us
|
||||||
if (locationStartLine !== lineNumber) {
|
if (locationStartLine !== lineNumber) {
|
||||||
return;
|
return;
|
||||||
@@ -142,10 +143,13 @@ function locationUpdateCallback(result, location, logger) {
|
|||||||
// If the hash doesn't match the existing fingerprint then
|
// If the hash doesn't match the existing fingerprint then
|
||||||
// output a warning and don't overwrite it.
|
// output a warning and don't overwrite it.
|
||||||
if (!existingFingerprint) {
|
if (!existingFingerprint) {
|
||||||
result.partialFingerprints.primaryLocationLineHash = hashValue;
|
result.partialFingerprints.primaryLocationLineHash = hash;
|
||||||
}
|
}
|
||||||
else if (existingFingerprint !== hashValue) {
|
else if (existingFingerprint !== hash) {
|
||||||
logger.warning(`Calculated fingerprint of ${hashValue} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}`);
|
core.warning("Calculated fingerprint of " + hash +
|
||||||
|
" for file " + location.physicalLocation.artifactLocation.uri +
|
||||||
|
" line " + lineNumber +
|
||||||
|
", but found existing inconsistent fingerprint value " + existingFingerprint);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -153,48 +157,48 @@ function locationUpdateCallback(result, location, logger) {
|
|||||||
// the source file so we can hash it.
|
// the source file so we can hash it.
|
||||||
// If possible returns a absolute file path for the source file,
|
// If possible returns a absolute file path for the source file,
|
||||||
// or if not possible then returns undefined.
|
// or if not possible then returns undefined.
|
||||||
function resolveUriToFile(location, artifacts, checkoutPath, logger) {
|
function resolveUriToFile(location, artifacts) {
|
||||||
// This may be referencing an artifact
|
// This may be referencing an artifact
|
||||||
if (!location.uri && location.index !== undefined) {
|
if (!location.uri && location.index !== undefined) {
|
||||||
if (typeof location.index !== "number" ||
|
if (typeof location.index !== 'number' ||
|
||||||
location.index < 0 ||
|
location.index < 0 ||
|
||||||
location.index >= artifacts.length ||
|
location.index >= artifacts.length ||
|
||||||
typeof artifacts[location.index].location !== "object") {
|
typeof artifacts[location.index].location !== 'object') {
|
||||||
logger.debug(`Ignoring location as URI "${location.index}" is invalid`);
|
core.debug('Ignoring location as index "' + location.index + '" is invalid');
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
location = artifacts[location.index].location;
|
location = artifacts[location.index].location;
|
||||||
}
|
}
|
||||||
// Get the URI and decode
|
// Get the URI and decode
|
||||||
if (typeof location.uri !== "string") {
|
if (typeof location.uri !== 'string') {
|
||||||
logger.debug(`Ignoring location as index "${location.uri}" is invalid`);
|
core.debug('Ignoring location as uri "' + location.uri + '" is invalid');
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
let uri = decodeURIComponent(location.uri);
|
let uri = decodeURIComponent(location.uri);
|
||||||
// Remove a file scheme, and abort if the scheme is anything else
|
// Remove a file scheme, and abort if the scheme is anything else
|
||||||
const fileUriPrefix = "file://";
|
const fileUriPrefix = 'file://';
|
||||||
if (uri.startsWith(fileUriPrefix)) {
|
if (uri.startsWith(fileUriPrefix)) {
|
||||||
uri = uri.substring(fileUriPrefix.length);
|
uri = uri.substring(fileUriPrefix.length);
|
||||||
}
|
}
|
||||||
if (uri.indexOf("://") !== -1) {
|
if (uri.indexOf('://') !== -1) {
|
||||||
logger.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
|
core.debug('Ignoring location URI "' + uri + "' as the scheme is not recognised");
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
// Discard any absolute paths that aren't in the src root
|
// Discard any absolute paths that aren't in the src root
|
||||||
const srcRootPrefix = `${checkoutPath}/`;
|
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
|
||||||
if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) {
|
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
|
||||||
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
|
core.debug('Ignoring location URI "' + uri + "' as it is outside of the src root");
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
// Just assume a relative path is relative to the src root.
|
// Just assume a relative path is relative to the src root.
|
||||||
// This is not necessarily true but should be a good approximation
|
// This is not necessarily true but should be a good approximation
|
||||||
// and here we likely want to err on the side of handling more cases.
|
// and here we likely want to err on the side of handling more cases.
|
||||||
if (!uri.startsWith("/")) {
|
if (!uri.startsWith('/')) {
|
||||||
uri = srcRootPrefix + uri;
|
uri = srcRootPrefix + uri;
|
||||||
}
|
}
|
||||||
// Check the file exists
|
// Check the file exists
|
||||||
if (!fs.existsSync(uri)) {
|
if (!fs.existsSync(uri)) {
|
||||||
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
|
core.debug("Unable to compute fingerprint for non-existent file: " + uri);
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
return uri;
|
return uri;
|
||||||
@@ -202,43 +206,42 @@ function resolveUriToFile(location, artifacts, checkoutPath, logger) {
|
|||||||
exports.resolveUriToFile = resolveUriToFile;
|
exports.resolveUriToFile = resolveUriToFile;
|
||||||
// Compute fingerprints for results in the given sarif file
|
// Compute fingerprints for results in the given sarif file
|
||||||
// and return an updated sarif file contents.
|
// and return an updated sarif file contents.
|
||||||
function addFingerprints(sarifContents, checkoutPath, logger) {
|
function addFingerprints(sarifContents) {
|
||||||
var _a, _b;
|
let sarif = JSON.parse(sarifContents);
|
||||||
const sarif = JSON.parse(sarifContents);
|
|
||||||
// Gather together results for the same file and construct
|
// Gather together results for the same file and construct
|
||||||
// callbacks to accept hashes for that file and update the location
|
// callbacks to accept hashes for that file and update the location
|
||||||
const callbacksByFile = {};
|
const callbacksByFile = {};
|
||||||
for (const run of sarif.runs || []) {
|
for (const run of sarif.runs || []) {
|
||||||
// We may need the list of artifacts to resolve against
|
// We may need the list of artifacts to resolve against
|
||||||
const artifacts = run.artifacts || [];
|
let artifacts = run.artifacts || [];
|
||||||
for (const result of run.results || []) {
|
for (const result of run.results || []) {
|
||||||
// Check the primary location is defined correctly and is in the src root
|
// Check the primary location is defined correctly and is in the src root
|
||||||
const primaryLocation = (result.locations || [])[0];
|
const primaryLocation = (result.locations || [])[0];
|
||||||
if (!((_b = (_a = primaryLocation) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation)) {
|
if (!primaryLocation ||
|
||||||
logger.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
|
!primaryLocation.physicalLocation ||
|
||||||
|
!primaryLocation.physicalLocation.artifactLocation) {
|
||||||
|
core.debug("Unable to compute fingerprint for invalid location: " + JSON.stringify(primaryLocation));
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, checkoutPath, logger);
|
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
|
||||||
if (!filepath) {
|
if (!filepath) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
if (!callbacksByFile[filepath]) {
|
if (!callbacksByFile[filepath]) {
|
||||||
callbacksByFile[filepath] = [];
|
callbacksByFile[filepath] = [];
|
||||||
}
|
}
|
||||||
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation, logger));
|
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Now hash each file that was found
|
// Now hash each file that was found
|
||||||
for (const [filepath, callbacks] of Object.entries(callbacksByFile)) {
|
Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => {
|
||||||
// A callback that forwards the hash to all other callbacks for that file
|
// A callback that forwards the hash to all other callbacks for that file
|
||||||
const teeCallback = function (lineNumber, hashValue) {
|
const teeCallback = function (lineNumber, hash) {
|
||||||
for (const c of Object.values(callbacks)) {
|
Object.values(callbacks).forEach(c => c(lineNumber, hash));
|
||||||
c(lineNumber, hashValue);
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
const fileContents = fs.readFileSync(filepath).toString();
|
const fileContents = fs.readFileSync(filepath).toString();
|
||||||
hash(teeCallback, fileContents);
|
hash(teeCallback, fileContents);
|
||||||
}
|
});
|
||||||
return JSON.stringify(sarif);
|
return JSON.stringify(sarif);
|
||||||
}
|
}
|
||||||
exports.addFingerprints = addFingerprints;
|
exports.addFingerprints = addFingerprints;
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
94
lib/fingerprints.test.js
generated
94
lib/fingerprints.test.js
generated
@@ -1,4 +1,7 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
if (mod && mod.__esModule) return mod;
|
if (mod && mod.__esModule) return mod;
|
||||||
var result = {};
|
var result = {};
|
||||||
@@ -6,20 +9,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
result["default"] = mod;
|
result["default"] = mod;
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const ava_1 = __importDefault(require("ava"));
|
|
||||||
const fingerprints = __importStar(require("./fingerprints"));
|
const fingerprints = __importStar(require("./fingerprints"));
|
||||||
const logging_1 = require("./logging");
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
|
||||||
function testHash(t, input, expectedHashes) {
|
function testHash(t, input, expectedHashes) {
|
||||||
let index = 0;
|
let index = 0;
|
||||||
const callback = function (lineNumber, hash) {
|
let callback = function (lineNumber, hash) {
|
||||||
t.is(lineNumber, index + 1);
|
t.is(lineNumber, index + 1);
|
||||||
t.is(hash, expectedHashes[index]);
|
t.is(hash, expectedHashes[index]);
|
||||||
index++;
|
index++;
|
||||||
@@ -27,7 +24,7 @@ function testHash(t, input, expectedHashes) {
|
|||||||
fingerprints.hash(callback, input);
|
fingerprints.hash(callback, input);
|
||||||
t.is(index, input.split(/\r\n|\r|\n/).length);
|
t.is(index, input.split(/\r\n|\r|\n/).length);
|
||||||
}
|
}
|
||||||
ava_1.default("hash", (t) => {
|
ava_1.default('hash', (t) => {
|
||||||
// Try empty file
|
// Try empty file
|
||||||
testHash(t, "", ["c129715d7a2bc9a3:1"]);
|
testHash(t, "", ["c129715d7a2bc9a3:1"]);
|
||||||
// Try various combinations of newline characters
|
// Try various combinations of newline characters
|
||||||
@@ -35,7 +32,7 @@ ava_1.default("hash", (t) => {
|
|||||||
"271789c17abda88f:1",
|
"271789c17abda88f:1",
|
||||||
"54703d4cd895b18:1",
|
"54703d4cd895b18:1",
|
||||||
"180aee12dab6264:1",
|
"180aee12dab6264:1",
|
||||||
"a23a3dc5e078b07b:1",
|
"a23a3dc5e078b07b:1"
|
||||||
]);
|
]);
|
||||||
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
|
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
|
||||||
"8b7cf3e952e7aeb2:1",
|
"8b7cf3e952e7aeb2:1",
|
||||||
@@ -93,87 +90,68 @@ ava_1.default("hash", (t) => {
|
|||||||
"a9cf91f7bbf1862b:1",
|
"a9cf91f7bbf1862b:1",
|
||||||
"55ec222b86bcae53:1",
|
"55ec222b86bcae53:1",
|
||||||
"cc97dc7b1d7d8f7b:1",
|
"cc97dc7b1d7d8f7b:1",
|
||||||
"c129715d7a2bc9a3:1",
|
"c129715d7a2bc9a3:1"
|
||||||
]);
|
|
||||||
testHash(t, "x = 2\nx = 1\nprint(x)\nx = 3\nprint(x)\nx = 4\nprint(x)\n", [
|
|
||||||
"e54938cc54b302f1:1",
|
|
||||||
"bb609acbe9138d60:1",
|
|
||||||
"1131fd5871777f34:1",
|
|
||||||
"5c482a0f8b35ea28:1",
|
|
||||||
"54517377da7028d2:1",
|
|
||||||
"2c644846cb18d53e:1",
|
|
||||||
"f1b89f20de0d133:1",
|
|
||||||
"c129715d7a2bc9a3:1",
|
|
||||||
]);
|
]);
|
||||||
});
|
});
|
||||||
function testResolveUriToFile(uri, index, artifactsURIs) {
|
function testResolveUriToFile(uri, index, artifactsURIs) {
|
||||||
const location = { uri, index };
|
const location = { "uri": uri, "index": index };
|
||||||
const artifacts = artifactsURIs.map((artifactURI) => ({
|
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
|
||||||
location: { uri: artifactURI },
|
return fingerprints.resolveUriToFile(location, artifacts);
|
||||||
}));
|
|
||||||
return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), logging_1.getRunnerLogger(true));
|
|
||||||
}
|
}
|
||||||
ava_1.default("resolveUriToFile", (t) => {
|
ava_1.default('resolveUriToFile', t => {
|
||||||
// The resolveUriToFile method checks that the file exists and is in the right directory
|
// The resolveUriToFile method checks that the file exists and is in the right directory
|
||||||
// so we need to give it real files to look at. We will use this file as an example.
|
// so we need to give it real files to look at. We will use this file as an example.
|
||||||
// For this to work we require the current working directory to be a parent, but this
|
// For this to work we require the current working directory to be a parent, but this
|
||||||
// should generally always be the case so this is fine.
|
// should generally always be the case so this is fine.
|
||||||
const cwd = process.cwd();
|
const cwd = process.cwd();
|
||||||
const filepath = __filename;
|
const filepath = __filename;
|
||||||
t.true(filepath.startsWith(`${cwd}/`));
|
t.true(filepath.startsWith(cwd + '/'));
|
||||||
const relativeFilepath = filepath.substring(cwd.length + 1);
|
const relativeFilepaht = filepath.substring(cwd.length + 1);
|
||||||
|
process.env['GITHUB_WORKSPACE'] = cwd;
|
||||||
// Absolute paths are unmodified
|
// Absolute paths are unmodified
|
||||||
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
|
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
|
||||||
t.is(testResolveUriToFile(`file://${filepath}`, undefined, []), filepath);
|
t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
|
||||||
// Relative paths are made absolute
|
// Relative paths are made absolute
|
||||||
t.is(testResolveUriToFile(relativeFilepath, undefined, []), filepath);
|
t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath);
|
||||||
t.is(testResolveUriToFile(`file://${relativeFilepath}`, undefined, []), filepath);
|
t.is(testResolveUriToFile('file://' + relativeFilepaht, undefined, []), filepath);
|
||||||
// Absolute paths outside the src root are discarded
|
// Absolute paths outside the src root are discarded
|
||||||
t.is(testResolveUriToFile("/src/foo/bar.js", undefined, []), undefined);
|
t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
|
||||||
t.is(testResolveUriToFile("file:///src/foo/bar.js", undefined, []), undefined);
|
t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
|
||||||
// Other schemes are discarded
|
// Other schemes are discarded
|
||||||
t.is(testResolveUriToFile(`https://${filepath}`, undefined, []), undefined);
|
t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
|
||||||
t.is(testResolveUriToFile(`ftp://${filepath}`, undefined, []), undefined);
|
t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
|
||||||
// Invalid URIs are discarded
|
// Invalid URIs are discarded
|
||||||
t.is(testResolveUriToFile(1, undefined, []), undefined);
|
t.is(testResolveUriToFile(1, undefined, []), undefined);
|
||||||
t.is(testResolveUriToFile(undefined, undefined, []), undefined);
|
t.is(testResolveUriToFile(undefined, undefined, []), undefined);
|
||||||
// Non-existent files are discarded
|
// Non-existant files are discarded
|
||||||
t.is(testResolveUriToFile(`${filepath}2`, undefined, []), undefined);
|
t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
|
||||||
// Index is resolved
|
// Index is resolved
|
||||||
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
|
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
|
||||||
t.is(testResolveUriToFile(undefined, 1, ["foo", filepath]), filepath);
|
t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
|
||||||
// Invalid indexes are discarded
|
// Invalid indexes are discarded
|
||||||
t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
|
t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
|
||||||
t.is(testResolveUriToFile(undefined, "0", [filepath]), undefined);
|
t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
|
||||||
});
|
});
|
||||||
ava_1.default("addFingerprints", (t) => {
|
ava_1.default('addFingerprints', t => {
|
||||||
// Run an end-to-end test on a test file
|
// Run an end-to-end test on a test file
|
||||||
let input = fs
|
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
|
||||||
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
|
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
|
||||||
.toString();
|
|
||||||
let expected = fs
|
|
||||||
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
|
|
||||||
.toString();
|
|
||||||
// The test files are stored prettified, but addFingerprints outputs condensed JSON
|
// The test files are stored prettified, but addFingerprints outputs condensed JSON
|
||||||
input = JSON.stringify(JSON.parse(input));
|
input = JSON.stringify(JSON.parse(input));
|
||||||
expected = JSON.stringify(JSON.parse(expected));
|
expected = JSON.stringify(JSON.parse(expected));
|
||||||
// The URIs in the SARIF files resolve to files in the testdata directory
|
// The URIs in the SARIF files resolve to files in the testdata directory
|
||||||
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
|
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
|
||||||
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected);
|
t.deepEqual(fingerprints.addFingerprints(input), expected);
|
||||||
});
|
});
|
||||||
ava_1.default("missingRegions", (t) => {
|
ava_1.default('missingRegions', t => {
|
||||||
// Run an end-to-end test on a test file
|
// Run an end-to-end test on a test file
|
||||||
let input = fs
|
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
|
||||||
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
|
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
|
||||||
.toString();
|
|
||||||
let expected = fs
|
|
||||||
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
|
|
||||||
.toString();
|
|
||||||
// The test files are stored prettified, but addFingerprints outputs condensed JSON
|
// The test files are stored prettified, but addFingerprints outputs condensed JSON
|
||||||
input = JSON.stringify(JSON.parse(input));
|
input = JSON.stringify(JSON.parse(input));
|
||||||
expected = JSON.stringify(JSON.parse(expected));
|
expected = JSON.stringify(JSON.parse(expected));
|
||||||
// The URIs in the SARIF files resolve to files in the testdata directory
|
// The URIs in the SARIF files resolve to files in the testdata directory
|
||||||
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
|
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
|
||||||
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected);
|
t.deepEqual(fingerprints.addFingerprints(input), expected);
|
||||||
});
|
});
|
||||||
//# sourceMappingURL=fingerprints.test.js.map
|
//# sourceMappingURL=fingerprints.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
129
lib/init-action.js
generated
129
lib/init-action.js
generated
@@ -1,129 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const core = __importStar(require("@actions/core"));
|
|
||||||
const actionsUtil = __importStar(require("./actions-util"));
|
|
||||||
const init_1 = require("./init");
|
|
||||||
const languages_1 = require("./languages");
|
|
||||||
const logging_1 = require("./logging");
|
|
||||||
const repository_1 = require("./repository");
|
|
||||||
const util_1 = require("./util");
|
|
||||||
async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
|
|
||||||
var _a;
|
|
||||||
const statusReportBase = await actionsUtil.createStatusReportBase("init", "success", startedAt);
|
|
||||||
const languages = config.languages.join(",");
|
|
||||||
const workflowLanguages = actionsUtil.getOptionalInput("languages");
|
|
||||||
const paths = (config.originalUserInput.paths || []).join(",");
|
|
||||||
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(",");
|
|
||||||
const disableDefaultQueries = config.originalUserInput["disable-default-queries"]
|
|
||||||
? languages
|
|
||||||
: "";
|
|
||||||
const queries = [];
|
|
||||||
let queriesInput = (_a = actionsUtil.getOptionalInput("queries")) === null || _a === void 0 ? void 0 : _a.trim();
|
|
||||||
if (queriesInput === undefined || queriesInput.startsWith("+")) {
|
|
||||||
queries.push(...(config.originalUserInput.queries || []).map((q) => q.uses));
|
|
||||||
}
|
|
||||||
if (queriesInput !== undefined) {
|
|
||||||
queriesInput = queriesInput.startsWith("+")
|
|
||||||
? queriesInput.substr(1)
|
|
||||||
: queriesInput;
|
|
||||||
queries.push(...queriesInput.split(","));
|
|
||||||
}
|
|
||||||
const statusReport = {
|
|
||||||
...statusReportBase,
|
|
||||||
languages,
|
|
||||||
workflow_languages: workflowLanguages || "",
|
|
||||||
paths,
|
|
||||||
paths_ignore: pathsIgnore,
|
|
||||||
disable_default_queries: disableDefaultQueries,
|
|
||||||
queries: queries.join(","),
|
|
||||||
tools_input: actionsUtil.getOptionalInput("tools") || "",
|
|
||||||
tools_resolved_version: toolsVersion,
|
|
||||||
};
|
|
||||||
await actionsUtil.sendStatusReport(statusReport);
|
|
||||||
}
|
|
||||||
async function run() {
|
|
||||||
const startedAt = new Date();
|
|
||||||
const logger = logging_1.getActionsLogger();
|
|
||||||
let config;
|
|
||||||
let codeql;
|
|
||||||
let toolsVersion;
|
|
||||||
const apiDetails = {
|
|
||||||
auth: actionsUtil.getRequiredInput("token"),
|
|
||||||
externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
|
|
||||||
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
|
|
||||||
};
|
|
||||||
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
|
|
||||||
util_1.checkGitHubVersionInRange(gitHubVersion, "actions", logger);
|
|
||||||
try {
|
|
||||||
actionsUtil.prepareLocalRunEnvironment();
|
|
||||||
const workflowErrors = await actionsUtil.validateWorkflow();
|
|
||||||
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, workflowErrors)))) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getTemporaryDirectory(), "actions", gitHubVersion.type, logger);
|
|
||||||
codeql = initCodeQLResult.codeql;
|
|
||||||
toolsVersion = initCodeQLResult.toolsVersion;
|
|
||||||
config = await init_1.initConfig(actionsUtil.getOptionalInput("languages"), actionsUtil.getOptionalInput("queries"), actionsUtil.getOptionalInput("config-file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), actionsUtil.getTemporaryDirectory(), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, actionsUtil.getRequiredEnvParam("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
|
|
||||||
if (config.languages.includes(languages_1.Language.python) &&
|
|
||||||
actionsUtil.getRequiredInput("setup-python-dependencies") === "true") {
|
|
||||||
try {
|
|
||||||
await init_1.installPythonDeps(codeql, logger);
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
logger.warning(`${err.message} You can call this action with 'setup-python-dependencies: false' to disable this process`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
core.setFailed(e.message);
|
|
||||||
console.log(e);
|
|
||||||
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "aborted", startedAt, e.message));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
// Forward Go flags
|
|
||||||
const goFlags = process.env["GOFLAGS"];
|
|
||||||
if (goFlags) {
|
|
||||||
core.exportVariable("GOFLAGS", goFlags);
|
|
||||||
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
|
|
||||||
}
|
|
||||||
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
|
|
||||||
const codeqlRam = process.env["CODEQL_RAM"] || "6500";
|
|
||||||
core.exportVariable("CODEQL_RAM", codeqlRam);
|
|
||||||
const tracerConfig = await init_1.runInit(codeql, config);
|
|
||||||
if (tracerConfig !== undefined) {
|
|
||||||
for (const [key, value] of Object.entries(tracerConfig.env)) {
|
|
||||||
core.exportVariable(key, value);
|
|
||||||
}
|
|
||||||
if (process.platform === "win32") {
|
|
||||||
await init_1.injectWindowsTracer("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
core.setOutput("codeql-path", config.codeQLCmd);
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
core.setFailed(error.message);
|
|
||||||
console.log(error);
|
|
||||||
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "failure", startedAt, error.message, error.stack));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
await sendSuccessStatusReport(startedAt, config, toolsVersion);
|
|
||||||
}
|
|
||||||
async function runWrapper() {
|
|
||||||
try {
|
|
||||||
await run();
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
core.setFailed(`init action failed: ${error}`);
|
|
||||||
console.log(error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
void runWrapper();
|
|
||||||
//# sourceMappingURL=init-action.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"init-action.js","sourceRoot":"","sources":["../src/init-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAG9C,iCAMgB;AAChB,2CAAuC;AACvC,uCAA6C;AAC7C,6CAAkD;AAClD,iCAAqE;AAsBrE,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,MAA0B,EAC1B,YAAoB;;IAEpB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,MAAM,EACN,SAAS,EACT,SAAS,CACV,CAAC;IAEF,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC7C,MAAM,iBAAiB,GAAG,WAAW,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;IACpE,MAAM,KAAK,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CACvE,GAAG,CACJ,CAAC;IACF,MAAM,qBAAqB,GAAG,MAAM,CAAC,iBAAiB,CACpD,yBAAyB,CAC1B;QACC,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,EAAE,CAAC;IAEP,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,SAAG,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,0CAAE,IAAI,EAAE,CAAC;IACnE,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;QAC9D,OAAO,CAAC,IAAI,CACV,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,OAAO,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAC/D,CAAC;KACH;IACD,IAAI,YAAY,KAAK,SAAS,EAAE;QAC9B,YAAY,GAAG,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC;YACzC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC;YACxB,CAAC,CAAC,YAAY,CAAC;QACjB,OAAO,CAAC,IAAI,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;KAC1C;IAED,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,SAAS;QACT,kBAAkB,EAAE,iBAAiB,IAAI,EAAE;QAC3C,KAAK;QACL,YAAY,EAAE,WAAW;QACzB,uBAAuB,EAAE,qBAAqB;QAC9C,OAAO,EAAE,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC;QAC1B,WAAW,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,IAAI,EAAE;QACxD,sBAAsB,EAAE,YAAY;KACrC,CAAC;IAEF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,MAAc,CAAC;IACnB,IAAI,YAAoB,CAAC;IAEzB,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;QAC3C,gBAAgB,EAAE,WAAW,CAAC,gBAAgB,CAAC,2BAA2B,CAAC;QAC3E,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB
,CAAC;KAC1D,CAAC;IAEF,MAAM,aAAa,GAAG,MAAM,uBAAgB,CAAC,UAAU,CAAC,CAAC;IACzD,gCAAyB,CAAC,aAAa,EAAE,SAAS,EAAE,MAAM,CAAC,CAAC;IAE5D,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QAEzC,MAAM,cAAc,GAAG,MAAM,WAAW,CAAC,gBAAgB,EAAE,CAAC;QAE5D,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,UAAU,EACV,SAAS,EACT,cAAc,CACf,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,gBAAgB,GAAG,MAAM,iBAAU,CACvC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,UAAU,EACV,WAAW,CAAC,qBAAqB,EAAE,EACnC,SAAS,EACT,aAAa,CAAC,IAAI,EAClB,MAAM,CACP,CAAC;QACF,MAAM,GAAG,gBAAgB,CAAC,MAAM,CAAC;QACjC,YAAY,GAAG,gBAAgB,CAAC,YAAY,CAAC;QAE7C,MAAM,GAAG,MAAM,iBAAU,CACvB,WAAW,CAAC,gBAAgB,CAAC,WAAW,CAAC,EACzC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EACvC,WAAW,CAAC,gBAAgB,CAAC,aAAa,CAAC,EAC3C,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,WAAW,CAAC,qBAAqB,EAAE,EACnC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,MAAM,EACN,WAAW,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,EACnD,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;QAEF,IACE,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,oBAAQ,CAAC,MAAM,CAAC;YAC1C,WAAW,CAAC,gBAAgB,CAAC,2BAA2B,CAAC,KAAK,MAAM,EACpE;YACA,IAAI;gBACF,MAAM,wBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;aACzC;YAAC,OAAO,GAAG,EAAE;gBACZ,MAAM,CAAC,OAAO,CACZ,GAAG,GAAG,CAAC,OAAO,2FAA2F,CAC1G,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,SAAS,EACT,SAAS,EACT,CAAC,CAAC,OAAO,CACV,CACF,CAAC;QACF,OAAO;KACR;IAED,IAAI;QACF,mBAAmB;QACnB,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;QACvC,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YACxC,IAAI,CAAC,OAAO,CACV,6GAA6G,CAC9G,CAAC;SACH;QAED,mGAAmG;QACnG,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,MAAM,CAAC;QACtD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;QAE7C,MAAM,YAAY,GAAG,MAAM,cAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnD,IAAI,YAAY,KAAK,SAAS,EAAE;YAC9B,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,EAAE;gBAC3
D,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;aACjC;YAED,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,0BAAmB,CACvB,mBAAmB,EACnB,SAAS,EACT,MAAM,EACN,MAAM,EACN,YAAY,CACb,CAAC;aACH;SACF;QAED,IAAI,CAAC,SAAS,CAAC,aAAa,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;KACjD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;AACjE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,uBAAuB,KAAK,EAAE,CAAC,CAAC;QAC/C,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
|
||||||
174
lib/init.js
generated
174
lib/init.js
generated
@@ -1,174 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const fs = __importStar(require("fs"));
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
|
||||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
|
||||||
const analysisPaths = __importStar(require("./analysis-paths"));
|
|
||||||
const codeql_1 = require("./codeql");
|
|
||||||
const configUtils = __importStar(require("./config-utils"));
|
|
||||||
const tracer_config_1 = require("./tracer-config");
|
|
||||||
const util = __importStar(require("./util"));
|
|
||||||
async function initCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger) {
|
|
||||||
logger.startGroup("Setup CodeQL tools");
|
|
||||||
const { codeql, toolsVersion } = await codeql_1.setupCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger);
|
|
||||||
await codeql.printVersion();
|
|
||||||
logger.endGroup();
|
|
||||||
return { codeql, toolsVersion };
|
|
||||||
}
|
|
||||||
exports.initCodeQL = initCodeQL;
|
|
||||||
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
|
|
||||||
logger.startGroup("Load language configuration");
|
|
||||||
const config = await configUtils.initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
|
|
||||||
analysisPaths.printPathFiltersWarning(config, logger);
|
|
||||||
logger.endGroup();
|
|
||||||
return config;
|
|
||||||
}
|
|
||||||
exports.initConfig = initConfig;
|
|
||||||
async function runInit(codeql, config) {
|
|
||||||
const sourceRoot = path.resolve();
|
|
||||||
fs.mkdirSync(util.getCodeQLDatabasesDir(config.tempDir), { recursive: true });
|
|
||||||
// TODO: replace this code once CodeQL supports multi-language tracing
|
|
||||||
for (const language of config.languages) {
|
|
||||||
// Init language database
|
|
||||||
await codeql.databaseInit(util.getCodeQLDatabasePath(config.tempDir, language), language, sourceRoot);
|
|
||||||
}
|
|
||||||
return await tracer_config_1.getCombinedTracerConfig(config, codeql);
|
|
||||||
}
|
|
||||||
exports.runInit = runInit;
|
|
||||||
// Runs a powershell script to inject the tracer into a parent process
|
|
||||||
// so it can tracer future processes, hopefully including the build process.
|
|
||||||
// If processName is given then injects into the nearest parent process with
|
|
||||||
// this name, otherwise uses the processLevel-th parent if defined, otherwise
|
|
||||||
// defaults to the 3rd parent as a rough guess.
|
|
||||||
async function injectWindowsTracer(processName, processLevel, config, codeql, tracerConfig) {
|
|
||||||
let script;
|
|
||||||
if (processName !== undefined) {
|
|
||||||
script = `
|
|
||||||
Param(
|
|
||||||
[Parameter(Position=0)]
|
|
||||||
[String]
|
|
||||||
$tracer
|
|
||||||
)
|
|
||||||
|
|
||||||
$id = $PID
|
|
||||||
while ($true) {
|
|
||||||
$p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
|
|
||||||
Write-Host "Found process: $p"
|
|
||||||
if ($p -eq $null) {
|
|
||||||
throw "Could not determine ${processName} process"
|
|
||||||
}
|
|
||||||
if ($p[0].Name -eq "${processName}") {
|
|
||||||
Break
|
|
||||||
} else {
|
|
||||||
$id = $p[0].ParentProcessId
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Write-Host "Final process: $p"
|
|
||||||
|
|
||||||
Invoke-Expression "&$tracer --inject=$id"`;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
// If the level is not defined then guess at the 3rd parent process.
|
|
||||||
// This won't be correct in every setting but it should be enough in most settings,
|
|
||||||
// and overestimating is likely better in this situation so we definitely trace
|
|
||||||
// what we want, though this does run the risk of interfering with future CI jobs.
|
|
||||||
// Note that the default of 3 doesn't work on github actions, so we include a
|
|
||||||
// special case in the script that checks for Runner.Worker.exe so we can still work
|
|
||||||
// on actions if the runner is invoked there.
|
|
||||||
processLevel = processLevel || 3;
|
|
||||||
script = `
|
|
||||||
Param(
|
|
||||||
[Parameter(Position=0)]
|
|
||||||
[String]
|
|
||||||
$tracer
|
|
||||||
)
|
|
||||||
|
|
||||||
$id = $PID
|
|
||||||
for ($i = 0; $i -le ${processLevel}; $i++) {
|
|
||||||
$p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
|
|
||||||
Write-Host "Parent process \${i}: $p"
|
|
||||||
if ($p -eq $null) {
|
|
||||||
throw "Process tree ended before reaching required level"
|
|
||||||
}
|
|
||||||
# Special case just in case the runner is used on actions
|
|
||||||
if ($p[0].Name -eq "Runner.Worker.exe") {
|
|
||||||
Write-Host "Found Runner.Worker.exe process which means we are running on GitHub Actions"
|
|
||||||
Write-Host "Aborting search early and using process: $p"
|
|
||||||
Break
|
|
||||||
} elseif ($p[0].Name -eq "Agent.Worker.exe") {
|
|
||||||
Write-Host "Found Agent.Worker.exe process which means we are running on Azure Pipelines"
|
|
||||||
Write-Host "Aborting search early and using process: $p"
|
|
||||||
Break
|
|
||||||
} else {
|
|
||||||
$id = $p[0].ParentProcessId
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Write-Host "Final process: $p"
|
|
||||||
|
|
||||||
Invoke-Expression "&$tracer --inject=$id"`;
|
|
||||||
}
|
|
||||||
const injectTracerPath = path.join(config.tempDir, "inject-tracer.ps1");
|
|
||||||
fs.writeFileSync(injectTracerPath, script);
|
|
||||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("powershell"), [
|
|
||||||
"-ExecutionPolicy",
|
|
||||||
"Bypass",
|
|
||||||
"-file",
|
|
||||||
injectTracerPath,
|
|
||||||
path.resolve(path.dirname(codeql.getPath()), "tools", "win64", "tracer.exe"),
|
|
||||||
], { env: { ODASA_TRACER_CONFIGURATION: tracerConfig.spec } }).exec();
|
|
||||||
}
|
|
||||||
exports.injectWindowsTracer = injectWindowsTracer;
|
|
||||||
async function installPythonDeps(codeql, logger) {
|
|
||||||
logger.startGroup("Setup Python dependencies");
|
|
||||||
const scriptsFolder = path.resolve(__dirname, "../python-setup");
|
|
||||||
// Setup tools on the GitHub hosted runners
|
|
||||||
if (process.env["ImageOS"] !== undefined) {
|
|
||||||
try {
|
|
||||||
if (process.platform === "win32") {
|
|
||||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("powershell"), [path.join(scriptsFolder, "install_tools.ps1")]).exec();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
await new toolrunner.ToolRunner(path.join(scriptsFolder, "install_tools.sh")).exec();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
// This script tries to install some needed tools in the runner. It should not fail, but if it does
|
|
||||||
// we just abort the process without failing the action
|
|
||||||
logger.endGroup();
|
|
||||||
logger.warning("Unable to download and extract the tools needed for installing the python dependencies. You can call this action with 'setup-python-dependencies: false' to disable this process.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Install dependencies
|
|
||||||
try {
|
|
||||||
const script = "auto_install_packages.py";
|
|
||||||
if (process.platform === "win32") {
|
|
||||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("py"), [
|
|
||||||
"-3",
|
|
||||||
path.join(scriptsFolder, script),
|
|
||||||
path.dirname(codeql.getPath()),
|
|
||||||
]).exec();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
await new toolrunner.ToolRunner(path.join(scriptsFolder, script), [
|
|
||||||
path.dirname(codeql.getPath()),
|
|
||||||
]).exec();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
logger.endGroup();
|
|
||||||
logger.warning("We were unable to install your python dependencies. You can call this action with 'setup-python-dependencies: false' to disable this process.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
logger.endGroup();
|
|
||||||
}
|
|
||||||
exports.installPythonDeps = installPythonDeps;
|
|
||||||
//# sourceMappingURL=init.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,IAAe,EACf,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,oBAAW,CAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,IAAI,EACJ,OAAO,EACP,MAAM,CACP,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AApBD,gCAoBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9BD,gCA8BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,EACR,UAAU,CACX,CAAC;KACH;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnBD,0BAmBC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;
;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,2CAA2C;IAC3C,IAAI,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,SAAS,EAAE;QACxC,IAAI;YACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC,CAAC,CAChD,CAAC,IAAI,EAAE,CAAC;aACV;iBAAM;gBACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;aACV;SACF;QAAC,OAAO,CAAC,EAAE;YACV,mGAAmG;YACnG,uDAAuD;YACvD,MAAM,CAAC,QAAQ,EAAE,CAAC;YAClB,MAAM,CAAC,OAAO,CACZ,mLAAmL,CACpL,CAAC;YACF,OAAO;SACR;KACF;IAED,uBAAuB;IACvB,IAAI;QACF,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,+IAA+I,CAChJ,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAnDD,8CAmDC"}
|
|
||||||
45
lib/languages.js
generated
45
lib/languages.js
generated
@@ -1,45 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
// All the languages supported by CodeQL
|
|
||||||
var Language;
|
|
||||||
(function (Language) {
|
|
||||||
Language["csharp"] = "csharp";
|
|
||||||
Language["cpp"] = "cpp";
|
|
||||||
Language["go"] = "go";
|
|
||||||
Language["java"] = "java";
|
|
||||||
Language["javascript"] = "javascript";
|
|
||||||
Language["python"] = "python";
|
|
||||||
})(Language = exports.Language || (exports.Language = {}));
|
|
||||||
// Additional names for languages
|
|
||||||
const LANGUAGE_ALIASES = {
|
|
||||||
c: Language.cpp,
|
|
||||||
"c++": Language.cpp,
|
|
||||||
"c#": Language.csharp,
|
|
||||||
typescript: Language.javascript,
|
|
||||||
};
|
|
||||||
// Translate from user input or GitHub's API names for languages to CodeQL's names for languages
|
|
||||||
function parseLanguage(language) {
|
|
||||||
// Normalise to lower case
|
|
||||||
language = language.toLowerCase();
|
|
||||||
// See if it's an exact match
|
|
||||||
if (language in Language) {
|
|
||||||
return language;
|
|
||||||
}
|
|
||||||
// Check language aliases
|
|
||||||
if (language in LANGUAGE_ALIASES) {
|
|
||||||
return LANGUAGE_ALIASES[language];
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
exports.parseLanguage = parseLanguage;
|
|
||||||
function isTracedLanguage(language) {
|
|
||||||
return (["cpp", "java", "csharp"].includes(language) ||
|
|
||||||
(process.env["CODEQL_EXTRACTOR_GO_BUILD_TRACING"] === "on" &&
|
|
||||||
language === Language.go));
|
|
||||||
}
|
|
||||||
exports.isTracedLanguage = isTracedLanguage;
|
|
||||||
function isScannedLanguage(language) {
|
|
||||||
return !isTracedLanguage(language);
|
|
||||||
}
|
|
||||||
exports.isScannedLanguage = isScannedLanguage;
|
|
||||||
//# sourceMappingURL=languages.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;AAAA,wCAAwC;AACxC,IAAY,QAOX;AAPD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;AACnB,CAAC,EAPW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAOnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CACL,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;QAC5C,CAAC,OAAO,CAAC,GAAG,CAAC,mCAAmC,CAAC,KAAK,IAAI;YACxD,QAAQ,KAAK,QAAQ,CAAC,EAAE,CAAC,CAC5B,CAAC;AACJ,CAAC;AAND,4CAMC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
|
|
||||||
44
lib/languages.test.js
generated
44
lib/languages.test.js
generated
@@ -1,44 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
|
||||||
const languages_1 = require("./languages");
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
|
||||||
ava_1.default("parseLanguage", async (t) => {
|
|
||||||
// Exact matches
|
|
||||||
t.deepEqual(languages_1.parseLanguage("csharp"), languages_1.Language.csharp);
|
|
||||||
t.deepEqual(languages_1.parseLanguage("cpp"), languages_1.Language.cpp);
|
|
||||||
t.deepEqual(languages_1.parseLanguage("go"), languages_1.Language.go);
|
|
||||||
t.deepEqual(languages_1.parseLanguage("java"), languages_1.Language.java);
|
|
||||||
t.deepEqual(languages_1.parseLanguage("javascript"), languages_1.Language.javascript);
|
|
||||||
t.deepEqual(languages_1.parseLanguage("python"), languages_1.Language.python);
|
|
||||||
// Aliases
|
|
||||||
t.deepEqual(languages_1.parseLanguage("c"), languages_1.Language.cpp);
|
|
||||||
t.deepEqual(languages_1.parseLanguage("c++"), languages_1.Language.cpp);
|
|
||||||
t.deepEqual(languages_1.parseLanguage("c#"), languages_1.Language.csharp);
|
|
||||||
t.deepEqual(languages_1.parseLanguage("typescript"), languages_1.Language.javascript);
|
|
||||||
// Not matches
|
|
||||||
t.deepEqual(languages_1.parseLanguage("foo"), undefined);
|
|
||||||
t.deepEqual(languages_1.parseLanguage(" "), undefined);
|
|
||||||
t.deepEqual(languages_1.parseLanguage(""), undefined);
|
|
||||||
});
|
|
||||||
ava_1.default("isTracedLanguage", async (t) => {
|
|
||||||
t.true(languages_1.isTracedLanguage(languages_1.Language.cpp));
|
|
||||||
t.true(languages_1.isTracedLanguage(languages_1.Language.java));
|
|
||||||
t.true(languages_1.isTracedLanguage(languages_1.Language.csharp));
|
|
||||||
t.false(languages_1.isTracedLanguage(languages_1.Language.go));
|
|
||||||
t.false(languages_1.isTracedLanguage(languages_1.Language.javascript));
|
|
||||||
t.false(languages_1.isTracedLanguage(languages_1.Language.python));
|
|
||||||
});
|
|
||||||
ava_1.default("isScannedLanguage", async (t) => {
|
|
||||||
t.false(languages_1.isScannedLanguage(languages_1.Language.cpp));
|
|
||||||
t.false(languages_1.isScannedLanguage(languages_1.Language.java));
|
|
||||||
t.false(languages_1.isScannedLanguage(languages_1.Language.csharp));
|
|
||||||
t.true(languages_1.isScannedLanguage(languages_1.Language.go));
|
|
||||||
t.true(languages_1.isScannedLanguage(languages_1.Language.javascript));
|
|
||||||
t.true(languages_1.isScannedLanguage(languages_1.Language.python));
|
|
||||||
});
|
|
||||||
//# sourceMappingURL=languages.test.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"languages.test.js","sourceRoot":"","sources":["../src/languages.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,2CAKqB;AACrB,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,gBAAgB;IAChB,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IACtD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,MAAM,CAAC,EAAE,oBAAQ,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAC9D,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAEtD,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAE9D,cAAc;IACd,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC7C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;AAC5C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE1C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mBAAmB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACpC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACzC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,
CAAC,MAAM,CAAC,CAAC,CAAC;IAE5C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC"}
|
|
||||||
26
lib/logging.js
generated
26
lib/logging.js
generated
@@ -1,26 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const core = __importStar(require("@actions/core"));
|
|
||||||
function getActionsLogger() {
|
|
||||||
return core;
|
|
||||||
}
|
|
||||||
exports.getActionsLogger = getActionsLogger;
|
|
||||||
function getRunnerLogger(debugMode) {
|
|
||||||
return {
|
|
||||||
debug: debugMode ? console.debug : () => undefined,
|
|
||||||
info: console.info,
|
|
||||||
warning: console.warn,
|
|
||||||
error: console.error,
|
|
||||||
startGroup: () => undefined,
|
|
||||||
endGroup: () => undefined,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
exports.getRunnerLogger = getRunnerLogger;
|
|
||||||
//# sourceMappingURL=logging.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAYtC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAFD,4CAEC;AAED,SAAgB,eAAe,CAAC,SAAkB;IAChD,OAAO;QACL,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS;QAClD,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AATD,0CASC"}
|
|
||||||
14
lib/repository.js
generated
14
lib/repository.js
generated
@@ -1,14 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
function parseRepositoryNwo(input) {
|
|
||||||
const parts = input.split("/");
|
|
||||||
if (parts.length !== 2) {
|
|
||||||
throw new Error(`"${input}" is not a valid repository name`);
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
owner: parts[0],
|
|
||||||
repo: parts[1],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
exports.parseRepositoryNwo = parseRepositoryNwo;
|
|
||||||
//# sourceMappingURL=repository.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"repository.js","sourceRoot":"","sources":["../src/repository.ts"],"names":[],"mappings":";;AAMA,SAAgB,kBAAkB,CAAC,KAAa;IAC9C,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,MAAM,IAAI,KAAK,CAAC,IAAI,KAAK,kCAAkC,CAAC,CAAC;KAC9D;IACD,OAAO;QACL,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;QACf,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;KACf,CAAC;AACJ,CAAC;AATD,gDASC"}
|
|
||||||
281
lib/runner.js
generated
281
lib/runner.js
generated
@@ -1,281 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const fs = __importStar(require("fs"));
|
|
||||||
const os = __importStar(require("os"));
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const commander_1 = require("commander");
|
|
||||||
const analyze_1 = require("./analyze");
|
|
||||||
const autobuild_1 = require("./autobuild");
|
|
||||||
const codeql_1 = require("./codeql");
|
|
||||||
const config_utils_1 = require("./config-utils");
|
|
||||||
const init_1 = require("./init");
|
|
||||||
const languages_1 = require("./languages");
|
|
||||||
const logging_1 = require("./logging");
|
|
||||||
const repository_1 = require("./repository");
|
|
||||||
const upload_lib = __importStar(require("./upload-lib"));
|
|
||||||
const util_1 = require("./util");
|
|
||||||
const program = new commander_1.Command();
|
|
||||||
program.version("0.0.1");
|
|
||||||
function getTempDir(userInput) {
|
|
||||||
const tempDir = path.join(userInput || process.cwd(), "codeql-runner");
|
|
||||||
if (!fs.existsSync(tempDir)) {
|
|
||||||
fs.mkdirSync(tempDir, { recursive: true });
|
|
||||||
}
|
|
||||||
return tempDir;
|
|
||||||
}
|
|
||||||
function getToolsDir(userInput) {
|
|
||||||
const toolsDir = userInput || path.join(os.homedir(), "codeql-runner-tools");
|
|
||||||
if (!fs.existsSync(toolsDir)) {
|
|
||||||
fs.mkdirSync(toolsDir, { recursive: true });
|
|
||||||
}
|
|
||||||
return toolsDir;
|
|
||||||
}
|
|
||||||
const codeqlEnvJsonFilename = "codeql-env.json";
|
|
||||||
// Imports the environment from codeqlEnvJsonFilename if not already present
|
|
||||||
function importTracerEnvironment(config) {
|
|
||||||
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
|
|
||||||
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
|
|
||||||
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
|
|
||||||
for (const key of Object.keys(env)) {
|
|
||||||
process.env[key] = env[key];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Allow the user to specify refs in full refs/heads/branch format
|
|
||||||
// or just the short branch name and prepend "refs/heads/" to it.
|
|
||||||
function parseRef(userInput) {
|
|
||||||
if (userInput.startsWith("refs/")) {
|
|
||||||
return userInput;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
return `refs/heads/${userInput}`;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Parses the --trace-process-name arg from process.argv, or returns undefined
|
|
||||||
function parseTraceProcessName() {
|
|
||||||
for (let i = 0; i < process.argv.length - 1; i++) {
|
|
||||||
if (process.argv[i] === "--trace-process-name") {
|
|
||||||
return process.argv[i + 1];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
/**
 * Extracts the value of the "--trace-process-level" flag from `process.argv`
 * as an integer.
 *
 * @returns {number | undefined} The parsed level, or undefined when the flag
 *   is absent, has no following argument, or the argument is not a number.
 */
function parseTraceProcessLevel() {
    const flagIndex = process.argv.indexOf("--trace-process-level");
    if (flagIndex === -1 || flagIndex >= process.argv.length - 1) {
        return undefined;
    }
    const parsed = parseInt(process.argv[flagIndex + 1], 10);
    return isNaN(parsed) ? undefined : parsed;
}
|
|
||||||
// `init` command: downloads or locates the CodeQL CLI, resolves the analysis
// configuration, and starts the build tracer. On success it writes the tracer
// environment to disk in formats consumable by later build steps.
program
    .command("init")
    .description("Initializes CodeQL")
    .requiredOption("--repository <repository>", "Repository name. (Required)")
    .requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
    .option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
    .option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
    .option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.")
    .option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.")
    .option("--config-file <file>", "Path to config file.")
    .option("--codeql-path <path>", "Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy.")
    .option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
    .option("--tools-dir <dir>", "Directory to use for CodeQL tools and other files to store between runs. Default is a subdirectory of the home directory.")
    .option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
    .option("--debug", "Print more verbose output", false)
    .option("--trace-process-name <string>", "(Advanced, windows-only) Inject a windows tracer of this process into a process with the given process name.")
    .option("--trace-process-level <number>", "(Advanced, windows-only) Inject a windows tracer of this process into a parent process <number> levels up.")
    .action(async (cmd) => {
    const logger = logging_1.getRunnerLogger(cmd.debug);
    try {
        const tempDir = getTempDir(cmd.tempDir);
        const toolsDir = getToolsDir(cmd.toolsDir);
        util_1.setupActionsVars(tempDir, toolsDir);
        // Wipe the temp dir so state from a previous run cannot leak in.
        // NOTE(review): fs.rmdirSync with { recursive: true } is deprecated in
        // newer Node versions — consider fs.rmSync if the runtime allows.
        logger.info(`Cleaning temp directory ${tempDir}`);
        fs.rmdirSync(tempDir, { recursive: true });
        fs.mkdirSync(tempDir, { recursive: true });
        const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
        const apiDetails = {
            auth,
            externalRepoAuth: auth,
            url: util_1.parseGitHubUrl(cmd.githubUrl),
        };
        // Verify the GitHub instance version is supported by this runner.
        const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
        util_1.checkGitHubVersionInRange(gitHubVersion, "runner", logger);
        // Use the user-supplied CLI when given, otherwise download one.
        let codeql;
        if (cmd.codeqlPath !== undefined) {
            codeql = codeql_1.getCodeQL(cmd.codeqlPath);
        }
        else {
            codeql = (await init_1.initCodeQL(undefined, apiDetails, tempDir, "runner", gitHubVersion.type, logger)).codeql;
        }
        const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
        // No tracer config means no traced (compiled) languages — nothing more
        // to set up for the build.
        const tracerConfig = await init_1.runInit(codeql, config);
        if (tracerConfig === undefined) {
            return;
        }
        if (process.platform === "win32") {
            await init_1.injectWindowsTracer(parseTraceProcessName(), parseTraceProcessLevel(), config, codeql, tracerConfig);
        }
        // Always output a json file of the env that can be consumed programmatically
        const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
        fs.writeFileSync(jsonEnvFile, JSON.stringify(tracerConfig.env));
        if (process.platform === "win32") {
            // Emit both cmd/batch and PowerShell flavours of the environment.
            const batEnvFile = path.join(config.tempDir, "codeql-env.bat");
            const batEnvFileContents = Object.entries(tracerConfig.env)
                .map(([key, value]) => `Set ${key}=${value}`)
                .join("\n");
            fs.writeFileSync(batEnvFile, batEnvFileContents);
            // NOTE(review): the PowerShell env file is written with a ".sh"
            // extension ("codeql-env.sh") — looks like it should be ".ps1";
            // confirm before changing since the log message below references it.
            const powershellEnvFile = path.join(config.tempDir, "codeql-env.sh");
            const powershellEnvFileContents = Object.entries(tracerConfig.env)
                .map(([key, value]) => `$env:${key}="${value}"`)
                .join("\n");
            fs.writeFileSync(powershellEnvFile, powershellEnvFileContents);
            logger.info(`\nCodeQL environment output to "${jsonEnvFile}", "${batEnvFile}" and "${powershellEnvFile}". ` +
                `Please export these variables to future processes so that CodeQL can monitor the build. ` +
                `If using cmd/batch run "call ${batEnvFile}" ` +
                `or if using PowerShell run "cat ${powershellEnvFile} | Invoke-Expression".`);
        }
        else {
            // Assume that anything that's not windows is using a unix-style shell
            const shEnvFile = path.join(config.tempDir, "codeql-env.sh");
            const shEnvFileContents = Object.entries(tracerConfig.env)
                // Some vars contain ${LIB} that we do not want to be expanded when executing this script
                .map(([key, value]) => `export ${key}="${value.replace(/\$/g, "\\$")}"`)
                .join("\n");
            fs.writeFileSync(shEnvFile, shEnvFileContents);
            logger.info(`\nCodeQL environment output to "${jsonEnvFile}" and "${shEnvFile}". ` +
                `Please export these variables to future processes so that CodeQL can monitor the build, ` +
                `for example by running ". ${shEnvFile}".`);
        }
    }
    catch (e) {
        // Log the failure and signal it via the exit code rather than throwing,
        // so the CLI exits cleanly.
        logger.error("Init failed");
        logger.error(e);
        process.exitCode = 1;
    }
});
|
|
||||||
// `autobuild` command: attempts to build the project for a single compiled
// language using the config and tracer environment produced by `init`.
program
    .command("autobuild")
    .description("Attempts to automatically build code")
    .option("--language <language>", "The language to build. Otherwise will detect the dominant compiled language.")
    .option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
    .option("--debug", "Print more verbose output", false)
    .action(async (cmd) => {
    const logger = logging_1.getRunnerLogger(cmd.debug);
    try {
        // The config was persisted by `init`; a missing config means the two
        // commands were pointed at different temp dirs.
        const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
        if (config === undefined) {
            throw new Error("Config file could not be found at expected location. " +
                "Was the 'init' command run with the same '--temp-dir' argument as this command.");
        }
        util_1.setupActionsVars(config.tempDir, config.toolCacheDir);
        // Load the tracer env saved by `init` (no-op if already exported).
        importTracerEnvironment(config);
        // Resolve the language to build: validate an explicit one, otherwise
        // auto-detect the dominant compiled language.
        let language = undefined;
        if (cmd.language !== undefined) {
            language = languages_1.parseLanguage(cmd.language);
            if (language === undefined || !config.languages.includes(language)) {
                throw new Error(`"${cmd.language}" is not a recognised language. ` +
                    `Known languages in this project are ${config.languages.join(", ")}.`);
            }
        }
        else {
            language = autobuild_1.determineAutobuildLanguage(config, logger);
        }
        // A project with no compiled languages has nothing to autobuild.
        if (language !== undefined) {
            await autobuild_1.runAutobuild(language, config, logger);
        }
    }
    catch (e) {
        logger.error("Autobuild failed");
        logger.error(e);
        process.exitCode = 1;
    }
});
|
|
||||||
// `analyze` command: finalizes the CodeQL databases, runs the configured
// queries, and (unless --no-upload is given) uploads the SARIF results.
program
    .command("analyze")
    .description("Finishes extracting code and runs CodeQL queries")
    .requiredOption("--repository <repository>", "Repository name. (Required)")
    .requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
    .requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
    .requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
    .option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
    .option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
    .option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
    .option("--no-upload", "Do not upload results after analysis.")
    .option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
    .option("--ram <ram>", "Amount of memory to use when running queries. Default is to use all available memory.")
    .option("--no-add-snippets", "Specify whether to include code snippets in the sarif output.")
    .option("--threads <threads>", "Number of threads to use when running queries. " +
    "Default is to use all available cores.")
    .option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
    .option("--debug", "Print more verbose output", false)
    .action(async (cmd) => {
    const logger = logging_1.getRunnerLogger(cmd.debug);
    try {
        // Config was persisted by `init`; both commands must share --temp-dir.
        const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
        if (config === undefined) {
            throw new Error("Config file could not be found at expected location. " +
                "Was the 'init' command run with the same '--temp-dir' argument as this command.");
        }
        util_1.setupActionsVars(config.tempDir, config.toolCacheDir);
        const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
        const apiDetails = {
            auth,
            url: util_1.parseGitHubUrl(cmd.githubUrl),
        };
        const outputDir = cmd.outputDir || path.join(config.tempDir, "codeql-sarif");
        await analyze_1.runAnalyze(outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
        // commander turns --no-upload into cmd.upload === false.
        if (!cmd.upload) {
            logger.info("Not uploading results");
            return;
        }
        await upload_lib.uploadFromRunner(outputDir, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.checkoutPath || process.cwd(), config.gitHubVersion, apiDetails, logger);
    }
    catch (e) {
        logger.error("Analyze failed");
        logger.error(e);
        process.exitCode = 1;
    }
});
|
|
||||||
// `upload` command: uploads a SARIF file (or a directory of SARIF files)
// produced elsewhere to code scanning.
program
    .command("upload")
    .description("Uploads a SARIF file, or all SARIF files from a directory, to code scanning")
    .requiredOption("--sarif-file <file>", "SARIF file to upload, or a directory containing multiple SARIF files. (Required)")
    .requiredOption("--repository <repository>", "Repository name. (Required)")
    .requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
    .requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
    .requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
    .option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
    .option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
    .option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
    .option("--debug", "Print more verbose output", false)
    .action(async (cmd) => {
    const logger = logging_1.getRunnerLogger(cmd.debug);
    // NOTE(review): unlike the other commands, auth is resolved outside the
    // try block, so a getGitHubAuth failure here is not caught — confirm
    // whether that is intentional.
    const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
    const apiDetails = {
        auth,
        url: util_1.parseGitHubUrl(cmd.githubUrl),
    };
    try {
        const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
        await upload_lib.uploadFromRunner(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
    }
    catch (e) {
        logger.error("Upload failed");
        logger.error(e);
        process.exitCode = 1;
    }
});
// Dispatch to whichever subcommand was requested on the command line.
program.parse(process.argv);
|
|
||||||
//# sourceMappingURL=runner.js.map
|
|
||||||
File diff suppressed because one or more lines are too long
58
lib/setup-tools.js
generated
Normal file
58
lib/setup-tools.js
generated
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
"use strict";
|
||||||
|
// TypeScript interop helper: wraps a CommonJS module so that namespace
// imports see the original exports plus a `default` pointing at the module.
// Real ES modules (marked with __esModule) are returned unchanged.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var result = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                result[key] = mod[key];
            }
        }
    }
    result["default"] = mod;
    return result;
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||||
|
const path = __importStar(require("path"));
|
||||||
|
// Describes the layout of an unpacked CodeQL CLI distribution: its root,
// tools directory, launcher command, and the platform-specific tools subdir.
class CodeQLSetup {
    /**
     * @param {string} codeqlDist - Root folder of the unpacked distribution.
     * @throws {Error} If the current OS is not win32, linux or darwin.
     */
    constructor(codeqlDist) {
        this.dist = codeqlDist;
        this.tools = path.join(this.dist, 'tools');
        this.cmd = path.join(codeqlDist, 'codeql');
        // TODO check process.arch ?
        if (process.platform === 'win32') {
            this.platform = 'win64';
            // On Windows the launcher is a batch wrapper.
            if (this.cmd.endsWith('codeql')) {
                this.cmd += ".cmd";
            }
        }
        else if (process.platform === 'linux') {
            this.platform = 'linux64';
        }
        else if (process.platform === 'darwin') {
            this.platform = 'osx64';
        }
        else {
            // Fixed typo in error message: was "Unsupported plaform".
            throw new Error("Unsupported platform: " + process.platform);
        }
    }
}
|
||||||
|
exports.CodeQLSetup = CodeQLSetup;
|
||||||
|
/**
 * Downloads (or retrieves from the tool cache) the CodeQL CLI bundle named by
 * the required `tools` action input and returns a CodeQLSetup describing it.
 *
 * @returns {Promise<CodeQLSetup>} Setup pointing at the cached distribution.
 * @throws {Error} When the CLI cannot be downloaded or extracted.
 */
async function setupCodeQL() {
    // Cache key version; bump to invalidate previously cached CLI copies.
    const version = '1.0.0';
    const codeqlURL = core.getInput('tools', { required: true });
    try {
        let codeqlFolder = toolcache.find('CodeQL', version);
        if (!codeqlFolder) {
            // Not cached yet: download, unpack, and store in the tool cache.
            const downloadPath = await toolcache.downloadTool(codeqlURL);
            const extractedPath = await toolcache.extractTar(downloadPath);
            codeqlFolder = await toolcache.cacheDir(extractedPath, 'CodeQL', version);
        }
        else {
            core.debug(`CodeQL found in cache ${codeqlFolder}`);
        }
        return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
    }
    catch (e) {
        core.error(e);
        throw new Error("Unable to download and extract CodeQL CLI");
    }
}
|
||||||
|
exports.setupCodeQL = setupCodeQL;
|
||||||
|
//# sourceMappingURL=setup-tools.js.map
|
||||||
1
lib/setup-tools.js.map
Normal file
1
lib/setup-tools.js.map
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAE7B,MAAa,WAAW;IAMpB,YAAY,UAAkB;QAC1B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAC9B,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC7B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACtB;SACJ;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACrC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC7B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACtC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SAC3B;aAAM;YACH,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC/D;IACL,CAAC;CACJ;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC7B,MAAM,OAAO,GAAG,OAAO,CAAC;IACxB,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7D,IAAI;QACA,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACrD,IAAI,YAAY,EAAE;YACd,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACvD;aAAM;YACH,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC;SAC/E;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE7D;IAAC,OAAO,CAAC,EAAE;QACR,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAChE;AACL,CAAC;AAnBD,kCAmBC"}
|
||||||
216
lib/setup-tracer.js
generated
Normal file
216
lib/setup-tracer.js
generated
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
"use strict";
|
||||||
|
// TypeScript interop helper: wraps a CommonJS module so that namespace
// imports see the original exports plus a `default` pointing at the module.
// Real ES modules (marked with __esModule) are returned unchanged.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var result = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                result[key] = mod[key];
            }
        }
    }
    result["default"] = mod;
    return result;
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const exec = __importStar(require("@actions/exec"));
|
||||||
|
const io = __importStar(require("@actions/io"));
|
||||||
|
const fs = __importStar(require("fs"));
|
||||||
|
const path = __importStar(require("path"));
|
||||||
|
const analysisPaths = __importStar(require("./analysis-paths"));
|
||||||
|
const configUtils = __importStar(require("./config-utils"));
|
||||||
|
const setuptools = __importStar(require("./setup-tools"));
|
||||||
|
const sharedEnv = __importStar(require("./shared-environment"));
|
||||||
|
const util = __importStar(require("./util"));
|
||||||
|
// Tracer environment variables that must always be forwarded to traced
// processes, even when they already exist in the current environment.
// Fix: the original array literal contained elisions (bare commas), which
// produced holes that iterate as `undefined` and so added `undefined` to
// the Set.
const CRITICAL_TRACER_VARS = new Set([
    'SEMMLE_PRELOAD_libtrace',
    'SEMMLE_RUNNER',
    'SEMMLE_COPY_EXECUTABLES_ROOT',
    'SEMMLE_DEPTRACE_SOCKET',
    'SEMMLE_JAVA_TOOL_OPTIONS',
]);
|
||||||
|
// Runs `codeql database trace-command` to capture the tracer environment for
// one language database, returning the tracer spec plus the environment
// variables that must be propagated to traced build processes.
// Returns an object { spec, env } where `spec` is the tracer configuration
// file path and `env` maps variable names to values.
async function tracerConfig(codeql, database, compilerSpec) {
    const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
    // trace-command runs tracer-env.js under the tracer, which dumps the
    // resulting environment as JSON into envFile.
    let envFile = path.resolve(database, 'working', 'env.tmp');
    await exec.exec(codeql.cmd, ['database', 'trace-command', database,
        ...compilerSpecArg,
        process.execPath, path.resolve(__dirname, 'tracer-env.js'), envFile]);
    const env = JSON.parse(fs.readFileSync(envFile, 'utf-8'));
    const config = env['ODASA_TRACER_CONFIGURATION'];
    const info = { spec: config, env: {} };
    // Extract critical tracer variables from the environment
    for (let entry of Object.entries(env)) {
        const key = entry[0];
        const value = entry[1];
        // skip ODASA_TRACER_CONFIGURATION as it is handled separately
        if (key === 'ODASA_TRACER_CONFIGURATION') {
            continue;
        }
        // skip undefined values
        if (typeof value === 'undefined') {
            continue;
        }
        // Keep variables that do not exist in current environment. In addition always keep
        // critical and CODEQL_ variables
        if (typeof process.env[key] === 'undefined' || CRITICAL_TRACER_VARS.has(key) || key.startsWith('CODEQL_')) {
            info.env[key] = value;
        }
    }
    return info;
}
|
||||||
|
/**
 * Merges per-language tracer configs into a single compound config.
 *
 * A tracer config is a map containing additional environment variables and a
 * tracer 'spec' file. A tracer 'spec' file has the following format:
 * [log_file, number_of_blocks, blocks_text].
 *
 * Fix applied: the deprecated (and removed-in-newer-Node) `new Buffer(...)`
 * constructor is replaced with `Buffer.from(...)`; behavior is otherwise
 * unchanged.
 *
 * @param {Object<string, {spec: string, env: Object<string, string>}>} configs
 *   Map from language name to that language's tracer config.
 * @returns {{env: Object<string, string>, spec: string}} Compound config.
 * @throws {Error} When two languages define the same env var with different
 *   values.
 */
function concatTracerConfigs(configs) {
    // Merge the environments, detecting conflicting definitions.
    const env = {};
    let copyExecutables = false;
    let envSize = 0;
    for (let v of Object.values(configs)) {
        for (let e of Object.entries(v.env)) {
            const name = e[0];
            const value = e[1];
            // skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
            if (name === 'SEMMLE_COPY_EXECUTABLES_ROOT') {
                copyExecutables = true;
            }
            else if (name in env) {
                if (env[name] !== value) {
                    throw Error('Incompatible values in environment parameter ' +
                        name + ': ' + env[name] + ' and ' + value);
                }
            }
            else {
                env[name] = value;
                envSize += 1;
            }
        }
    }
    // Concatenate spec files into a new spec file
    let languages = Object.keys(configs);
    const cppIndex = languages.indexOf('cpp');
    // Make sure cpp is the last language, if it's present since it must be concatenated last
    if (cppIndex !== -1) {
        let lastLang = languages[languages.length - 1];
        languages[languages.length - 1] = languages[cppIndex];
        languages[cppIndex] = lastLang;
    }
    let totalLines = [];
    let totalCount = 0;
    for (let lang of languages) {
        // Spec format: line 0 = log file, line 1 = block count, rest = blocks.
        const lines = fs.readFileSync(configs[lang].spec, 'utf8').split(/\r?\n/);
        const count = parseInt(lines[1], 10);
        totalCount += count;
        totalLines.push(...lines.slice(2));
    }
    const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
    const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
    const spec = path.resolve(tempFolder, 'compound-spec');
    const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
    const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
    if (copyExecutables) {
        env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
        envSize += 1;
    }
    fs.writeFileSync(spec, newSpecContent.join('\n'));
    // Prepare the content of the compound environment file: a little-endian
    // int32 entry count, then length-prefixed "KEY=VALUE\0" records.
    let buffer = Buffer.alloc(4);
    buffer.writeInt32LE(envSize, 0);
    for (let e of Object.entries(env)) {
        const key = e[0];
        const value = e[1];
        // Buffer.from replaces the deprecated `new Buffer(string, encoding)`.
        const lineBuffer = Buffer.from(key + '=' + value + '\0', 'utf8');
        const sizeBuffer = Buffer.alloc(4);
        sizeBuffer.writeInt32LE(lineBuffer.length, 0);
        buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
    }
    // Write the compound environment
    const envPath = spec + '.environment';
    fs.writeFileSync(envPath, buffer);
    return { env, spec };
}
|
||||||
|
// Entry point of the init action: loads the config, sets up the CodeQL CLI,
// initializes one database per language, starts the build tracer for compiled
// languages, and exports the state later actions need via env variables.
async function run() {
    try {
        if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
            return;
        }
        // The config file MUST be parsed in the init action
        const config = await configUtils.loadConfig();
        core.startGroup('Load language configuration');
        const languages = await util.getLanguages();
        // If the languages parameter was not given and no languages were
        // detected then fail here as this is a workflow configuration error.
        if (languages.length === 0) {
            core.setFailed("Did not detect any languages to analyze. Please update input in workflow.");
            return;
        }
        core.endGroup();
        analysisPaths.includeAndExcludeAnalysisPaths(config, languages);
        const sourceRoot = path.resolve();
        core.startGroup('Setup CodeQL tools');
        const codeqlSetup = await setuptools.setupCodeQL();
        // Log the CLI version for diagnostics.
        await exec.exec(codeqlSetup.cmd, ['version', '--format=json']);
        core.endGroup();
        // Forward Go flags
        const goFlags = process.env['GOFLAGS'];
        if (goFlags) {
            core.exportVariable('GOFLAGS', goFlags);
            core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
        }
        // Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
        const codeqlRam = process.env['CODEQL_RAM'] || '6500';
        core.exportVariable('CODEQL_RAM', codeqlRam);
        const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
        await io.mkdirP(databaseFolder);
        let tracedLanguages = {};
        let scannedLanguages = [];
        // TODO: replace this code once CodeQL supports multi-language tracing
        for (let language of languages) {
            const languageDatabase = path.join(databaseFolder, language);
            // Init language database
            await exec.exec(codeqlSetup.cmd, ['database', 'init', languageDatabase, '--language=' + language, '--source-root=' + sourceRoot]);
            // TODO: add better detection of 'traced languages' instead of using a hard coded list
            if (['cpp', 'java', 'csharp'].includes(language)) {
                const config = await tracerConfig(codeqlSetup, languageDatabase);
                tracedLanguages[language] = config;
            }
            else {
                scannedLanguages.push(language);
            }
        }
        const tracedLanguageKeys = Object.keys(tracedLanguages);
        if (tracedLanguageKeys.length > 0) {
            // Combine the per-language tracer configs and export the result so
            // subsequent build steps run under the tracer.
            const mainTracerConfig = concatTracerConfigs(tracedLanguages);
            if (mainTracerConfig.spec) {
                for (let entry of Object.entries(mainTracerConfig.env)) {
                    core.exportVariable(entry[0], entry[1]);
                }
                core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
                // Platform-specific tracer injection mechanism.
                if (process.platform === 'darwin') {
                    core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeqlSetup.tools, 'osx64', 'libtrace.dylib'));
                }
                else if (process.platform === 'win32') {
                    await exec.exec('powershell', [path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
                        path.resolve(codeqlSetup.tools, 'win64', 'tracer.exe')], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
                }
                else {
                    // ${LIB} is expanded by the dynamic loader, not the shell.
                    core.exportVariable('LD_PRELOAD', path.join(codeqlSetup.tools, 'linux64', '${LIB}trace.so'));
                }
            }
        }
        core.exportVariable(sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES, scannedLanguages.join(','));
        core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));
        // TODO: make this a "private" environment variable of the action
        core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
        core.exportVariable(sharedEnv.CODEQL_ACTION_CMD, codeqlSetup.cmd);
    }
    catch (error) {
        core.setFailed(error.message);
        await util.reportActionFailed('init', error.message, error.stack);
        return;
    }
    await util.reportActionSucceeded('init');
    core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
}
|
||||||
|
// Kick off the init action; any unhandled rejection fails the workflow step
// and dumps the error for debugging.
run().catch(e => {
    core.setFailed("init action failed: " + e);
    console.log(e);
});
|
||||||
|
//# sourceMappingURL=setup-tracer.js.map
|
||||||
1
lib/setup-tracer.js.map
Normal file
1
lib/setup-tracer.js.map
Normal file
File diff suppressed because one or more lines are too long
12
lib/shared-environment.js
generated
12
lib/shared-environment.js
generated
@@ -1,10 +1,18 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
|
exports.CODEQL_ACTION_CMD = 'CODEQL_ACTION_CMD';
|
||||||
|
exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
|
||||||
|
exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
|
||||||
|
exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
|
||||||
|
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
|
||||||
|
exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
|
||||||
|
exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
|
||||||
// The time at which the first action (normally init) started executing.
|
// The time at which the first action (normally init) started executing.
|
||||||
// If a workflow invokes a different action without first invoking the init
|
// If a workflow invokes a different action without first invoking the init
|
||||||
// action (i.e. the upload action is being used by a third-party integrator)
|
// action (i.e. the upload action is being used by a third-party integrator)
|
||||||
// then this variable will be assigned the start time of the action invoked
|
// then this variable will be assigned the start time of the action invoked
|
||||||
// rather that the init action.
|
// rather that the init action.
|
||||||
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
|
exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
|
||||||
|
// Populated when the init action completes successfully
|
||||||
|
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
|
||||||
//# sourceMappingURL=shared-environment.js.map
|
//# sourceMappingURL=shared-environment.js.map
|
||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
|
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,iBAAiB,GAAG,mBAAmB,CAAC;AACxC,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AACpD,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}
|
||||||
75
lib/testing-utils.js
generated
75
lib/testing-utils.js
generated
@@ -1,75 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
// TypeScript interop helper: ensures a module has a `default` export so that
// `import x from '...'` works against CommonJS modules. ES modules (marked
// with __esModule) are returned unchanged.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
|
|
||||||
// TypeScript interop helper: wraps a CommonJS module so that namespace
// imports see the original exports plus a `default` pointing at the module.
// Real ES modules (marked with __esModule) are returned unchanged.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var result = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                result[key] = mod[key];
            }
        }
    }
    result["default"] = mod;
    return result;
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const sinon_1 = __importDefault(require("sinon"));
|
|
||||||
const CodeQL = __importStar(require("./codeql"));
|
|
||||||
/**
 * Builds a Socket.write-compatible function that records everything written
 * into `context.testOutput` instead of a real stream.
 *
 * Supports both overloads of Socket.write:
 *   write(buffer: Uint8Array | string, cb?)
 *   write(str: Uint8Array | string, encoding?, cb?)
 *
 * @param {{ testOutput: string }} context - Accumulator for captured output.
 * @returns {Function} A write-like function that always reports success.
 */
function wrapOutput(context) {
    return (chunk, encoding, cb) => {
        // Normalize the two overloads: (chunk, cb) vs (chunk, encoding, cb).
        let callback = cb;
        let enc = encoding;
        if (callback === undefined && typeof enc === "function") {
            callback = enc;
            enc = undefined;
        }
        // Record the output, decoding binary chunks with the given encoding.
        context.testOutput +=
            typeof chunk === "string"
                ? chunk
                : new TextDecoder(enc || "utf-8").decode(chunk);
        // Honour the stream contract by invoking the callback when provided.
        if (typeof callback === "function") {
            callback();
        }
        return true;
    };
}
|
|
||||||
function setupTests(test) {
|
|
||||||
const typedTest = test;
|
|
||||||
typedTest.beforeEach((t) => {
|
|
||||||
// Set an empty CodeQL object so that all method calls will fail
|
|
||||||
// unless the test explicitly sets one up.
|
|
||||||
CodeQL.setCodeQL({});
|
|
||||||
// Replace stdout and stderr so we can record output during tests
|
|
||||||
t.context.testOutput = "";
|
|
||||||
const processStdoutWrite = process.stdout.write.bind(process.stdout);
|
|
||||||
t.context.stdoutWrite = processStdoutWrite;
|
|
||||||
process.stdout.write = wrapOutput(t.context);
|
|
||||||
const processStderrWrite = process.stderr.write.bind(process.stderr);
|
|
||||||
t.context.stderrWrite = processStderrWrite;
|
|
||||||
process.stderr.write = wrapOutput(t.context);
|
|
||||||
// Many tests modify environment variables. Take a copy now so that
|
|
||||||
// we reset them after the test to keep tests independent of each other.
|
|
||||||
// process.env only has strings fields, so a shallow copy is fine.
|
|
||||||
t.context.env = {};
|
|
||||||
Object.assign(t.context.env, process.env);
|
|
||||||
});
|
|
||||||
typedTest.afterEach.always((t) => {
|
|
||||||
// Restore stdout and stderr
|
|
||||||
// The captured output is only replayed if the test failed
|
|
||||||
process.stdout.write = t.context.stdoutWrite;
|
|
||||||
process.stderr.write = t.context.stderrWrite;
|
|
||||||
if (!t.passed) {
|
|
||||||
process.stdout.write(t.context.testOutput);
|
|
||||||
}
|
|
||||||
// Undo any modifications made by sinon
|
|
||||||
sinon_1.default.restore();
|
|
||||||
// Undo any modifications to the env
|
|
||||||
process.env = t.context.env;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.setupTests = setupTests;
|
|
||||||
//# sourceMappingURL=testing-utils.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CA
AC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"}
|
|
||||||
87
lib/toolrunner-error-catcher.js
generated
87
lib/toolrunner-error-catcher.js
generated
@@ -1,87 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
|
||||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
|
||||||
/**
|
|
||||||
* Wrapper for toolrunner.Toolrunner which checks for specific return code and/or regex matches in console output.
|
|
||||||
* Output will be streamed to the live console as well as captured for subsequent processing.
|
|
||||||
* Returns promise with return code
|
|
||||||
*
|
|
||||||
* @param commandLine command to execute
|
|
||||||
* @param args optional arguments for tool. Escaping is handled by the lib.
|
|
||||||
* @param matchers defines specific codes and/or regexes that should lead to return of a custom error
|
|
||||||
* @param options optional exec options. See ExecOptions
|
|
||||||
* @returns Promise<number> exit code
|
|
||||||
*/
|
|
||||||
async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
|
|
||||||
var _a, _b, _c;
|
|
||||||
let stdout = "";
|
|
||||||
let stderr = "";
|
|
||||||
const listeners = {
|
|
||||||
stdout: (data) => {
|
|
||||||
var _a, _b;
|
|
||||||
stdout += data.toString();
|
|
||||||
if (((_b = (_a = options) === null || _a === void 0 ? void 0 : _a.listeners) === null || _b === void 0 ? void 0 : _b.stdout) !== undefined) {
|
|
||||||
options.listeners.stdout(data);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
// if no stdout listener was originally defined then we match default behavior of Toolrunner
|
|
||||||
process.stdout.write(data);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
stderr: (data) => {
|
|
||||||
var _a, _b;
|
|
||||||
stderr += data.toString();
|
|
||||||
if (((_b = (_a = options) === null || _a === void 0 ? void 0 : _a.listeners) === null || _b === void 0 ? void 0 : _b.stderr) !== undefined) {
|
|
||||||
options.listeners.stderr(data);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
// if no stderr listener was originally defined then we match default behavior of Toolrunner
|
|
||||||
process.stderr.write(data);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
};
|
|
||||||
// we capture the original return code or error so that if no match is found we can duplicate the behavior
|
|
||||||
let returnState;
|
|
||||||
try {
|
|
||||||
returnState = await new toolrunner.ToolRunner(await safeWhich.safeWhich(commandLine), args, {
|
|
||||||
...options,
|
|
||||||
listeners,
|
|
||||||
ignoreReturnCode: true,
|
|
||||||
}).exec();
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
returnState = e;
|
|
||||||
}
|
|
||||||
// if there is a zero return code then we do not apply the matchers
|
|
||||||
if (returnState === 0)
|
|
||||||
return returnState;
|
|
||||||
if (matchers) {
|
|
||||||
for (const matcher of matchers) {
|
|
||||||
if (matcher.exitCode === returnState || ((_a = matcher.outputRegex) === null || _a === void 0 ? void 0 : _a.test(stderr)) || ((_b = matcher.outputRegex) === null || _b === void 0 ? void 0 : _b.test(stdout))) {
|
|
||||||
throw new Error(matcher.message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (typeof returnState === "number") {
|
|
||||||
// only if we were instructed to ignore the return code do we ever return it non-zero
|
|
||||||
if ((_c = options) === null || _c === void 0 ? void 0 : _c.ignoreReturnCode) {
|
|
||||||
return returnState;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
throw new Error(`The process '${commandLine}' failed with exit code ${returnState}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
throw returnState;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.toolrunnerErrorCatcher = toolrunnerErrorCatcher;
|
|
||||||
//# sourceMappingURL=toolrunner-error-catcher.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI;SACvB,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,CAAC;KACjB;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW,WAChC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,WACjC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,UAAI,OAAO,0CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAzED,wDAyEC"}
|
|
||||||
145
lib/toolrunner-error-catcher.test.js
generated
145
lib/toolrunner-error-catcher.test.js
generated
@@ -1,145 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const exec = __importStar(require("@actions/exec"));
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
|
||||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
|
||||||
testing_utils_1.setupTests(ava_1.default);
|
|
||||||
ava_1.default("matchers are never applied if non-error exit", async (t) => {
|
|
||||||
const testArgs = buildDummyArgs("foo bar\\nblort qux", "foo bar\\nblort qux", "", 0);
|
|
||||||
const matchers = [
|
|
||||||
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" },
|
|
||||||
];
|
|
||||||
t.deepEqual(await exec.exec("node", testArgs), 0);
|
|
||||||
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), 0);
|
|
||||||
});
|
|
||||||
ava_1.default("regex matchers are applied to stdout for non-zero exit code", async (t) => {
|
|
||||||
const testArgs = buildDummyArgs("foo bar\\nblort qux", "", "", 1);
|
|
||||||
const matchers = [
|
|
||||||
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
|
|
||||||
];
|
|
||||||
await t.throwsAsync(exec.exec("node", testArgs), {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "The process 'node' failed with exit code 1",
|
|
||||||
});
|
|
||||||
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "🦄",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("regex matchers are applied to stderr for non-zero exit code", async (t) => {
|
|
||||||
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
|
|
||||||
const matchers = [
|
|
||||||
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
|
|
||||||
];
|
|
||||||
await t.throwsAsync(exec.exec("node", testArgs), {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "The process 'node' failed with exit code 1",
|
|
||||||
});
|
|
||||||
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "🦄",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("matcher returns correct error message when multiple matchers defined", async (t) => {
|
|
||||||
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
|
|
||||||
const matchers = [
|
|
||||||
{ exitCode: 456, outputRegex: new RegExp("lorem ipsum"), message: "😩" },
|
|
||||||
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
|
|
||||||
{ exitCode: 789, outputRegex: new RegExp("blah blah"), message: "🤦♂️" },
|
|
||||||
];
|
|
||||||
await t.throwsAsync(exec.exec("node", testArgs), {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "The process 'node' failed with exit code 1",
|
|
||||||
});
|
|
||||||
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "🦄",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("matcher returns first match to regex when multiple matches", async (t) => {
|
|
||||||
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
|
|
||||||
const matchers = [
|
|
||||||
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
|
|
||||||
{ exitCode: 789, outputRegex: new RegExp("blah blah"), message: "🤦♂️" },
|
|
||||||
{ exitCode: 987, outputRegex: new RegExp("foo bar"), message: "🚫" },
|
|
||||||
];
|
|
||||||
await t.throwsAsync(exec.exec("node", testArgs), {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "The process 'node' failed with exit code 1",
|
|
||||||
});
|
|
||||||
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "🦄",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("exit code matchers are applied", async (t) => {
|
|
||||||
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 123);
|
|
||||||
const matchers = [
|
|
||||||
{
|
|
||||||
exitCode: 123,
|
|
||||||
outputRegex: new RegExp("this will not match"),
|
|
||||||
message: "🦄",
|
|
||||||
},
|
|
||||||
];
|
|
||||||
await t.throwsAsync(exec.exec("node", testArgs), {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "The process 'node' failed with exit code 123",
|
|
||||||
});
|
|
||||||
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "🦄",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
ava_1.default("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
|
|
||||||
const testArgs = buildDummyArgs("standard output", "error output", "", 199);
|
|
||||||
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], { ignoreReturnCode: false }), { instanceOf: Error });
|
|
||||||
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], {
|
|
||||||
ignoreReturnCode: true,
|
|
||||||
}), 199);
|
|
||||||
});
|
|
||||||
ava_1.default("execErrorCatcher preserves behavior of provided listeners", async (t) => {
|
|
||||||
const stdoutExpected = "standard output";
|
|
||||||
const stderrExpected = "error output";
|
|
||||||
let stdoutActual = "";
|
|
||||||
let stderrActual = "";
|
|
||||||
const listeners = {
|
|
||||||
stdout: (data) => {
|
|
||||||
stdoutActual += data.toString();
|
|
||||||
},
|
|
||||||
stderr: (data) => {
|
|
||||||
stderrActual += data.toString();
|
|
||||||
},
|
|
||||||
};
|
|
||||||
const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
|
|
||||||
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], {
|
|
||||||
listeners,
|
|
||||||
}), 0);
|
|
||||||
t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
|
|
||||||
t.deepEqual(stderrActual, `${stderrExpected}\n`);
|
|
||||||
});
|
|
||||||
function buildDummyArgs(stdoutContents, stderrContents, desiredErrorMessage, desiredExitCode) {
|
|
||||||
let command = "";
|
|
||||||
if (stdoutContents)
|
|
||||||
command += `console.log("${stdoutContents}");`;
|
|
||||||
if (stderrContents)
|
|
||||||
command += `console.error("${stderrContents}");`;
|
|
||||||
if (command.length === 0)
|
|
||||||
throw new Error("Must provide contents for either stdout or stderr");
|
|
||||||
if (desiredErrorMessage)
|
|
||||||
command += `throw new Error("${desiredErrorMessage}");`;
|
|
||||||
if (desiredExitCode)
|
|
||||||
command += `process.exitCode = ${desiredExitCode};`;
|
|
||||||
return ["-e", command];
|
|
||||||
}
|
|
||||||
//# sourceMappingURL=toolrunner-error-catcher.test.js.map
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user