Compare commits


11 Commits

Author SHA1 Message Date
Aditya Sharad 2431162ee9 Autobuild: Prefix invocations with CODEQL_RUNNER 2021-12-07 18:42:59 -08:00
Aditya Sharad fef9ef14e5 Create csharp.log 2021-12-07 15:27:55 -08:00
Aditya Sharad 53a6841dab Upload debug artifacts 2021-12-07 15:20:40 -08:00
Aditya Sharad 05067c8c64 Try setting CODEQL_EXTRACTOR_CSHARP_LOG_DIR 2021-12-07 15:18:58 -08:00
Aditya Sharad 2882de5343 Clean up workflow 2021-12-07 15:09:31 -08:00
Aditya Sharad 0193961814 Add call to env 2021-12-07 15:08:09 -08:00
Aditya Sharad cf6bffdeea Update pr-checks.yml 2021-12-07 14:46:49 -08:00
Aditya Sharad f495010edd Matrix over Mac versions 2021-12-07 14:41:50 -08:00
Aditya Sharad 1264b28baa Update pr-checks.yml 2021-12-07 14:33:54 -08:00
Andrew Eisenberg e811ebfbb0 This should put the action into debug mode 2021-12-07 13:51:37 -08:00
Andrew Eisenberg 458b30ebc6 Initialize dotnet in pr-check 2021-12-07 13:01:50 -08:00
2182 changed files with 142507 additions and 62565 deletions

View File

@@ -10,8 +10,7 @@
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-requiring-type-checking",
"plugin:github/recommended",
"plugin:github/typescript",
"plugin:import/typescript"
"plugin:github/typescript"
],
"rules": {
"filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],

View File

@@ -28,6 +28,7 @@ runs:
echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == *"stable"* ]]; then
export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
echo "Hello $VERSION"
echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == "latest" ]]; then
echo "::set-output name=tools-url::latest"

View File

@@ -1,9 +1,12 @@
import argparse
import datetime
from github import Github
import json
import os
import random
import requests
import subprocess
import sys
import json
import datetime
import os
EMPTY_CHANGELOG = """# CodeQL Action and CodeQL Runner Changelog
@@ -13,25 +16,21 @@ No user facing changes.
"""
# Value of the mode flag for a v1 release
V1_MODE = 'v1-release'
# Value of the mode flag for a v2 release
V2_MODE = 'v2-release'
SOURCE_BRANCH_FOR_MODE = { V1_MODE: 'releases/v2', V2_MODE: 'main' }
TARGET_BRANCH_FOR_MODE = { V1_MODE: 'releases/v1', V2_MODE: 'releases/v2' }
# The branch being merged from.
# This is the one that contains day-to-day development work.
MAIN_BRANCH = 'main'
# The branch being merged into.
# This is the release branch that users reference.
LATEST_RELEASE_BRANCH = 'v1'
# Name of the remote
ORIGIN = 'origin'
# Runs git with the given args and returns the stdout.
# Raises an error if git does not exit successfully (unless passed
# allow_non_zero_exit_code=True).
def run_git(*args, allow_non_zero_exit_code=False):
# Raises an error if git does not exit successfully.
def run_git(*args):
cmd = ['git', *args]
p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if not allow_non_zero_exit_code and p.returncode != 0:
if (p.returncode != 0):
raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
return p.stdout.decode('ascii')
@@ -39,10 +38,8 @@ def run_git(*args, allow_non_zero_exit_code=False):
def branch_exists_on_remote(branch_name):
return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
# Opens a PR from the given branch to the target branch
def open_pr(
repo, all_commits, source_branch_short_sha, new_branch_name, source_branch, target_branch,
conductor, is_v2_release, labels, conflicted_files):
# Opens a PR from the given branch to the release branch
def open_pr(repo, all_commits, short_main_sha, branch_name):
# Sort the commits into the pull requests that introduced them,
# and any commits that don't have a pull request
pull_requests = []
@@ -64,8 +61,9 @@ def open_pr(
# Start constructing the body text
body = []
body.append('Merging ' + source_branch_short_sha + ' into ' + target_branch)
body.append('Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH)
conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
body.append('')
body.append('Conductor for this PR is @' + conductor)
@@ -82,46 +80,43 @@ def open_pr(
body.append('')
body.append('Contains the following commits not from a pull request:')
for commit in commits_without_pull_requests:
author_description = ' (@' + commit.author.login + ')' if commit.author is not None else ''
body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + author_description)
body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + ' (@' + commit.author.login + ')')
body.append('')
body.append('Please review the following:')
if len(conflicted_files) > 0:
body.append(' - [ ] You have added commits to this branch that resolve the merge conflicts ' +
'in the following files:')
body.extend([f' - [ ] `{file}`' for file in conflicted_files])
body.append(' - [ ] Another maintainer has reviewed the additional commits you added to this ' +
'branch to resolve the merge conflicts.')
body.append(' - [ ] The CHANGELOG displays the correct version and date.')
body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
body.append(' - [ ] There are no unexpected commits being merged into the ' + target_branch + ' branch.')
body.append(' - [ ] There are no unexpected commits being merged into the ' + LATEST_RELEASE_BRANCH + ' branch.')
body.append(' - [ ] The docs team is aware of any documentation changes that need to be released.')
if is_v2_release:
body.append(' - [ ] The mergeback PR is merged back into ' + source_branch + ' after this PR is merged.')
body.append(' - [ ] The v1 release PR is merged after this PR is merged.')
body.append(' - [ ] The mergeback PR is merged back into ' + MAIN_BRANCH + ' after this PR is merged.')
title = 'Merge ' + source_branch + ' into ' + target_branch
title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
# Create the pull request
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft so that
# a maintainer can take the PR out of draft, thereby triggering the PR checks.
pr = repo.create_pull(title=title, body='\n'.join(body), head=new_branch_name, base=target_branch, draft=True)
pr.add_to_labels(*labels)
pr = repo.create_pull(title=title, body='\n'.join(body), head=branch_name, base=LATEST_RELEASE_BRANCH, draft=True)
print('Created PR #' + str(pr.number))
# Assign the conductor
pr.add_to_assignees(conductor)
print('Assigned PR to ' + conductor)
# Gets a list of the SHAs of all commits that have happened on the source branch
# since the last release to the target branch.
# This will not include any commits that exist on the target branch
# that aren't on the source branch.
def get_commit_difference(repo, source_branch, target_branch):
# Passing split nothing means that the empty string splits to nothing: compare `''.split() == []`
# to `''.split('\n') == ['']`.
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + target_branch + '..' + ORIGIN + '/' + source_branch).strip().split()
# Gets the person who should be in charge of the mergeback PR
def get_conductor(repo, pull_requests, other_commits):
# If there are any PRs then use whoever merged the last one
if len(pull_requests) > 0:
return get_merger_of_pr(repo, pull_requests[-1])
# Otherwise take the author of the latest commit
return other_commits[-1].author.login
# Gets a list of the SHAs of all commits that have happened on main
# since the release branched off.
# This will not include any commits that exist on the release branch
# that aren't on main.
def get_commit_difference(repo):
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '..' + ORIGIN + '/' + MAIN_BRANCH).strip().split('\n')
# Convert to full-fledged commit objects
commits = [repo.get_commit(c) for c in commits]
@@ -141,7 +136,7 @@ def get_truncated_commit_message(commit):
else:
return message
# Converts a commit into the PR that introduced it to the source branch.
# Converts a commit into the PR that introduced it to the main branch.
# Returns the PR object, or None if no PR could be found.
def get_pr_for_commit(repo, commit):
prs = commit.get_pulls()
@@ -184,65 +179,29 @@ def update_changelog(version):
def main():
parser = argparse.ArgumentParser('update-release-branch.py')
if len(sys.argv) != 3:
raise Exception('Usage: update-release.branch.py <github token> <repository nwo>')
github_token = sys.argv[1]
repository_nwo = sys.argv[2]
parser.add_argument(
'--github-token',
type=str,
required=True,
help='GitHub token, typically from GitHub Actions.'
)
parser.add_argument(
'--repository-nwo',
type=str,
required=True,
help='The nwo of the repository, for example github/codeql-action.'
)
parser.add_argument(
'--mode',
type=str,
required=True,
choices=[V2_MODE, V1_MODE],
help=f"Which release to perform. '{V2_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V2_MODE]} as the source " +
f"branch and {TARGET_BRANCH_FOR_MODE[V2_MODE]} as the target branch. " +
f"'{V1_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V1_MODE]} as the source branch and " +
f"{TARGET_BRANCH_FOR_MODE[V1_MODE]} as the target branch."
)
parser.add_argument(
'--conductor',
type=str,
required=True,
help='The GitHub handle of the person who is conducting the release process.'
)
args = parser.parse_args()
source_branch = SOURCE_BRANCH_FOR_MODE[args.mode]
target_branch = TARGET_BRANCH_FOR_MODE[args.mode]
repo = Github(args.github_token).get_repo(args.repository_nwo)
repo = Github(github_token).get_repo(repository_nwo)
version = get_current_version()
if args.mode == V1_MODE:
# Change the version number to a v1 equivalent
version = get_current_version()
version = f'1{version[1:]}'
# Print what we intend to go
print('Considering difference between ' + source_branch + ' and ' + target_branch)
source_branch_short_sha = run_git('rev-parse', '--short', ORIGIN + '/' + source_branch).strip()
print('Current head of ' + source_branch + ' is ' + source_branch_short_sha)
print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
short_main_sha = run_git('rev-parse', '--short', ORIGIN + '/' + MAIN_BRANCH).strip()
print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)
# See if there are any commits to merge in
commits = get_commit_difference(repo=repo, source_branch=source_branch, target_branch=target_branch)
commits = get_commit_difference(repo)
if len(commits) == 0:
print('No commits to merge from ' + source_branch + ' to ' + target_branch)
print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
return
# The branch name is based off of the name of branch being merged into
# and the SHA of the branch being merged from. Thus if the branch already
# exists we can assume we don't need to recreate it.
new_branch_name = 'update-v' + version + '-' + source_branch_short_sha
new_branch_name = 'update-v' + version + '-' + short_main_sha
print('Branch name is ' + new_branch_name)
# Check if the branch already exists. If so we can abort as this script
@@ -253,90 +212,19 @@ def main():
# Create the new branch and push it to the remote
print('Creating branch ' + new_branch_name)
run_git('checkout', '-b', new_branch_name, ORIGIN + '/' + MAIN_BRANCH)
# The process of creating the v1 release can run into merge conflicts. We commit the unresolved
# conflicts so a maintainer can easily resolve them (vs erroring and requiring maintainers to
# reconstruct the release manually)
conflicted_files = []
print('Updating changelog')
update_changelog(version)
if args.mode == V1_MODE:
# If we're performing a backport, start from the target branch
print(f'Creating {new_branch_name} from the {ORIGIN}/{target_branch} branch')
run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{target_branch}')
# Revert the commit that we made as part of the last release that updated the version number and
# changelog to refer to 1.x.x variants. This avoids merge conflicts in the changelog and
# package.json files when we merge in the v2 branch.
# This commit will not exist the first time we release the v1 branch from the v2 branch, so we
# use `git log --grep` to conditionally revert the commit.
print('Reverting the 1.x.x version number and changelog updates from the last release to avoid conflicts')
v1_update_commits = run_git('log', '--grep', '^Update version and changelog for v', '--format=%H').split()
if len(v1_update_commits) > 0:
print(f' Reverting {v1_update_commits[0]}')
# Only revert the newest commit as older ones will already have been reverted in previous
# releases.
run_git('revert', v1_update_commits[0], '--no-edit')
# Also revert the "Update checked-in dependencies" commit created by Actions.
update_dependencies_commit = run_git('log', '--grep', '^Update checked-in dependencies', '--format=%H').split()[0]
print(f' Reverting {update_dependencies_commit}')
run_git('revert', update_dependencies_commit, '--no-edit')
else:
print(' Nothing to revert.')
print(f'Merging {ORIGIN}/{source_branch} into the release prep branch')
# Commit any conflicts (see the comment for `conflicted_files`)
run_git('merge', f'{ORIGIN}/{source_branch}', allow_non_zero_exit_code=True)
conflicted_files = run_git('diff', '--name-only', '--diff-filter', 'U').splitlines()
if len(conflicted_files) > 0:
run_git('add', '.')
run_git('commit', '--no-edit')
# Migrate the package version number from a v2 version number to a v1 version number
print(f'Setting version number to {version}')
subprocess.run(['npm', 'version', version, '--no-git-tag-version'])
run_git('add', 'package.json', 'package-lock.json')
# Migrate the changelog notes from v2 version numbers to v1 version numbers
print('Migrating changelog notes from v2 to v1')
subprocess.run(['sed', '-i', 's/^## 2\./## 1./g', 'CHANGELOG.md'])
# Remove changelog notes from v2 that don't apply to v1
subprocess.run(['sed', '-i', '/^- \[v2+ only\]/d', 'CHANGELOG.md'])
# Amend the commit generated by `npm version` to update the CHANGELOG
run_git('add', 'CHANGELOG.md')
run_git('commit', '-m', f'Update version and changelog for v{version}')
else:
# If we're performing a standard release, there won't be any new commits on the target branch,
# as these will have already been merged back into the source branch. Therefore we can just
# start from the source branch.
run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{source_branch}')
print('Updating changelog')
update_changelog(version)
# Create a commit that updates the CHANGELOG
run_git('add', 'CHANGELOG.md')
run_git('commit', '-m', f'Update changelog for v{version}')
# Create a commit that updates the CHANGELOG
run_git('add', 'CHANGELOG.md')
run_git('commit', '-m', version)
run_git('push', ORIGIN, new_branch_name)
# Open a PR to update the branch
open_pr(
repo,
commits,
source_branch_short_sha,
new_branch_name,
source_branch=source_branch,
target_branch=target_branch,
conductor=args.conductor,
is_v2_release=args.mode == V2_MODE,
labels=['Update dependencies'] if args.mode == V1_MODE else [],
conflicted_files=conflicted_files
)
open_pr(repo, commits, short_main_sha, new_branch_name)
if __name__ == '__main__':
main()
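For context on the update-release-branch.py changes above: one side of the diff drives the release from a --mode flag (v1-release or v2-release) and looks up the source and target branches from that mode, while the other hard-codes main and v1. A minimal Python sketch of the mode lookup and the release prep branch naming, using the constants from the diff (the example version and SHA are hypothetical):

V1_MODE = 'v1-release'
V2_MODE = 'v2-release'
SOURCE_BRANCH_FOR_MODE = {V1_MODE: 'releases/v2', V2_MODE: 'main'}
TARGET_BRANCH_FOR_MODE = {V1_MODE: 'releases/v1', V2_MODE: 'releases/v2'}

def prep_branch_name(mode: str, version: str, short_sha: str) -> str:
    # v1 releases rewrite a '2.x.y' version to its '1.x.y' equivalent first.
    if mode == V1_MODE:
        version = f'1{version[1:]}'
    return f'update-v{version}-{short_sha}'

assert prep_branch_name(V2_MODE, '2.1.0', 'abcd1234') == 'update-v2.1.0-abcd1234'
assert prep_branch_name(V1_MODE, '2.1.0', 'abcd1234') == 'update-v1.1.0-abcd1234'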

View File

@@ -1,95 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: "PR Check - Analyze: 'ref' and 'sha' from inputs"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
analyze-ref-input:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: windows-2019
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: windows-2019
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: windows-2019
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
name: "Analyze: 'ref' and 'sha' from inputs"
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
env:
TEST_MODE: true
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,56 +23,38 @@ jobs:
debug-artifacts:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest, macos-latest]
name: Debug artifact upload
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: my-debug-artifacts
debug-database-name: my-db
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
id: analysis
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v2
with:
name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
name: debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
- shell: bash
run: |
LANGUAGES="cpp csharp go java javascript python"
@@ -83,7 +64,7 @@ jobs:
echo "Missing a SARIF file for $language"
exit 1
fi
if [[ ! -f "my-db-$language.zip" ]] ; then
if [[ ! -f "$language.zip" ]] ; then
echo "Missing a database bundle for $language"
exit 1
fi
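For context on the matrix change above: one side of the hunk spells out every os/version pair under include:, while the other lists separate version and os axes that GitHub Actions expands into one job per combination. A minimal Python sketch of that cross-product expansion, using the axis values from the hunk (illustrative only):

from itertools import product

versions = ["stable-20201028", "stable-20210319", "stable-20210809",
            "cached", "latest", "nightly-latest"]
oses = ["ubuntu-latest", "macos-latest"]

jobs = [{"version": v, "os": o} for v, o in product(versions, oses)]
print(len(jobs))  # 12 jobs, one per version/os combination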

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,15 +23,13 @@ jobs:
extractor-ram-threads:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
version: [latest]
os: [ubuntu-latest]
name: Extractor ram and threads options test
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,59 +23,30 @@ jobs:
go-custom-queries:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: windows-2019
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: windows-2019
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: windows-2019
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os:
- ubuntu-latest
- macos-latest
- windows-latest
name: 'Go: Custom queries'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/setup-go@v3
- name: Initialize dotnet
run: dotnet restore
- uses: actions/setup-go@v2
with:
go-version: ^1.13.1
- uses: ./../action/init

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,43 +23,25 @@ jobs:
go-custom-tracing-autobuild:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest, macos-latest]
name: 'Go: Autobuild custom tracing'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/setup-go@v3
- uses: actions/setup-go@v2
with:
go-version: ^1.13.1
- uses: ./../action/init

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,59 +23,28 @@ jobs:
go-custom-tracing:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: windows-2019
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: windows-2019
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: windows-2019
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os:
- ubuntu-latest
- macos-latest
- windows-latest
name: 'Go: Custom tracing'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/setup-go@v3
- uses: actions/setup-go@v2
with:
go-version: ^1.13.1
- uses: ./../action/init

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,19 +23,13 @@ jobs:
javascript-source-root:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
version: [latest, cached, nightly-latest] # This feature is not compatible with old CLIs
os: [ubuntu-latest]
name: Custom source root
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test

View File

@@ -1,129 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - ML-powered queries
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
ml-powered-queries:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: windows-latest
version: stable-20220120
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: ML-powered queries
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: javascript
queries: security-extended
source-root: ./../action/tests/ml-powered-queries-repo
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
env:
TEST_MODE: true
- name: Upload SARIF
uses: actions/upload-artifact@v3
with:
name: ml-powered-queries-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
retention-days: 7
- name: Check results
# Running ML-powered queries on Windows requires CodeQL CLI 2.9.0+. We don't run these checks
# against Windows and `cached` while CodeQL CLI 2.9.0 makes its way into `cached` to avoid the
# test starting to fail when the cached CodeQL Bundle gets updated. Once the CodeQL Bundle
# containing CodeQL CLI 2.9.0 has been fully released, we can drop this line and start running
# these checks on Windows and `cached`.
if: matrix.os != 'windows-latest' || matrix.version != 'cached'
env:
# Running on Windows requires CodeQL CLI 2.9.0+, which has so far only made it to 'latest'.
SHOULD_RUN_ML_POWERED_QUERIES: ${{ matrix.os != 'windows-latest' || matrix.version
== 'latest' || matrix.version == 'nightly-latest' }}
shell: bash
run: |
echo "Expecting ML-powered queries to be run: ${SHOULD_RUN_ML_POWERED_QUERIES}"
cd "$RUNNER_TEMP/results"
# We should run at least the ML-powered queries in `expected_rules`.
expected_rules="js/ml-powered/nosql-injection js/ml-powered/path-injection js/ml-powered/sql-injection js/ml-powered/xss"
for rule in ${expected_rules}; do
found_rule=$(jq --arg rule "${rule}" '[.runs[0].tool.extensions[].rules | select(. != null) |
flatten | .[].id] | any(. == $rule)' javascript.sarif)
echo "Did find rule '${rule}': ${found_rule}"
if [[ "${found_rule}" != "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
echo "Expected SARIF output to contain rule '${rule}', but found no such rule."
exit 1
elif [[ "${found_rule}" == "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
echo "Found rule '${rule}' in the SARIF output which shouldn't have been part of the analysis."
exit 1
fi
done
# We should have at least one alert from an ML-powered query.
num_alerts=$(jq '[.runs[0].results[] |
select(.properties.score != null and (.rule.id | startswith("js/ml-powered/")))] | length' \
javascript.sarif)
echo "Found ${num_alerts} alerts from ML-powered queries.";
if [[ "${num_alerts}" -eq 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
echo "Expected to find at least one alert from an ML-powered query but found ${num_alerts}."
exit 1
elif [[ "${num_alerts}" -ne 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
echo "Expected not to find any alerts from an ML-powered query but found ${num_alerts}."
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
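For context on the check above: the jq expression asserts that each expected ML-powered rule id appears among the rules contributed by the tool's extensions in the SARIF output. Roughly the same containment check in Python (illustrative only; the ml_rules_present helper name is made up):

import json

EXPECTED_RULES = [
    "js/ml-powered/nosql-injection", "js/ml-powered/path-injection",
    "js/ml-powered/sql-injection", "js/ml-powered/xss",
]

def ml_rules_present(sarif_path: str) -> dict:
    # Collect every rule id declared by the tool's extensions, as the jq query does.
    with open(sarif_path) as f:
        sarif = json.load(f)
    rule_ids = {
        rule["id"]
        for ext in sarif["runs"][0]["tool"].get("extensions", [])
        for rule in (ext.get("rules") or [])
    }
    return {rule: rule in rule_ids for rule in EXPECTED_RULES}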

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,42 +23,26 @@ jobs:
multi-language-autodetect:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest, macos-latest]
name: Multi-language repository
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,44 +23,24 @@ jobs:
packaging-config-inputs-js:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
version: [nightly-20210831] # This CLI version is known to work with package used in this test
os: [ubuntu-latest, macos-latest]
name: 'Packaging: Config and input'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
packs: +dsp-testing/codeql-pack1@1.0.0
packs: +dsp-testing/codeql-pack1@0.1.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
@@ -76,11 +55,11 @@ jobs:
shell: bash
run: |
cd "$RUNNER_TEMP/results"
# We should have 4 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
# We should have 3 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
echo "Found matching rules '$RULES'"
if [ "$RULES" != "$EXPECTED_RULES" ]; then
echo "Did not match expected rules '$EXPECTED_RULES'."

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,40 +23,20 @@ jobs:
packaging-config-js:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
version: [nightly-20210831] # This CLI version is known to work with package used in this test
os: [ubuntu-latest, macos-latest]
name: 'Packaging: Config file'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging.yml
@@ -75,11 +54,11 @@ jobs:
shell: bash
run: |
cd "$RUNNER_TEMP/results"
# We should have 4 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
# We should have 3 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
echo "Found matching rules '$RULES'"
if [ "$RULES" != "$EXPECTED_RULES" ]; then
echo "Did not match expected rules '$EXPECTED_RULES'."

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,45 +23,25 @@ jobs:
packaging-inputs-js:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
version: [nightly-20210831] # This CLI version is known to work with package used in this test
os: [ubuntu-latest, macos-latest]
name: 'Packaging: Action input'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging2.yml
languages: javascript
packs: dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2, dsp-testing/codeql-pack3:other-query.ql
packs: dsp-testing/codeql-pack1@0.1.0, dsp-testing/codeql-pack2
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
@@ -76,11 +55,11 @@ jobs:
shell: bash
run: |
cd "$RUNNER_TEMP/results"
# We should have 4 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
# We should have 3 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
echo "Found matching rules '$RULES'"
if [ "$RULES" != "$EXPECTED_RULES" ]; then
echo "Did not match expected rules '$EXPECTED_RULES'."

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,63 +23,34 @@ jobs:
remote-config:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: windows-2019
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: windows-2019
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: windows-2019
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os:
- ubuntu-latest
- macos-latest
- windows-latest
name: Remote config file
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,25 +23,19 @@ jobs:
rubocop-multi-language:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest]
name: RuboCop multi-language
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,34 +23,24 @@ jobs:
split-workflow:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
version: [nightly-20210831] # This CLI version is known to work with package used in this test
os: [ubuntu-latest, macos-latest]
name: Split workflow
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
packs: +dsp-testing/codeql-pack1@1.0.0
packs: +dsp-testing/codeql-pack1@0.1.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
@@ -80,11 +69,11 @@ jobs:
shell: bash
run: |
cd "$RUNNER_TEMP/results"
# We should have 4 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
# We should have 3 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
echo "Found matching rules '$RULES'"
if [ "$RULES" != "$EXPECTED_RULES" ]; then
echo "Did not match expected rules '$EXPECTED_RULES'."

View File

@@ -1,67 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Autobuild working directory
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
test-autobuild-working-dir:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
name: Autobuild working directory
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Test setup
shell: bash
run: |
# Make sure that Gradle build succeeds in autobuild-dir ...
cp -a ../action/tests/java-repo autobuild-dir
# ... and fails if attempted in the current directory
echo > build.gradle
- uses: ./../action/init
with:
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/autobuild
with:
working-directory: autobuild-dir
- uses: ./../action/analyze
env:
TEST_MODE: true
- name: Check database
shell: bash
run: |
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d java ]]; then
echo "Did not find a Java database"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,20 +23,20 @@ jobs:
test-local-codeql:
strategy:
matrix:
include:
- os: ubuntu-latest
version: nightly-latest
version: [nightly-latest]
os: [ubuntu-latest]
name: Local CodeQL bundle
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- name: Fetch a CodeQL bundle
shell: bash
env:

.github/workflows/__test-proxy.yml generated vendored
View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,15 +23,13 @@ jobs:
test-proxy:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
version: [latest]
os: [ubuntu-latest]
name: Proxy test
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test

.github/workflows/__test-ruby.yml generated vendored
View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,25 +23,13 @@ jobs:
test-ruby:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
version: [latest, cached, nightly-latest]
os: [ubuntu-latest, macos-latest]
name: Ruby analysis
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test

View File

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- releases/v1
- releases/v2
- v1
pull_request:
types:
- opened
@@ -24,30 +23,26 @@ jobs:
unset-environment:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest]
name: Test unsetting environment variables
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation

View File

@@ -1,102 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: "PR Check - Upload-sarif: 'ref' and 'sha' from inputs"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
upload-ref-sha-input:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: windows-2019
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: windows-2019
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: windows-2019
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
name: "Upload-sarif: 'ref' and 'sha' from inputs"
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
upload: false
env:
TEST_MODE: true
- uses: ./../action/upload-sarif
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
env:
TEST_MODE: true
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -1,146 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Use a custom `checkout_path`
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
with-checkout-path:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: windows-2019
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: windows-2019
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: windows-2019
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
name: Use a custom `checkout_path`
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/checkout@v3
with:
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
path: x/y/z/some-path
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
# it's enough to test one compiled language and one interpreted language
languages: csharp,javascript
source-path: x/y/z/some-path/tests/multi-language-repo
debug: true
- name: Build code (non-windows)
shell: bash
if: ${{ runner.os != 'Windows' }}
run: |
$CODEQL_RUNNER x/y/z/some-path/tests/multi-language-repo/build.sh
- name: Build code (windows)
shell: bash
if: ${{ runner.os == 'Windows' }}
run: |
x/y/z/some-path/tests/multi-language-repo/build.sh
- uses: ./../action/analyze
with:
checkout_path: x/y/z/some-path/tests/multi-language-repo
ref: v1.1.0
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
upload: false
env:
TEST_MODE: true
- uses: ./../action/upload-sarif
with:
ref: v1.1.0
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
checkout_path: x/y/z/some-path/tests/multi-language-repo
env:
TEST_MODE: true
- name: Verify SARIF after upload
shell: bash
run: |
EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
EXPECTED_REF="v1.1.0"
EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
echo "$RUNNER_TEMP/payload.json"
exit 1
fi
if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
echo "$RUNNER_TEMP/payload.json"
exit 1
fi
if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
echo "$RUNNER_TEMP/payload.json"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
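For context on the verification step above: after upload-sarif runs, the workflow reads the payload recorded at $RUNNER_TEMP/payload.json and checks the commit OID, ref, and checkout URI suffix. The same assertions in Python (illustrative only; the verify_payload helper name is made up, and the expected values come from the hunk):

import json

def verify_payload(payload_path: str) -> None:
    with open(payload_path) as f:
        payload = json.load(f)
    assert payload["commit_oid"] == "474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
    assert payload["ref"] == "v1.1.0"
    assert payload["checkout_uri"].endswith("/x/y/z/some-path/tests/multi-language-repo")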

View File

@@ -15,11 +15,11 @@ jobs:
steps:
- name: Checkout CodeQL Action
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Check Expected Release Files
run: |
bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"
set -x
for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz"; do
for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz" "codeql-runner-linux" "codeql-runner-macos" "codeql-runner-win.exe"; do
curl --location --fail --head --request GET "https://github.com/github/codeql-action/releases/download/$bundle_version/$expected_file" > /dev/null
done

View File

@@ -1,31 +0,0 @@
# Checks for any conflict markers created by git. This check is primarily intended to validate that
# any merge conflicts in the v2 -> v1 backport PR are fixed before the PR is merged.
name: Check for conflicts
on:
pull_request:
branches: [main, v1, v2]
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
jobs:
check-for-conflicts:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Check for conflicts
run: |
# Use `|| true` since grep returns exit code 1 if there are no matches, and we don't want
# this to fail the workflow.
FILES_WITH_CONFLICTS=$(grep --extended-regexp --ignore-case --line-number --recursive \
'^(<<<<<<<|>>>>>>>)' . || true)
if [[ "${FILES_WITH_CONFLICTS}" ]]; then
echo "Fail: Found merge conflict markers in the following files:"
echo ""
echo "${FILES_WITH_CONFLICTS}"
exit 1
else
echo "Success: Found no merge conflict markers."
fi

View File

@@ -2,9 +2,9 @@ name: "CodeQL action"
on:
push:
branches: [main, releases/v1, releases/v2]
branches: [main, v1]
pull_request:
branches: [main, releases/v1, releases/v2]
branches: [main, v1]
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
@@ -20,7 +20,7 @@ jobs:
security-events: write
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Init with default CodeQL bundle from the VM image
id: init-default
uses: ./init
@@ -75,7 +75,7 @@ jobs:
security-events: write
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- uses: ./init
id: init
with:

View File

@@ -1,8 +1,7 @@
# This workflow runs after a release of the action. For v2 releases, it merges any changes from the
# release back into the main branch. Typically, this is just a single commit that updates the
# changelog. For v2 and v1 releases, it then (a) tags the merge commit on the release branch that
# represents the new release with an `vx.y.z` tag and (b) updates the `vx` tag to refer to this
# commit.
# This workflow runs after a release of the action.
# It merges any changes from the release back into the
# main branch. Typically, this is just a single commit
# that updates the changelog.
name: Tag release and merge back
on:
@@ -15,8 +14,7 @@ on:
push:
branches:
- releases/v1
- releases/v2
- v1
jobs:
merge-back:
@@ -27,16 +25,13 @@ jobs:
HEAD_BRANCH: "${{ github.head_ref || github.ref }}"
steps:
- name: Dump environment
run: env
- name: Dump GitHub context
- name: Dump GitHub Event context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "${GITHUB_CONTEXT}"
GITHUB_EVENT_CONTEXT: "${{ toJson(github.event) }}"
run: echo "$GITHUB_EVENT_CONTEXT"
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
- name: Update git config
run: |
@@ -47,25 +42,25 @@ jobs:
id: getVersion
run: |
VERSION="v$(jq '.version' -r 'package.json')"
echo "::set-output name=version::${VERSION}"
short_sha="${GITHUB_SHA:0:8}"
NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${short_sha}"
echo "::set-output name=newBranch::${NEW_BRANCH}"
SHORT_SHA="${GITHUB_SHA:0:8}"
echo "::set-output name=version::$VERSION"
NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${SHORT_SHA}"
echo "::set-output name=newBranch::$NEW_BRANCH"
- name: Dump branches
env:
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
run: |
echo "BASE_BRANCH ${BASE_BRANCH}"
echo "HEAD_BRANCH ${HEAD_BRANCH}"
echo "NEW_BRANCH ${NEW_BRANCH}"
echo "BASE_BRANCH $BASE_BRANCH"
echo "HEAD_BRANCH $HEAD_BRANCH"
echo "NEW_BRANCH $NEW_BRANCH"
- name: Create mergeback branch
env:
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
run: |
git checkout -b "${NEW_BRANCH}"
git checkout -b "$NEW_BRANCH"
- name: Check for tag
id: check
@@ -73,13 +68,13 @@ jobs:
VERSION: "${{ steps.getVersion.outputs.version }}"
run: |
set +e # don't fail on an errored command
git ls-remote --tags origin | grep "${VERSION}"
exists="$?"
if [ "${exists}" -eq 0 ]; then
echo "Tag ${VERSION} exists. Not going to re-release."
git ls-remote --tags origin | grep "$VERSION"
EXISTS="$?"
if [ "$EXISTS" -eq 0 ]; then
echo "Tag $TAG exists. Not going to re-release."
echo "::set-output name=exists::true"
else
echo "Tag ${VERSION} does not exist yet."
echo "Tag $TAG does not exist yet."
fi
# we didn't tag the release during the update-release-branch workflow because the
@@ -90,48 +85,35 @@ jobs:
env:
VERSION: ${{ steps.getVersion.outputs.version }}
run: |
# Unshallow the repo in order to allow pushes
git fetch --unshallow
# Create the `vx.y.z` tag
git tag --annotate "${VERSION}" --message "${VERSION}"
# Update the `vx` tag
major_version_tag=$(cut -d '.' -f1 <<< "${VERSION}")
# Use `--force` to overwrite the major version tag
git tag --annotate "${major_version_tag}" --message "${major_version_tag}" --force
# Push the tags, using:
# - `--atomic` to make sure we either update both tags or neither (an intermediate state,
# e.g. where we update the v2.x.y tag on the remote but not the v2 tag, could result in
# unwanted Dependabot updates, e.g. from v2 to v2.x.y)
# - `--force` since we're overwriting the `vx` tag
git push origin --atomic --force refs/tags/"${VERSION}" refs/tags/"${major_version_tag}"
git tag -a "$VERSION" -m "$VERSION"
git fetch --unshallow # unshallow the repo in order to allow pushes
git push origin --follow-tags "$VERSION"
- name: Create mergeback branch
if: steps.check.outputs.exists != 'true' && contains(github.ref, 'releases/v2')
if: steps.check.outputs.exists != 'true'
env:
VERSION: "${{ steps.getVersion.outputs.version }}"
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
run: |
set -exu
pr_title="Mergeback ${VERSION} ${HEAD_BRANCH} into ${BASE_BRANCH}"
pr_body="Updates version and changelog."
# Update the version number ready for the next release
npm version patch --no-git-tag-version
PR_TITLE="Mergeback $VERSION $HEAD_BRANCH into $BASE_BRANCH"
PR_BODY="Updates version and changelog."
# Update the changelog
perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
git add .
git commit -m "Update changelog and version after ${VERSION}"
git commit -m "Update changelog and version after $VERSION"
npm version patch
git push origin "${NEW_BRANCH}"
git push origin "$NEW_BRANCH"
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft
# so that a maintainer can take the PR out of draft, thereby triggering the PR checks.
gh pr create \
--head "${NEW_BRANCH}" \
--base "${BASE_BRANCH}" \
--title "${pr_title}" \
--head "$NEW_BRANCH" \
--base "$BASE_BRANCH" \
--title "$PR_TITLE" \
--label "Update dependencies" \
--body "${pr_body}" \
--body "$PR_BODY" \
--draft

View File

@@ -1,393 +1,17 @@
name: PR Checks (Basic Checks and Runner)
on:
push:
branches: [main, releases/v1, releases/v2]
pull_request:
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
workflow_dispatch:
jobs:
lint-js:
name: Lint
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v3
- name: Run Lint
run: npm run-script lint
check-js:
runs-on: ubuntu-latest
timeout-minutes: 45
strategy:
fail-fast: true
matrix:
node-types-version: [12.12, current]
steps:
- uses: actions/checkout@v3
- name: Update version of @types/node
if: matrix.node-types-version != 'current'
env:
NODE_TYPES_VERSION: ${{ matrix.node-types-version }}
run: |
# Export `NODE_TYPES_VERSION` so it's available to jq
export NODE_TYPES_VERSION="${NODE_TYPES_VERSION}"
contents=$(jq '.devDependencies."@types/node" = env.NODE_TYPES_VERSION' package.json)
echo "${contents}" > package.json
# Usually we run `npm install` on macOS to ensure that we pick up macOS-only dependencies.
# However we're not checking in the updated lockfile here, so it's fine to run
# `npm install` on Linux.
npm install
if [ ! -z "$(git status --porcelain)" ]; then
git config --global user.email "github-actions@github.com"
git config --global user.name "github-actions[bot]"
# The period in `git add --all .` ensures that we stage deleted files too.
git add --all .
git commit -m "Use @types/node=${NODE_TYPES_VERSION}"
fi
- name: Check generated JS
run: .github/workflows/script/check-js.sh
check-node-modules:
name: Check modules up to date
runs-on: macos-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v3
- name: Check node modules up to date
run: .github/workflows/script/check-node-modules.sh
verify-pr-checks:
name: Verify PR checks up to date
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install ruamel.yaml
- name: Verify PR checks up to date
run: .github/workflows/script/verify-pr-checks.sh
npm-test:
name: Unit Test
needs: [check-js, check-node-modules]
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
timeout-minutes: 45
steps:
- uses: actions/checkout@v3
- name: npm run-script test
run: npm run-script test
runner-analyze-javascript-ubuntu:
name: Runner ubuntu JS analyze
needs: [check-js, check-node-modules]
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
# Pass --config-file here, but not for other jobs in this workflow.
# This means we're testing the config file parsing in the runner
# but not slowing down all jobs unnecessarily as it doesn't add much
# testing the parsing on different operating systems and languages.
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-windows:
name: Runner windows JS analyze
needs: [check-js, check-node-modules]
timeout-minutes: 45
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-macos:
name: Runner macos JS analyze
needs: [check-js, check-node-modules]
timeout-minutes: 45
runs-on: macos-latest
steps:
- uses: actions/checkout@v3
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-ubuntu:
name: Runner ubuntu C# analyze
needs: [check-js, check-node-modules]
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
. ./codeql-runner/codeql-env.sh
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-windows:
name: Runner windows C# analyze
needs: [check-js, check-node-modules]
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
# `windows-latest`.
timeout-minutes: 45
runs-on: windows-2019
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
$Env:CODEQL_EXTRACTOR_CSHARP_ROOT = "" # Unset an environment variable to make sure the tracer resists this
& $Env:CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
- name: Upload tracer logs
uses: actions/upload-artifact@v3
with:
name: tracer-logs
path: ./codeql-runner/compound-build-tracer.log
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-macos:
name: Runner macos C# analyze
timeout-minutes: 45
needs: [check-js, check-node-modules]
runs-on: macos-latest
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-ubuntu:
name: Runner ubuntu autobuild C# analyze
timeout-minutes: 45
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
../action/runner/dist/codeql-runner-linux autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-windows:
timeout-minutes: 45
name: Runner windows autobuild C# analyze
needs: [check-js, check-node-modules]
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
# `windows-latest`.
runs-on: windows-2019
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
../action/runner/dist/codeql-runner-win.exe autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-macos:
name: Runner macos autobuild C# analyze
needs: [check-js, check-node-modules]
runs-on: macos-latest
timeout-minutes: 45
env:
ACTIONS_RUNNER_DEBUG: 1
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
@@ -403,80 +27,43 @@ jobs:
npm install
npm run build-runner
- name: Initialize dotnet
run: dotnet restore
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Create extractor directory
run: |
mkdir -p /Users/runner/work/codeql-action/codeql-action/codeql-runner/codeql_databases/csharp/log
echo ""
- name: Check env
run: env
- name: Build code
shell: bash
run: |
export CODEQL_EXTRACTOR_CSHARP_LOG_DIR="/Users/runner/work/codeql-action/codeql-action/codeql-runner/codeql_databases/csharp/log"
mkdir -p "$CODEQL_EXTRACTOR_CSHARP_LOG_DIR"
touch "$CODEQL_EXTRACTOR_CSHARP_LOG_DIR/csharp.log"
../action/runner/dist/codeql-runner-macos autobuild
- name: Check env
if: always()
run: env
- name: Upload
if: always()
uses: actions/upload-artifact@v2
with:
path: /Users/runner/work/codeql-action/codeql-action/codeql-runner/codeql_databases/csharp
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-upload-sarif:
name: Runner upload sarif
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
timeout-minutes: 45
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
steps:
- uses: actions/checkout@v3
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Upload with runner
run: |
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
runner-extractor-ram-threads-options:
name: Runner ubuntu extractor RAM and threads options
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v3
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-linux init --ram=230 --threads=1 --repository $GITHUB_REPOSITORY --languages java --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Assert Results
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
if [ "${CODEQL_RAM}" != "230" ]; then
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_THREADS}" != "1" ]; then
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
exit 1
fi

View File

@@ -2,7 +2,7 @@ name: Test Python Package Installation on Linux and Mac
on:
push:
branches: [main, releases/v1, releases/v2]
branches: [main, v1]
pull_request:
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
@@ -10,7 +10,6 @@ on:
jobs:
test-setup-python-scripts:
timeout-minutes: 45
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
@@ -25,7 +24,7 @@ jobs:
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Initialize CodeQL
uses: ./init
@@ -71,7 +70,7 @@ jobs:
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Initialize CodeQL
uses: ./init
@@ -122,11 +121,11 @@ jobs:
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- uses: actions/setup-python@v3
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python_version }}
python-version: ${{ matrix.python-version }}
- name: Initialize CodeQL
uses: ./init

54
.github/workflows/release-runner.yml vendored Normal file
View File

@@ -0,0 +1,54 @@
name: Release runner
on:
workflow_dispatch:
inputs:
bundle-tag:
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
required: false
jobs:
release-runner:
runs-on: ubuntu-latest
env:
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
strategy:
matrix:
extension: ["linux", "macos", "win.exe"]
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- uses: actions/upload-artifact@v2
with:
name: codeql-runner-${{matrix.extension}}
path: runner/dist/codeql-runner-${{matrix.extension}}
- name: Resolve Upload URL for the release
if: ${{ github.event.inputs.bundle-tag != null }}
id: save_url
run: |
UPLOAD_URL=$(curl -sS \
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
-H "Accept: application/json" \
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
echo ${UPLOAD_URL}
echo "::set-output name=upload_url::${UPLOAD_URL}"
- name: Upload Platform Package
if: ${{ github.event.inputs.bundle-tag != null }}
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.save_url.outputs.upload_url }}
asset_path: runner/dist/codeql-runner-${{matrix.extension}}
asset_name: codeql-runner-${{matrix.extension}}
asset_content_type: application/octet-stream

View File

@@ -1,35 +0,0 @@
#!/usr/bin/env bash
# Update the required checks based on the current branch.
# Typically, this will be main.
if [ -z "$GITHUB_TOKEN" ]; then
echo "Failed: No GitHub token found. This script requires admin access to `github/codeql-action`."
exit 1
fi
if [ "$#" -eq 1 ]; then
# If we were passed an argument, pass it as a query to fzf
GITHUB_SHA="$@"
elif [ "$#" -gt 1 ]; then
echo "Usage: $0 [SHA]"
echo "Update the required checks based on the SHA, or main."
elif [ -z "$GITHUB_SHA" ]; then
# If we don't have a SHA, use main
GITHUB_SHA="$(git rev-parse main)"
fi
echo "Getting checks for $GITHUB_SHA"
# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
CHECKS="$(gh api repos/github/codeql-action/commits/${GITHUB_SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") | not)] | sort')"
echo "$CHECKS" | jq
echo "{\"contexts\": ${CHECKS}}" > checks.json
for BRANCH in main releases/v2 releases/v1; do
echo "Updating $BRANCH"
gh api --silent -X "PATCH" "repos/github/codeql-action/branches/$BRANCH/protection/required_status_checks" --input checks.json
done
rm checks.json
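
Editorial note (not part of the diff): the script above reads `GITHUB_TOKEN`, accepts an optional commit SHA as its only argument, and otherwise falls back to the tip of `main`, matching the description in CONTRIBUTING.md. A minimal usage sketch, assuming the `gh` CLI is authenticated and the token has admin access to `github/codeql-action`; the SHA is a placeholder:

```bash
# Refresh the required checks from the latest commit on main
export GITHUB_TOKEN="<token with admin access to github/codeql-action>"
./.github/workflows/script/update-required-checks.sh

# Or compute the check list from a specific commit instead
./.github/workflows/script/update-required-checks.sh <sha>
```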

73
.github/workflows/split.yml vendored Normal file
View File

@@ -0,0 +1,73 @@
#
# Split the CodeQL Bundle into platform bundles
#
# Instructions:
# 1. Upload the new codeql-bundle (codeql-bundle.tar.gz) as an asset of the
# release (codeql-bundle-20200826)
# 2. Take note of the CLI Release used by the bundle (e.g., v2.2.5)
# 3. Manually launch this workflow file (via the Actions UI) specifying
# - The CLI Release (e.g., v2.2.5)
# - The release tag (e.g., codeql-bundle-20200826)
# 4. If everything succeeds you should see 3 new assets.
#
name: Split Bundle
on:
workflow_dispatch:
inputs:
cli-release:
description: 'CodeQL CLI Release (e.g., "v2.2.5")'
required: true
bundle-tag:
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
required: true
jobs:
build:
runs-on: ubuntu-latest
env:
CLI_RELEASE: "${{ github.event.inputs.cli-release }}"
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
strategy:
fail-fast: false
matrix:
platform: ["linux64", "osx64", "win64"]
steps:
- name: Resolve Upload URL for the release
id: save_url
run: |
UPLOAD_URL=$(curl -sS \
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
-H "Accept: application/json" \
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
echo ${UPLOAD_URL}
echo "::set-output name=upload_url::${UPLOAD_URL}"
- name: Download CodeQL CLI and Bundle
run: |
wget --no-verbose "https://github.com/${GITHUB_REPOSITORY}/releases/download/${RELEASE_TAG}/codeql-bundle.tar.gz"
wget --no-verbose "https://github.com/github/codeql-cli-binaries/releases/download/${CLI_RELEASE}/codeql-${{matrix.platform}}.zip"
- name: Create Platform Package
# Replace the codeql-binaries with the platform specific ones
run: |
gunzip codeql-bundle.tar.gz
tar -f codeql-bundle.tar --delete codeql
unzip -q codeql-${{matrix.platform}}.zip
tar -f codeql-bundle.tar --append codeql
gzip codeql-bundle.tar
mv codeql-bundle.tar.gz codeql-bundle-${{matrix.platform}}.tar.gz
du -sh codeql-bundle-${{matrix.platform}}.tar.gz
- name: Upload Platform Package
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.save_url.outputs.upload_url }}
asset_path: ./codeql-bundle-${{matrix.platform}}.tar.gz
asset_name: codeql-bundle-${{matrix.platform}}.tar.gz
asset_content_type: application/tar+gzip
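
Editorial note (not part of the diff): since the Split Bundle workflow is triggered via `workflow_dispatch` with the two inputs declared above, it can also be launched from the command line. A hedged sketch using the `gh` CLI; the input values are the example values from the workflow comments, not real releases:

```bash
# Launch the "Split Bundle" workflow with its two required inputs
gh workflow run split.yml \
  --repo github/codeql-action \
  -f cli-release="v2.2.5" \
  -f bundle-tag="codeql-bundle-20200826"
```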

View File

@@ -6,12 +6,11 @@ on:
jobs:
update:
name: Update dependencies
timeout-minutes: 45
runs-on: macos-latest
if: contains(github.event.pull_request.labels.*.name, 'Update dependencies') && (github.event.pull_request.head.repo.full_name == 'github/codeql-action')
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Remove PR label
env:

View File

@@ -1,35 +1,26 @@
name: Update release branch
on:
# You can trigger this workflow via workflow dispatch to start a release.
# This will open a PR to update the v2 release branch.
schedule:
- cron: 0 9 * * 1
repository_dispatch:
# Example of how to trigger this:
# curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
# Replace <token> with a personal access token from this page: https://github.com/settings/tokens
types: [update-release-branch]
workflow_dispatch:
# When the v2 release is complete, this workflow will open a PR to update the v1 release branch.
push:
branches:
- releases/v2
jobs:
update:
timeout-minutes: 45
runs-on: ubuntu-latest
if: github.repository == 'github/codeql-action'
if: ${{ github.repository == 'github/codeql-action' }}
steps:
- name: Dump environment
run: env
- name: Dump GitHub context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v3
- uses: actions/checkout@v2
with:
# Need full history so we calculate diffs
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v3
uses: actions/setup-python@v2
with:
python-version: 3.8
@@ -43,20 +34,5 @@ jobs:
git config --global user.email "github-actions@github.com"
git config --global user.name "github-actions[bot]"
- name: Update v2 release branch
if: github.event_name == 'workflow_dispatch'
run: |
python .github/update-release-branch.py \
--github-token ${{ secrets.GITHUB_TOKEN }} \
--repository-nwo ${{ github.repository }} \
--mode v2-release \
--conductor ${GITHUB_ACTOR}
- name: Update v1 release branch
if: github.event_name == 'push'
run: |
python .github/update-release-branch.py \
--github-token ${{ secrets.GITHUB_TOKEN }} \
--repository-nwo ${{ github.repository }} \
--mode v1-release \
--conductor ${GITHUB_ACTOR}
- name: Update release branch
run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
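
Editorial note (not part of the diff): besides the weekly schedule and the `repository_dispatch` curl example shown in the comments at the top of this workflow, the newer version can be started manually through `workflow_dispatch`. A minimal sketch with the `gh` CLI, assuming write access to the repository:

```bash
# Manually kick off the "Update release branch" workflow on the default branch
gh workflow run update-release-branch.yml --repo github/codeql-action --ref main
```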

View File

@@ -6,20 +6,18 @@ on:
jobs:
update-supported-enterprise-server-versions:
name: Update Supported Enterprise Server Versions
timeout-minutes: 45
runs-on: ubuntu-latest
if: ${{ github.repository == 'github/codeql-action' }}
steps:
- name: Setup Python
uses: actions/setup-python@v3
uses: actions/setup-python@v2
with:
python-version: "3.7"
- name: Checkout CodeQL Action
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Checkout Enterprise Releases
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
repository: github/enterprise-releases
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}
@@ -42,6 +40,5 @@ jobs:
body: ""
author: GitHub <noreply@github.com>
branch: update-supported-enterprise-server-versions
draft: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

2
.gitignore vendored
View File

@@ -1,4 +1,2 @@
/runner/dist/
/runner/node_modules/
# Ignore for example failing-tests.json from AVA
node_modules/.cache

View File

@@ -1,107 +1,8 @@
# CodeQL Action Changelog
# CodeQL Action and CodeQL Runner Changelog
## [UNRELEASED]
No user facing changes.
## 2.1.12 - 01 Jun 2022
- Update default CodeQL bundle version to 2.9.3. [#1084](https://github.com/github/codeql-action/pull/1084)
## 2.1.11 - 17 May 2022
- Update default CodeQL bundle version to 2.9.2. [#1074](https://github.com/github/codeql-action/pull/1074)
## 2.1.10 - 10 May 2022
- Update default CodeQL bundle version to 2.9.1. [#1056](https://github.com/github/codeql-action/pull/1056)
- When `wait-for-processing` is enabled, the workflow will now fail if there were any errors that occurred during processing of the analysis results.
## 2.1.9 - 27 Apr 2022
- Add `working-directory` input to the `autobuild` action. [#1024](https://github.com/github/codeql-action/pull/1024)
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#1007](https://github.com/github/codeql-action/pull/1007)
- Update default CodeQL bundle version to 2.9.0.
- Fix a bug where [status reporting fails on Windows](https://github.com/github/codeql-action/issues/1041). [#1042](https://github.com/github/codeql-action/pull/1042)
## 2.1.8 - 08 Apr 2022
- Update default CodeQL bundle version to 2.8.5. [#1014](https://github.com/github/codeql-action/pull/1014)
- Fix error where the init action would fail due to a GitHub API request that was taking too long to complete [#1025](https://github.com/github/codeql-action/pull/1025)
## 2.1.7 - 05 Apr 2022
- A bug where additional queries specified in the workflow file would sometimes not be respected has been fixed. [#1018](https://github.com/github/codeql-action/pull/1018)
## 2.1.6 - 30 Mar 2022
- [v2+ only] The CodeQL Action now runs on Node.js v16. [#1000](https://github.com/github/codeql-action/pull/1000)
- Update default CodeQL bundle version to 2.8.4. [#990](https://github.com/github/codeql-action/pull/990)
- Fix a bug where an invalid `commit_oid` was being sent to code scanning when a custom checkout path was being used. [#956](https://github.com/github/codeql-action/pull/956)
## 1.1.5 - 15 Mar 2022
- Update default CodeQL bundle version to 2.8.3.
- The CodeQL runner is now deprecated and no longer being released. For more information, see [CodeQL runner deprecation](https://github.blog/changelog/2021-09-21-codeql-runner-deprecation/).
- Fix two bugs that cause action failures with GHES 3.3 or earlier. [#978](https://github.com/github/codeql-action/pull/978)
- Fix `not a permitted key` invalid requests with GHES 3.1 or earlier
- Fix `RUNNER_ARCH environment variable must be set` errors with GHES 3.3 or earlier
## 1.1.4 - 07 Mar 2022
- Update default CodeQL bundle version to 2.8.2. [#950](https://github.com/github/codeql-action/pull/950)
- Fix a bug where old results can be uploaded if the languages in a repository change when using a non-ephemeral self-hosted runner. [#955](https://github.com/github/codeql-action/pull/955)
## 1.1.3 - 23 Feb 2022
- Fix a bug where the CLR traces can continue tracing even after tracing should be stopped. [#938](https://github.com/github/codeql-action/pull/938)
## 1.1.2 - 17 Feb 2022
- Due to potential issues for GHES 3.13.3 customers who are using recent versions of the CodeQL Action via GHES Connect, the CodeQL Action now uses Node.js v12 rather than Node.js v16. [#937](https://github.com/github/codeql-action/pull/937)
## 1.1.1 - 17 Feb 2022
- The CodeQL CLI versions up to and including version 2.4.4 are not compatible with the CodeQL Action 1.1.1 and later. The Action will emit an error if it detects that it is being used by an incompatible version of the CLI. [#931](https://github.com/github/codeql-action/pull/931)
- Update default CodeQL bundle version to 2.8.1. [#925](https://github.com/github/codeql-action/pull/925)
## 1.1.0 - 11 Feb 2022
- The CodeQL Action now uses Node.js v16. [#909](https://github.com/github/codeql-action/pull/909)
- Beware that the CodeQL build tracer in this release (and in all earlier releases) is incompatible with Windows 11 and Windows Server 2022. This incompatibility affects database extraction for compiled languages: cpp, csharp, go, and java. As a result, analyzing these languages with the `windows-latest` or `windows-2022` Actions virtual environments is currently unsupported. If you use any of these languages, please use the `windows-2019` Actions virtual environment or otherwise avoid these specific Windows versions until a new release fixes this incompatibility.
## 1.0.32 - 07 Feb 2022
- Add `sarif-id` as an output for the `upload-sarif` and `analyze` actions. [#889](https://github.com/github/codeql-action/pull/889)
- Add `ref` and `sha` inputs to the `analyze` action, which override the defaults provided by the GitHub Action context. [#889](https://github.com/github/codeql-action/pull/889)
- Update default CodeQL bundle version to 2.8.0. [#911](https://github.com/github/codeql-action/pull/911)
## 1.0.31 - 31 Jan 2022
- Remove `experimental` message when using custom CodeQL packages. [#888](https://github.com/github/codeql-action/pull/888)
- Add a better warning message stating that experimental features will be disabled if the workflow has been triggered by a pull request from a fork or the `security-events: write` permission is not present. [#882](https://github.com/github/codeql-action/pull/882)
## 1.0.30 - 24 Jan 2022
- Display a better error message when encountering a workflow that runs the `codeql-action/init` action multiple times. [#876](https://github.com/github/codeql-action/pull/876)
- Update default CodeQL bundle version to 2.7.6. [#877](https://github.com/github/codeql-action/pull/877)
## 1.0.29 - 21 Jan 2022
- The feature to wait for SARIF processing to complete after upload has been disabled by default due to a bug in its interaction with pull requests from forks.
## 1.0.28 - 18 Jan 2022
- Update default CodeQL bundle version to 2.7.5. [#866](https://github.com/github/codeql-action/pull/866)
- Fix a bug where SARIF files were failing upload due to an invalid test for unique categories. [#872](https://github.com/github/codeql-action/pull/872)
## 1.0.27 - 11 Jan 2022
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#855](https://github.com/github/codeql-action/pull/855)
## 1.0.26 - 10 Dec 2021
- Update default CodeQL bundle version to 2.7.3. [#842](https://github.com/github/codeql-action/pull/842)
- Update default CodeQL bundle version to 2.7.3. [#837](https://github.com/github/codeql-action/pull/837)
## 1.0.25 - 06 Dec 2021

View File

@@ -1,3 +1 @@
**/* @github/codeql-action-reviewers
/python-setup/ @github/codeql-python @github/codeql-action-reviewers

View File

@@ -61,30 +61,16 @@ Here are a few things you can do that will increase the likelihood of your pull
## Releasing (write access required)
1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `releases/v2` release branch.
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v1` release branch.
You can start a release by triggering this workflow via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
1. The workflow run will open a pull request titled "Merge main into releases/v2". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
A release is automatically started every Monday via a scheduled run of this workflow, however you can start a release manually by triggering a run via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
1. The workflow run will open a pull request titled "Merge main into v1". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
1. Review the checklist items in the pull request description.
Once you've checked off all but the last two of these, approve the PR and automerge it.
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Tag release and merge back" workflow will create a mergeback PR.
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.
Once you've checked off all but the last of these, approve the PR and automerge it.
1. When the "Merge main into v1" pull request is merged into the `v1` branch, the "Tag release and merge back" workflow will create a mergeback PR.
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into v1" pull request, and bumps the patch version of the CodeQL Action.
Approve the mergeback PR and automerge it.
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Update release branch" workflow will create a "Merge releases/v2 into releases/v1" pull request to merge the changes since the last release into the `releases/v1` release branch.
This ensures we keep both the `releases/v1` and `releases/v2` release branches up to date and fully supported.
Review the checklist items in the pull request description.
Once you've checked off all the items, approve the PR and automerge it.
1. Once the mergeback has been merged to `main` and the "Merge releases/v2 into releases/v1" PR has been merged to `releases/v1`, the release is complete.
## Keeping the PR checks up to date (admin access required)
Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. You can regenerate the checks automatically by running the [update-required-checks.sh](.github/workflows/script/update-required-checks.sh) script:
1. By default, this script retrieves the checks from the latest SHA on `main`, so make sure that your `main` branch is up to date.
2. Run the script. If there's a reason to, you can pass in a different SHA as a CLI argument.
3. After running, go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules for `main`, `v1`, and `v2` have been updated.
Approve the mergeback PR and automerge it. Once the mergeback has been merged into main, the release is complete.
## Resources

View File

@@ -52,11 +52,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
@@ -64,10 +64,10 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below).
- name: Autobuild
uses: github/codeql-action/autobuild@v2
uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following
# three lines and modify them (or add more) to build your code if your
@@ -78,14 +78,14 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v1
```
If you prefer to integrate this within an existing CI workflow, it should end up looking something like this:
```yaml
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v1
with:
languages: go, javascript
@@ -95,7 +95,7 @@ If you prefer to integrate this within an existing CI workflow, it should end up
make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v1
```
### Configuration file
@@ -103,7 +103,7 @@ If you prefer to integrate this within an existing CI workflow, it should end up
Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
```yaml
- uses: github/codeql-action/init@v2
- uses: github/codeql-action/init@v1
with:
config-file: ./.github/codeql/codeql-config.yml
```
@@ -111,7 +111,7 @@ Use the `config-file` parameter of the `init` action to enable the configuration
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
```yaml
- uses: github/codeql-action/init@v2
- uses: github/codeql-action/init@v1
with:
config-file: owner/repo/codeql-config.yml@branch
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
@@ -122,7 +122,7 @@ For information on how to write a configuration file, see "[Using a custom confi
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
```yaml
- uses: github/codeql-action/init@v2
- uses: github/codeql-action/init@v1
with:
queries: <local-or-remote-query>,<another-query>
```
@@ -130,7 +130,7 @@ If you only want to customise the queries used, you can specify them in your wor
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:
```yaml
- uses: github/codeql-action/init@v2
- uses: github/codeql-action/init@v1
with:
queries: +<local-or-remote-query>,<another-query>
```
@@ -139,3 +139,10 @@ By default, this will override any queries specified in a config file. If you wi
Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
### Note on "missing analysis" message
The very first time code scanning is run and if it is on a pull request, you will probably get a message mentioning a "missing analysis". This is expected.
After code scanning has analyzed the code in a pull request, it needs to compare the analysis of the topic branch (the merge commit of the branch you used to create the pull request) with the analysis of the base branch (the branch into which you want to merge the pull request). This allows code scanning to compute which alerts are newly introduced by the pull request, which alerts were already present in the base branch, and whether any existing alerts are fixed by the changes in the pull request. Initially, if you use a pull request to add code scanning to a repository, the base branch has not yet been analyzed, so it's not possible to compute these details. In this case, when you click through from the results check on the pull request you will see the "Missing analysis for base commit SHA-HASH" message.
For more information and other causes of this message, see [Reasons for the "missing analysis" message](https://docs.github.com/en/code-security/secure-coding/automatically-scanning-your-code-for-vulnerabilities-and-errors/setting-up-code-scanning-for-a-repository#reasons-for-the-missing-analysis-message)

View File

@@ -45,12 +45,6 @@ inputs:
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
required: false
default: ${{ github.workspace }}
ref:
description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."
required: false
sha:
description: "The sha of the HEAD of the ref where results will be uploaded. If not provided, the Action will use the GITHUB_SHA environment variable. If provided, the ref input must be provided as well. This input is not available in pull requests from forks."
required: false
category:
description: String used by Code Scanning for matching the analyses
required: false
@@ -61,7 +55,7 @@ inputs:
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
default: "true"
default: "false"
token:
default: ${{ github.token }}
matrix:
@@ -69,8 +63,6 @@ inputs:
outputs:
db-locations:
description: A map from language to absolute path for each database created by CodeQL.
sarif-id:
description: The ID of the uploaded SARIF file.
runs:
using: "node16"
using: "node12"
main: "../lib/analyze-action.js"

View File

@@ -6,12 +6,6 @@ inputs:
default: ${{ github.token }}
matrix:
default: ${{ toJson(matrix) }}
working-directory:
description: >-
Run the autobuilder using this path (relative to $GITHUB_WORKSPACE) as
working directory. If this input is not set, the autobuilder runs with
$GITHUB_WORKSPACE as its working directory.
required: false
runs:
using: 'node16'
main: '../lib/autobuild-action.js'
using: 'node12'
main: '../lib/autobuild-action.js'

View File

@@ -59,19 +59,9 @@ inputs:
description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
required: false
default: 'false'
debug-artifact-name:
description: >-
The name of the artifact to store debugging information in.
This is only used when debug mode is enabled.
required: false
debug-database-name:
description: >-
The name of the database uploaded to the debugging artifact.
This is only used when debug mode is enabled.
required: false
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis
runs:
using: 'node16'
using: 'node12'
main: '../lib/init-action.js'

143
lib/actions-util.js generated
View File

@@ -19,9 +19,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -30,8 +29,6 @@ const yaml = __importStar(require("js-yaml"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
/**
* The utils in this module are meant to be run inside of the action only.
* Code paths from the runner should not enter this module.
@@ -54,10 +51,10 @@ exports.getRequiredInput = getRequiredInput;
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
const getOptionalInput = function (name) {
function getOptionalInput(name) {
const value = core.getInput(name);
return value.length > 0 ? value : undefined;
};
}
exports.getOptionalInput = getOptionalInput;
function getTemporaryDirectory() {
const value = process.env["CODEQL_ACTION_TEMP"];
@@ -76,7 +73,7 @@ exports.getToolCacheDirectory = getToolCacheDirectory;
/**
* Gets the SHA of the commit that is currently checked out.
*/
const getCommitOid = async function (checkoutPath, ref = "HEAD") {
const getCommitOid = async function (ref = "HEAD") {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
@@ -96,68 +93,16 @@ const getCommitOid = async function (checkoutPath, ref = "HEAD") {
process.stderr.write(data);
},
},
cwd: checkoutPath,
}).exec();
return commitOid.trim();
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment or input: ${e}`);
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
core.info(e.stack || "NO STACK");
return (0, exports.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
}
};
exports.getCommitOid = getCommitOid;
/**
* If the action was triggered by a pull request, determine the commit sha of the merge base.
* Returns undefined if run by other triggers or the merge base cannot be determined.
*/
const determineMergeBaseCommitOid = async function () {
if (process.env.GITHUB_EVENT_NAME !== "pull_request") {
return undefined;
}
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
const checkoutPath = (0, exports.getOptionalInput)("checkout_path");
try {
let commitOid = "";
let baseOid = "";
let headOid = "";
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["show", "-s", "--format=raw", mergeSha], {
silent: true,
listeners: {
stdline: (data) => {
if (data.startsWith("commit ") && commitOid === "") {
commitOid = data.substring(7);
}
else if (data.startsWith("parent ")) {
if (baseOid === "") {
baseOid = data.substring(7);
}
else if (headOid === "") {
headOid = data.substring(7);
}
}
},
stderr: (data) => {
process.stderr.write(data);
},
},
cwd: checkoutPath,
}).exec();
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
if (commitOid === mergeSha &&
headOid.length === 40 &&
baseOid.length === 40) {
return baseOid;
}
return undefined;
}
catch (e) {
core.info(`Failed to call git to determine merge base. Continuing with data from environment: ${e}`);
core.info(e.stack || "NO STACK");
return undefined;
}
};
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
function isObject(o) {
return o !== null && typeof o === "object";
}
@@ -357,7 +302,7 @@ async function getWorkflowPath() {
const repo = repo_nwo[1];
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
owner,
repo,
run_id,
@@ -428,24 +373,8 @@ exports.computeAutomationID = computeAutomationID;
async function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const refInput = (0, exports.getOptionalInput)("ref");
const shaInput = (0, exports.getOptionalInput)("sha");
const checkoutPath = (0, exports.getOptionalInput)("checkout_path") ||
(0, exports.getOptionalInput)("source-root") ||
(0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE");
const hasRefInput = !!refInput;
const hasShaInput = !!shaInput;
// If one of 'ref' or 'sha' are provided, both are required
if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
throw new Error("Both 'ref' and 'sha' are required if one of them is provided.");
}
const ref = refInput || (0, util_1.getRequiredEnvParam)("GITHUB_REF");
const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
// If the ref is a user-provided input, we have to skip logic
// and assume that it is really where they want to upload the results.
if (refInput) {
return refInput;
}
const ref = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
const sha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
// For pull request refs we want to detect whether the workflow
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
// than the 'merge' ref. If so, we want to convert the ref that
@@ -454,14 +383,15 @@ async function getRef() {
if (!pull_ref_regex.test(ref)) {
return ref;
}
const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
// in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
const head = await (0, exports.getCommitOid)("HEAD");
// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
// in actions/checkout@v1 this may not be true as it checks out the repository
// using GITHUB_REF. There is a subtle race condition where
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
const hasChangedRef = sha !== head &&
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
(await (0, exports.getCommitOid)(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
head;
if (hasChangedRef) {
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
@@ -472,15 +402,6 @@ async function getRef() {
}
}
exports.getRef = getRef;
function getActionsStatus(error, otherFailureCause) {
if (error || otherFailureCause) {
return error instanceof util_1.UserError ? "user-error" : "failure";
}
else {
return "success";
}
}
exports.getActionsStatus = getActionsStatus;
/**
* Compose a StatusReport.
*
@@ -491,7 +412,7 @@ exports.getActionsStatus = getActionsStatus;
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = (0, exports.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
const commitOid = process.env["GITHUB_SHA"] || "";
const ref = await getRef();
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
let workflowRunID = -1;
@@ -506,8 +427,6 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
// If running locally then the GITHUB_ACTION_REF cannot be trusted as it may be for the previous action
// See https://github.com/actions/runner/issues/803
const actionRef = isRunningLocalAction()
@@ -526,8 +445,6 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status,
runner_os: runnerOs,
action_version: pkg.version,
};
// Add optional parameters
if (cause) {
@@ -536,32 +453,18 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
if (exception) {
statusReport.exception = exception;
}
if (status === "success" ||
status === "failure" ||
status === "aborted" ||
status === "user-error") {
if (status === "success" || status === "failure" || status === "aborted") {
statusReport.completed_at = new Date().toISOString();
}
const matrix = getRequiredInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
if ("RUNNER_ARCH" in process.env) {
// RUNNER_ARCH is available only in GHES 3.4 and later
// Values other than X86, X64, ARM, or ARM64 are discarded server side
statusReport.runner_arch = process.env["RUNNER_ARCH"];
}
if (runnerOs === "Windows" || runnerOs === "macOS") {
statusReport.runner_os_release = os.release();
}
if (codeQlCliVersion !== undefined) {
statusReport.codeql_version = codeQlCliVersion;
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
const GENERIC_404_MSG = "Not authorized to used the CodeQL code scanning feature on this repo.";
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
/**
@@ -574,20 +477,8 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
* Returns whether sending the status report was successful of not.
*/
async function sendStatusReport(statusReport) {
const gitHubVersion = await api.getGitHubVersionActionsOnly();
if ((0, util_1.isGitHubGhesVersionBelow)(gitHubVersion, "3.2.0")) {
// GHES 3.1 and earlier versions reject unexpected properties, which means
// that they will reject status reports with newly added properties.
// Inhibiting status reporting for GHES < 3.2 avoids such failures.
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
// If in test mode we don't want to upload the results
if ((0, util_1.isInTestMode)()) {
core.debug("In test mode. Status reports are not uploaded.");
return true;
}
const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
@@ -677,7 +568,7 @@ async function isAnalyzingDefaultBranch() {
// Get the current ref and trim and refs/heads/ prefix
let currentRef = await getRef();
currentRef = currentRef.startsWith("refs/heads/")
? currentRef.slice("refs/heads/".length)
? currentRef.substr("refs/heads/".length)
: currentRef;
const event = getWorkflowEvent();
const defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;

File diff suppressed because one or more lines are too long

111
lib/actions-util.test.js generated

@@ -39,93 +39,37 @@ function errorCodes(actual, expected) {
await t.throwsAsync(actionsutil.getRef);
});
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = currentSha;
const callback = sinon.stub(actionsutil, "getCommitOid");
callback.withArgs("HEAD").resolves(currentSha);
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = currentSha;
const callback = sinon.stub(actionsutil, "getCommitOid");
callback.withArgs("HEAD").resolves(currentSha);
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = "b".repeat(40);
const sha = "a".repeat(40);
const callback = sinon.stub(actionsutil, "getCommitOid");
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
callback.withArgs("HEAD").resolves(sha);
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
const expectedRef = "refs/pull/1/merge";
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = "b".repeat(40);
const sha = "a".repeat(40);
const callback = sinon.stub(actionsutil, "getCommitOid");
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
callback.withArgs("HEAD").resolves(sha);
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
const callback = sinon.stub(actionsutil, "getCommitOid");
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, "refs/pull/1/head");
callback.restore();
});
});
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
// These values are be ignored
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
const callback = sinon.stub(actionsutil, "getCommitOid");
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
callback.withArgs("HEAD").resolves("b".repeat(40));
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, "refs/pull/2/merge");
callback.restore();
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
await t.throwsAsync(async () => {
await actionsutil.getRef();
}, {
instanceOf: Error,
message: "Both 'ref' and 'sha' are required if one of them is provided.",
});
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
process.env["GITHUB_WORKSPACE"] = "/tmp";
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
await t.throwsAsync(async () => {
await actionsutil.getRef();
}, {
instanceOf: Error,
message: "Both 'ref' and 'sha' are required if one of them is provided.",
});
getAdditionalInputStub.restore();
});
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
const callback = sinon.stub(actionsutil, "getCommitOid");
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
callback.withArgs("HEAD").resolves("b".repeat(40));
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, "refs/pull/1/head");
callback.restore();
});
(0, ava_1.default)("computeAutomationID()", async (t) => {
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
@@ -480,7 +424,6 @@ on: ["push"]
});
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const envFile = path.join(tmpDir, "event.json");
fs.writeFileSync(envFile, JSON.stringify({
repository: {

File diff suppressed because one or more lines are too long

4
lib/analysis-paths.js generated

@@ -37,11 +37,11 @@ function buildIncludeExcludeEnvVar(paths) {
return paths.join("\n");
}
function printPathFiltersWarning(config, logger) {
// Index include/exclude/filters only work in javascript/python/ruby.
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript, Python, and Ruby');
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python');
}
}
exports.printPathFiltersWarning = printPathFiltersWarning;

lib/analysis-paths.js.map generated (machine-generated source mappings, diff not shown)

lib/analysis-paths.test.js generated

@@ -43,9 +43,6 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -68,9 +65,6 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -94,9 +88,6 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);

lib/analysis-paths.test.js.map generated (machine-generated source mappings, diff not shown)

lib/analyze-action-env.test.js generated

@@ -38,28 +38,21 @@ const util = __importStar(require("./util"));
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion,
languages: [],
packs: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
// When there are no action inputs for RAM and threads, the action uses
// environment variables (passed down from the init action) to set RAM and
// threads usage.

lib/analyze-action-env.test.js.map generated (machine-generated source mappings, diff not shown)

lib/analyze-action-input.test.js generated

@@ -38,28 +38,21 @@ const util = __importStar(require("./util"));
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion,
languages: [],
packs: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";
process.env["CODEQL_RAM"] = "4992";
// Action inputs have precedence over environment variables.

lib/analyze-action-input.test.js.map generated (machine-generated source mappings, diff not shown)

64
lib/analyze-action.js generated

@@ -36,16 +36,13 @@ const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendStatusReport(startedAt, config, stats, error) {
const status = actionsUtil.getActionsStatus(error, stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language);
async function sendStatusReport(startedAt, stats, error) {
const status = (stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language) !== undefined || error !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error === null || error === void 0 ? void 0 : error.message, error === null || error === void 0 ? void 0 : error.stack);
const statusReport = {
...statusReportBase,
...(config
? {
ml_powered_javascript_queries: util.getMlPoweredJsQueriesStatus(config),
}
: {}),
...(stats || {}),
};
await actionsUtil.sendStatusReport(statusReport);
@@ -57,7 +54,6 @@ async function run() {
let runStats = undefined;
let config = undefined;
util.initializeEnvironment(util.Mode.actions, pkg.version);
await util.checkActionVersion(pkg.version);
try {
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
return;
@@ -75,13 +71,12 @@ async function run() {
const outputDir = actionsUtil.getRequiredInput("output");
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
if (config.debugMode) {
// Upload the SARIF files as an Actions artifact for debugging
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir, config.debugArtifactName);
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir);
}
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
@@ -95,10 +90,10 @@ async function run() {
// Multilanguage tracing: there are additional logs in the root of the cluster
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
await uploadDebugArtifacts(toUpload, config.dbLocation);
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir, config.debugArtifactName);
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir);
}
}
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
@@ -111,21 +106,23 @@ async function run() {
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
core.setOutput("sarif-id", uploadResult.sarifID);
}
else {
logger.info("Not uploading results");
}
// Possibly upload the database bundles for remote queries
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
// We don't upload results in test mode, so don't wait for processing
if (util.isInTestMode()) {
core.debug("In test mode. Waiting for processing is disabled.");
}
else if (uploadResult !== undefined &&
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger); // Possibly upload the database bundles for remote queries
if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
if (config.debugMode) {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages)
toUpload.push(await (0, util_1.bundleDb)(config, language, codeql));
await uploadDebugArtifacts(toUpload, config.dbLocation);
}
}
catch (origError) {
const error = origError instanceof Error ? origError : new Error(String(origError));
@@ -133,27 +130,14 @@ async function run() {
console.log(error);
if (error instanceof analyze_1.CodeQLAnalysisError) {
const stats = { ...error.queriesStatusReport };
await sendStatusReport(startedAt, config, stats, error);
await sendStatusReport(startedAt, stats, error);
}
else {
await sendStatusReport(startedAt, config, undefined, error);
await sendStatusReport(startedAt, undefined, error);
}
return;
}
finally {
if (config !== undefined && config.debugMode) {
try {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages) {
toUpload.push(await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
}
catch (error) {
console.log(`Failed to upload database debug bundles: ${error}`);
}
}
if (core.isDebug() && config !== undefined) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
@@ -177,26 +161,26 @@ async function run() {
}
}
if (runStats && uploadResult) {
await sendStatusReport(startedAt, config, {
await sendStatusReport(startedAt, {
...runStats,
...uploadResult.statusReport,
});
}
else if (runStats) {
await sendStatusReport(startedAt, config, { ...runStats });
await sendStatusReport(startedAt, { ...runStats });
}
else {
await sendStatusReport(startedAt, config, undefined);
await sendStatusReport(startedAt, undefined);
}
}
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
async function uploadDebugArtifacts(toUpload, rootDir) {
let suffix = "";
const matrix = actionsUtil.getRequiredInput("matrix");
if (matrix !== undefined && matrix !== "null") {
for (const entry of Object.entries(JSON.parse(matrix)).sort())
suffix += `-${entry[1]}`;
}
await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${util_1.DEBUG_ARTIFACT_NAME}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
}
function listFolder(dir) {
const entries = fs.readdirSync(dir, { withFileTypes: true });

File diff suppressed because one or more lines are too long

57
lib/analyze.js generated

@@ -18,22 +18,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.runCleanup = exports.runFinalize = exports.runQueries = exports.CodeQLAnalysisError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const count_loc_1 = require("./count-loc");
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
class CodeQLAnalysisError extends Error {
constructor(queriesStatusReport, message) {
@@ -135,11 +130,14 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
try {
if (hasPackWithCustomQueries) {
logger.info("Performing analysis with custom CodeQL Packs.");
logger.info("*************");
logger.info("Performing analysis with custom QL Packs. QL Packs are an experimental feature.");
logger.info("And should not be used in production yet.");
logger.info("*************");
logger.startGroup(`Downloading custom packs for ${language}`);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const results = await codeql.packDownload(packsWithVersion);
logger.info(`Downloaded packs: ${results.packs
.map((r) => `${r.name}@${r.version || "latest"}`)
@@ -163,7 +161,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
}
}
if (packsWithVersion.length > 0) {
querySuitePaths.push(...(await runQueryPacks(language, "packs", packsWithVersion, undefined)));
querySuitePaths.push(await runQueryGroup(language, "packs", createPackSuiteContents(packsWithVersion), undefined));
ranCustom = true;
}
if (ranCustom) {
@@ -221,42 +219,25 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
return querySuitePath;
}
async function runQueryPacks(language, type, packs, searchPath) {
const databasePath = util.getCodeQLDatabasePath(config, language);
// Run the queries individually instead of all at once to avoid command
// line length restrictions, particularly on windows.
for (const pack of packs) {
logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
await codeql.databaseRunQueries(databasePath, searchPath, pack, memoryFlag, threadsFlag);
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
}
return packs;
}
}
exports.runQueries = runQueries;
function createQuerySuiteContents(queries) {
return queries.map((q) => `- query: ${q}`).join("\n");
}
function createPackSuiteContents(packsWithVersion) {
return packsWithVersion.map(packWithVersionToQuerySuiteEntry).join("\n");
}
function packWithVersionToQuerySuiteEntry(pack) {
let text = `- qlpack: ${pack.packName}`;
if (pack.version) {
text += `\n version: ${pack.version}`;
}
return text;
}
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Delete variables as specified by the end-tracing script
await (0, tracer_config_1.endTracingForCluster)(config);
}
else {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
}
try {
await (0, del_1.default)(outputDir, { force: true });
}
catch (error) {
if ((error === null || error === void 0 ? void 0 : error.code) !== "ENOENT") {
throw error;
}
}
await fs.promises.mkdir(outputDir, { recursive: true });
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
fs.mkdirSync(outputDir, { recursive: true });
await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
}
exports.runFinalize = runFinalize;

File diff suppressed because one or more lines are too long

40
lib/analyze.test.js generated

@@ -26,6 +26,7 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const yaml = __importStar(require("js-yaml"));
const semver_1 = require("semver");
const sinon = __importStar(require("sinon"));
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
@@ -52,8 +53,18 @@ const util = __importStar(require("./util"));
const addSnippetsFlag = "";
const threadsFlag = "";
const packs = {
[languages_1.Language.cpp]: ["a/b@1.0.0"],
[languages_1.Language.java]: ["c/d@2.0.0"],
[languages_1.Language.cpp]: [
{
packName: "a/b",
version: (0, semver_1.clean)("1.0.0"),
},
],
[languages_1.Language.java]: [
{
packName: "c/d",
version: (0, semver_1.clean)("2.0.0"),
},
],
};
for (const language of Object.values(languages_1.Language)) {
(0, codeql_1.setCodeQL)({
@@ -115,9 +126,6 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs,
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,
@@ -198,10 +206,32 @@ const util = __importStar(require("./util"));
query: "bar.ql",
},
];
const qlsPackContentCpp = [
{
qlpack: "a/b",
version: "1.0.0",
},
];
const qlsPackContentJava = [
{
qlpack: "c/d",
version: "2.0.0",
},
];
for (const lang of Object.values(languages_1.Language)) {
t.deepEqual(readContents(`${lang}-queries-builtin.qls`), qlsContent);
t.deepEqual(readContents(`${lang}-queries-custom-0.qls`), qlsContent);
t.deepEqual(readContents(`${lang}-queries-custom-1.qls`), qlsContent2);
const packSuiteName = `${lang}-queries-packs.qls`;
if (lang === languages_1.Language.cpp) {
t.deepEqual(readContents(packSuiteName), qlsPackContentCpp);
}
else if (lang === languages_1.Language.java) {
t.deepEqual(readContents(packSuiteName), qlsPackContentJava);
}
else {
t.false(fs.existsSync(path.join(tmpDir, "codeql_databases", packSuiteName)));
}
}
function readContents(name) {
const x = fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8");

File diff suppressed because one or more lines are too long

34
lib/api-client.js generated

@@ -22,13 +22,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getGitHubVersionActionsOnly = exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
const path = __importStar(require("path"));
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const retry = __importStar(require("@octokit/plugin-retry"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const actions_util_1 = require("./actions-util");
const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
@@ -58,36 +57,15 @@ function getApiUrl(githubUrl) {
url.pathname = path.join(url.pathname, "api", "v3");
return url.toString();
}
function getApiDetails() {
return {
auth: (0, actions_util_1.getRequiredInput)("token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been converted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient() {
return (0, exports.getApiClient)(getApiDetails());
const apiDetails = {
auth: (0, actions_util_1.getRequiredInput)("token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
return (0, exports.getApiClient)(apiDetails);
}
exports.getActionsApiClient = getActionsApiClient;
let cachedGitHubVersion = undefined;
/**
* Report the GitHub server version. This is a wrapper around
* util.getGitHubVersion() that automatically supplies GitHub API details using
* GitHub Action inputs. If you need to get the GitHub server version from the
* Runner, please call util.getGitHubVersion() instead.
*
* @returns GitHub version
*/
async function getGitHubVersionActionsOnly() {
if (!util.isActions()) {
throw new Error("getGitHubVersionActionsOnly() works only in an action");
}
if (cachedGitHubVersion === undefined) {
cachedGitHubVersion = await util.getGitHubVersion(getApiDetails());
}
return cachedGitHubVersion;
}
exports.getGitHubVersionActionsOnly = getGitHubVersionActionsOnly;
//# sourceMappingURL=api-client.js.map

lib/api-client.js.map generated (machine-generated source mappings, diff not shown)

lib/api-compatibility.json

@@ -1 +1 @@
{ "maximumVersion": "3.5", "minimumVersion": "3.1" }
{ "maximumVersion": "3.3", "minimumVersion": "3.0" }

lib/autobuild-action.js generated

@@ -29,7 +29,9 @@ const util_1 = require("./util");
const pkg = require("../package.json");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
const status = (0, actions_util_1.getActionsStatus)(cause, failingLanguage);
const status = failingLanguage !== undefined || cause !== undefined
? "failure"
: "success";
const statusReportBase = await (0, actions_util_1.createStatusReportBase)("autobuild", status, startedAt, cause === null || cause === void 0 ? void 0 : cause.message, cause === null || cause === void 0 ? void 0 : cause.stack);
const statusReport = {
...statusReportBase,
@@ -39,9 +41,8 @@ async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguag
await (0, actions_util_1.sendStatusReport)(statusReport);
}
async function run() {
const startedAt = new Date();
const logger = (0, logging_1.getActionsLogger)();
await (0, util_1.checkActionVersion)(pkg.version);
const startedAt = new Date();
let language = undefined;
try {
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
@@ -53,11 +54,6 @@ async function run() {
}
language = (0, autobuild_1.determineAutobuildLanguage)(config, logger);
if (language !== undefined) {
const workingDirectory = (0, actions_util_1.getOptionalInput)("working-directory");
if (workingDirectory) {
logger.info(`Changing autobuilder working directory to ${workingDirectory}`);
process.chdir(workingDirectory);
}
await (0, autobuild_1.runAutobuild)(language, config, logger);
}
}

lib/autobuild-action.js.map generated (machine-generated source mappings, diff not shown)

lib/codeql.js (generated, 102 changed lines)

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -33,12 +33,10 @@ const actions_util_1 = require("./actions-util");
const api = __importStar(require("./api-client"));
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
const error_matcher_1 = require("./error-matcher");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const toolcache = __importStar(require("./toolcache"));
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
const util = __importStar(require("./util"));
const util_1 = require("./util");
class CommandInvocationError extends Error {
constructor(cmd, args, exitCode, error) {
super(`Failure invoking ${cmd} with arguments ${args}.\n
@@ -60,10 +58,9 @@ const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
* below can be used to conditionally enable certain features on versions newer
* than this. Please record the reason we cannot support an older version.
*
* Reason: First version containing fix for the "We still have not reached
* idleness" deadlock.
* Reason: Changes to how the tracing environment is set up.
*/
const CODEQL_MINIMUM_VERSION = "2.4.5";
const CODEQL_MINIMUM_VERSION = "2.3.1";
/**
* Versions of CodeQL that version-flag certain functionality in the Action.
* For convenience, please keep these in descending order. Once a version
@@ -76,8 +73,6 @@ const CODEQL_VERSION_GROUP_RULES = "2.5.5";
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.9.3";
/**
* This variable controls using the new style of tracing from the CodeQL
* CLI. In particular, with versions above this we will use both indirect
@@ -88,12 +83,6 @@ const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.9.3";
* versions above that.
*/
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
/**
* Versions 2.9.0+ of the CodeQL CLI run machine learning models from a temporary directory, which
* resolves an issue on Windows where TensorFlow models are not correctly loaded due to the path of
* some of their files being greater than MAX_PATH (260 characters).
*/
exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = "2.9.0";
function getCodeQLBundleName() {
let platform;
if (process.platform === "win32") {
@@ -202,19 +191,6 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
}
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
}
/**
* Set up CodeQL CLI access.
*
* @param codeqlURL
* @param apiDetails
* @param tempDir
* @param toolCacheDir
* @param variant
* @param logger
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
* version requirement. Must be set to true outside tests.
* @returns
*/
async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, checkVersion) {
try {
// We use the special value of 'latest' to prioritize the version in the
@@ -239,7 +215,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
// specified explicitly (in which case we always honor it).
if (!codeqlFolder && !codeqlURL && !forceLatest) {
const codeqlVersions = toolcache.findAllVersions("CodeQL", toolCacheDir, logger);
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
if (codeqlVersions.length === 1) {
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], toolCacheDir, logger);
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
@@ -256,9 +232,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
}
const parsedCodeQLURL = new URL(codeqlURL);
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
const headers = {
accept: "application/octet-stream",
};
const headers = { accept: "application/octet-stream" };
// We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom.
@@ -388,26 +362,16 @@ async function getCodeQLForTesting() {
return getCodeQLForCmd("codeql-for-testing", false);
}
exports.getCodeQLForTesting = getCodeQLForTesting;
/**
* Return a CodeQL object for CodeQL CLI access.
*
* @param cmd Path to CodeQL CLI
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
* version requirement. Must be set to true outside tests.
* @returns A new CodeQL object
*/
async function getCodeQLForCmd(cmd, checkVersion) {
let cachedVersion = undefined;
const codeql = {
getPath() {
return cmd;
},
async getVersion() {
let result = util.getCachedCodeQlVersion();
if (result === undefined) {
result = (await runTool(cmd, ["version", "--format=terse"])).trim();
util.cacheCodeQlVersion(result);
}
return result;
if (cachedVersion === undefined)
cachedVersion = runTool(cmd, ["version", "--format=terse"]);
return await cachedVersion;
},
async printVersion() {
await runTool(cmd, ["version", "--format=json"]);
@@ -462,33 +426,22 @@ async function getCodeQLForCmd(cmd, checkVersion) {
...getExtraOptionsFromEnv(["database", "init"]),
]);
},
async databaseInitCluster(config, sourceRoot, processName, processLevel, featureFlags) {
const extraArgs = config.languages.map((language) => `--language=${language}`);
if (config.languages.filter(languages_1.isTracedLanguage).length > 0) {
async databaseInitCluster(databasePath, languages, sourceRoot, processName, processLevel) {
const extraArgs = languages.map((language) => `--language=${language}`);
if (languages.filter(languages_1.isTracedLanguage).length > 0) {
extraArgs.push("--begin-tracing");
if (processName !== undefined) {
extraArgs.push(`--trace-process-name=${processName}`);
}
else {
// We default to 3 if no other arguments are provided since this was the default
// behaviour of the Runner. Note this path never happens in the CodeQL Action
// because that always passes in a process name.
extraArgs.push(`--trace-process-level=${processLevel || 3}`);
}
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_LUA_TRACER_CONFIG)) {
if (await featureFlags.getValue(feature_flags_1.FeatureFlag.LuaTracerConfigEnabled)) {
extraArgs.push("--internal-use-lua-tracing");
}
else {
extraArgs.push("--no-internal-use-lua-tracing");
}
}
}
await runTool(cmd, [
"database",
"init",
"--db-cluster",
config.dbLocation,
databasePath,
`--source-root=${sourceRoot}`,
...extraArgs,
...getExtraOptionsFromEnv(["database", "init"]),
@@ -557,12 +510,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, args, error_matcher_1.errorMatchers);
},
async resolveLanguages() {
const codeqlArgs = [
"resolve",
"languages",
"--format=json",
...getExtraOptionsFromEnv(["resolve", "languages"]),
];
const codeqlArgs = ["resolve", "languages", "--format=json"];
const output = await runTool(cmd, codeqlArgs);
try {
return JSON.parse(output);
@@ -630,8 +578,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
(await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP))) {
codeqlArgs.push("--sarif-category", automationDetailsId);
}
codeqlArgs.push(databasePath);
codeqlArgs.push(...querySuitePaths);
codeqlArgs.push(databasePath, ...querySuitePaths);
// capture stdout, which contains analysis summaries
return await runTool(cmd, codeqlArgs);
},
@@ -659,9 +606,8 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"pack",
"download",
"--format=json",
"--resolve-query-specs",
...getExtraOptionsFromEnv(["pack", "download"]),
...packs,
...packs.map(packWithVersionToString),
];
const output = await runTool(cmd, codeqlArgs);
try {
@@ -687,36 +633,28 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"cleanup",
databasePath,
`--mode=${cleanupLevel}`,
...getExtraOptionsFromEnv(["database", "cleanup"]),
];
await runTool(cmd, codeqlArgs);
},
async databaseBundle(databasePath, outputFilePath, databaseName) {
async databaseBundle(databasePath, outputFilePath) {
const args = [
"database",
"bundle",
databasePath,
`--output=${outputFilePath}`,
`--name=${databaseName}`,
...getExtraOptionsFromEnv(["database", "bundle"]),
];
await new toolrunner.ToolRunner(cmd, args).exec();
},
};
// To ensure that status reports include the CodeQL CLI version wherever
// possible, we want to call getVersion(), which populates the version value
// used by status reporting, at the earliest opportunity. But invoking
// getVersion() directly here breaks tests that only pretend to create a
// CodeQL object. So instead we rely on the assumption that all non-test
// callers would set checkVersion to true, and util.codeQlVersionAbove()
// would call getVersion(), so the CLI version would be cached as soon as the
// CodeQL object is created.
if (checkVersion &&
!(await util.codeQlVersionAbove(codeql, CODEQL_MINIMUM_VERSION))) {
throw new Error(`Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${await codeql.getVersion()}`);
}
return codeql;
}
function packWithVersionToString(pack) {
return pack.version ? `${pack.packName}@${pack.version}` : pack.packName;
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*/

File diff suppressed because one or more lines are too long
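Much of the lib/codeql.js diff above gates functionality on the CLI version: CODEQL_MINIMUM_VERSION is enforced when the CodeQL object is created, and flags such as --sarif-category are only passed above CODEQL_VERSION_SARIF_GROUP. The sketch below only illustrates that pattern; it assumes the semver package, the body of codeQlVersionAbove is an assumption rather than the Action's implementation, and checkMinimumVersion is a name invented for this example.

const semver = require("semver");

// Sketch only: codeQlVersionAbove in the real code is a util helper; here it is
// assumed to be a simple semver comparison against the CLI's reported version.
async function codeQlVersionAbove(codeql, requiredVersion) {
  return semver.gte(await codeql.getVersion(), requiredVersion);
}

// checkMinimumVersion is a name invented for this example; the Action performs
// the equivalent check when the CodeQL object is created (see the diff above).
async function checkMinimumVersion(codeql, minimumVersion) {
  if (!(await codeQlVersionAbove(codeql, minimumVersion))) {
    throw new Error(
      `Expected a CodeQL CLI with version at least ${minimumVersion} but got version ${await codeql.getVersion()}`
    );
  }
}

// Usage in the style of the diff: only pass a flag the CLI understands.
//   if (await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP)) {
//     codeqlArgs.push("--sarif-category", automationDetailsId);
//   }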

lib/codeql.test.js (generated, 51 changed lines)

@@ -30,8 +30,6 @@ const nock_1 = __importDefault(require("nock"));
const sinon = __importStar(require("sinon"));
const codeql = __importStar(require("./codeql"));
const defaults = __importStar(require("./defaults.json"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
@@ -235,55 +233,6 @@ ava_1.default.beforeEach(() => {
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
});
const stubConfig = {
languages: [languages_1.Language.cpp],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: "",
toolCacheDir: "",
codeQLCmd: "",
gitHubVersion: {
type: util.GitHubVariant.DOTCOM,
},
dbLocation: "",
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
(0, ava_1.default)("databaseInitCluster() Lua feature flag enabled, but old CLI", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.9.0");
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.LuaTracerConfigEnabled]));
t.false(runnerConstructorStub.firstCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be absent, but it is present");
t.false(runnerConstructorStub.firstCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be absent, but it is present");
});
(0, ava_1.default)("databaseInitCluster() Lua feature flag disabled, with old CLI", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.9.0");
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]));
t.false(runnerConstructorStub.firstCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be absent, but it is present");
t.false(runnerConstructorStub.firstCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be absent, but it is present");
});
(0, ava_1.default)("databaseInitCluster() Lua feature flag enabled, compatible CLI", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.10.0");
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.LuaTracerConfigEnabled]));
t.true(runnerConstructorStub.firstCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be present, but it is absent");
});
(0, ava_1.default)("databaseInitCluster() Lua feature flag disabled, compatible CLI", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.10.0");
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]));
t.true(runnerConstructorStub.firstCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be present, but it is absent");
});
function stubToolRunnerConstructor() {
const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
runnerObjectStub.exec.resolves(0);

File diff suppressed because one or more lines are too long
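The tests removed from lib/codeql.test.js above all follow one shape: stub the ToolRunner constructor, call a method on the codeql object, then assert on the CLI arguments captured in firstCall.args[1]. Below is a minimal sketch of that shape; the first two lines of the helper appear verbatim at the end of the diff, while the returned constructor stub and the assertion shown in comments are assumptions added for illustration.

const sinon = require("sinon");
const toolrunner = require("@actions/exec/lib/toolrunner");

// Replace ToolRunner with a stub whose exec() resolves, so a test can inspect
// which command-line arguments would have been passed to the CodeQL CLI.
function stubToolRunnerConstructor() {
  const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
  runnerObjectStub.exec.resolves(0);
  // Assumption: the real helper returns the constructor stub so tests can read
  // firstCall.args; only its first two lines are visible in the diff above.
  return sinon.stub(toolrunner, "ToolRunner").returns(runnerObjectStub);
}

// Illustrative assertion in the style of the deleted tests (not one of them):
//   const runnerConstructorStub = stubToolRunnerConstructor();
//   await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
//   t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"));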

lib/config-utils.js (generated, 154 changed lines)

@@ -19,17 +19,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePacksSpecification = exports.parsePacksFromConfig = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPacksRequireLanguage = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = void 0;
exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.parsePacksFromConfig = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPacksRequireLanguage = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const yaml = __importStar(require("js-yaml"));
const semver = __importStar(require("semver"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const util_1 = require("./util");
// Property names from the user-supplied config file.
const NAME_PROPERTY = "name";
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
@@ -118,41 +115,14 @@ const builtinSuites = ["security-extended", "security-and-quality"];
/**
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite.
* May inject ML queries, and the return value will declare if this was done.
*/
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
var _a;
let injectedMlQueries = false;
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
const found = builtinSuites.find((suite) => suite === suiteName);
if (!found) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
// pack, then add the ML-powered query pack so that we run ML-powered queries.
if (
// Only run ML-powered queries on Windows if we have a CLI that supports it.
(process.platform !== "win32" ||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS))) &&
languages.includes("javascript") &&
(found === "security-extended" || found === "security-and-quality") &&
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some(isMlPoweredJsQueriesPack)) &&
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
if (!packs.javascript) {
packs.javascript = [];
}
packs.javascript.push(await (0, util_1.getMlPoweredJsQueriesPack)(codeQL));
injectedMlQueries = true;
}
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
return injectedMlQueries;
}
function isMlPoweredJsQueriesPack(pack) {
return (pack === util_1.ML_POWERED_JS_QUERIES_PACK_NAME ||
pack.startsWith(`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}@`) ||
pack.startsWith(`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}:`));
}
/**
* Retrieve the set of queries at localQueryPath and add them to resultMap.
@@ -209,13 +179,8 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
* parsing the 'uses' actions in the workflow file. So it can handle
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*
* This may inject ML queries into the packs to use, and the return value will
* declare if this was done.
*
* @returns whether or not we injected ML queries into the packs
*/
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, workspacePath, apiDetails, logger, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
@@ -223,15 +188,15 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
// Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) {
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), workspacePath, configFile);
return false;
return;
}
// Check for one of the builtin suites
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
return;
}
// Otherwise, must be a reference to another repo
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
return false;
}
// Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately
@@ -439,16 +404,13 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
}
return parsedLanguages;
}
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
let injectedMlQueries = false;
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, workspacePath, apiDetails, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
injectedMlQueries = injectedMlQueries || didInject;
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, workspacePath, apiDetails, logger);
}
return injectedMlQueries;
}
// Returns true if either no queries were provided in the workflow.
// or if the queries in the workflow were provided in "additive" mode,
@@ -456,14 +418,14 @@ async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, r
// should instead be added in addition
function shouldAddConfigFileQueries(queriesInput) {
if (queriesInput) {
return queriesInput.trimStart().slice(0, 1) === "+";
return queriesInput.trimStart().substr(0, 1) === "+";
}
return true;
}
/**
* Get the default config for when the user has not supplied one.
*/
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
var _a;
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
const queries = {};
@@ -474,11 +436,10 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
};
}
await addDefaultQueries(codeQL, languages, queries);
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
let injectedMlQueries = false;
if (queriesInput) {
injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
}
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
return {
languages,
queries,
@@ -492,16 +453,13 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
injectedMlQueries,
};
}
exports.getDefaultConfig = getDefaultConfig;
/**
* Load the config from the given file.
*/
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
var _a;
let parsedYAML;
if (isLocal(configFile)) {
@@ -542,14 +500,12 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
if (!disableDefaultQueries) {
await addDefaultQueries(codeQL, languages, queries);
}
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
let injectedMlQueries = false;
if (queriesInput) {
injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
}
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
@@ -562,7 +518,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureFlags, logger, configFile);
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, logger, configFile);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
@@ -587,6 +543,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
}
}
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
return {
languages,
queries,
@@ -600,9 +557,6 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
injectedMlQueries,
};
}
/**
@@ -640,7 +594,7 @@ function parsePacksFromConfig(packsByLanguage, languages, configFile) {
}
packs[lang] = [];
for (const packStr of packsArr) {
packs[lang].push(validatePacksSpecification(packStr, configFile));
packs[lang].push(toPackWithVersion(packStr, configFile));
}
}
return packs;
@@ -665,74 +619,32 @@ function parsePacksFromInput(packsInput, languages) {
}
return {
[languages[0]]: packsInput.split(",").reduce((packs, pack) => {
packs.push(validatePacksSpecification(pack, ""));
packs.push(toPackWithVersion(pack, ""));
return packs;
}, []),
};
}
/**
* Validates that this package specification is syntactically correct.
* It may not point to any real package, but after this function returns
* without throwing, we are guaranteed that the package specification
* is roughly correct.
*
* The CLI itself will do a more thorough validation of the package
* specification.
*
* A package specification looks like this:
*
* `scope/name@version:path`
*
* Version and path are optional.
*
* @param packStr the package specification to verify.
* @param configFile Config file to use for error reporting
*/
function validatePacksSpecification(packStr, configFile) {
function toPackWithVersion(packStr, configFile) {
if (typeof packStr !== "string") {
throw new Error(getPacksStrInvalid(packStr, configFile));
}
packStr = packStr.trim();
const atIndex = packStr.indexOf("@");
const colonIndex = packStr.indexOf(":", atIndex);
const packStart = 0;
const versionStart = atIndex + 1 || undefined;
const pathStart = colonIndex + 1 || undefined;
const packEnd = Math.min(atIndex > 0 ? atIndex : Infinity, colonIndex > 0 ? colonIndex : Infinity, packStr.length);
const versionEnd = versionStart
? Math.min(colonIndex > 0 ? colonIndex : Infinity, packStr.length)
: undefined;
const pathEnd = pathStart ? packStr.length : undefined;
const packName = packStr.slice(packStart, packEnd).trim();
const version = versionStart
? packStr.slice(versionStart, versionEnd).trim()
: undefined;
const packPath = pathStart
? packStr.slice(pathStart, pathEnd).trim()
: undefined;
if (!PACK_IDENTIFIER_PATTERN.test(packName)) {
const nameWithVersion = packStr.trim().split("@");
let version;
if (nameWithVersion.length > 2 ||
!PACK_IDENTIFIER_PATTERN.test(nameWithVersion[0])) {
throw new Error(getPacksStrInvalid(packStr, configFile));
}
if (version) {
try {
new semver.Range(version);
}
catch (e) {
// The range string is invalid. OK to ignore the caught error
else if (nameWithVersion.length === 2) {
version = semver.clean(nameWithVersion[1]) || undefined;
if (!version) {
throw new Error(getPacksStrInvalid(packStr, configFile));
}
}
if (packPath &&
(path.isAbsolute(packPath) || path.normalize(packPath) !== packPath)) {
throw new Error(getPacksStrInvalid(packStr, configFile));
}
if (!packPath && pathStart) {
// 0 length path
throw new Error(getPacksStrInvalid(packStr, configFile));
}
return (packName + (version ? `@${version}` : "") + (packPath ? `:${packPath}` : ""));
return {
packName: nameWithVersion[0].trim(),
version,
};
}
exports.validatePacksSpecification = validatePacksSpecification;
// exported for testing
function parsePacks(rawPacksFromConfig, rawPacksInput, languages, configFile) {
const packsFromInput = parsePacksFromInput(rawPacksInput, languages);
@@ -770,16 +682,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
var _a, _b, _c;
let config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug("No configuration file was provided");
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
}
else {
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.

File diff suppressed because one or more lines are too long
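The validatePacksSpecification documentation in the lib/config-utils.js diff above describes the pack specification format scope/name@version:path, where version and path are optional. The function below is a simplified, standalone sketch of splitting such a string; it follows the indexOf logic visible in the diff but omits the PACK_IDENTIFIER_PATTERN, semver range, and path validation, and its name is not from the source.

// Sketch only: split "scope/name@version:path" into its three parts.
function splitPackSpecification(packStr) {
  packStr = packStr.trim();
  const atIndex = packStr.indexOf("@");
  const colonIndex = packStr.indexOf(":", atIndex);
  const packEnd = Math.min(
    atIndex > 0 ? atIndex : Infinity,
    colonIndex > 0 ? colonIndex : Infinity,
    packStr.length
  );
  return {
    name: packStr.slice(0, packEnd).trim(),
    version:
      atIndex > 0
        ? packStr.slice(atIndex + 1, colonIndex > 0 ? colonIndex : undefined).trim()
        : undefined,
    path: colonIndex > 0 ? packStr.slice(colonIndex + 1).trim() : undefined,
  };
}

// e.g. splitPackSpecification("c/d@~1.0.0:a/b")
//   -> { name: "c/d", version: "~1.0.0", path: "a/b" }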

lib/config-utils.test.js (generated, 250 changed lines)

@@ -26,11 +26,11 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const semver_1 = require("semver");
const sinon = __importStar(require("sinon"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
@@ -88,8 +88,8 @@ function mockListLanguages(languages) {
};
},
});
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
});
});
(0, ava_1.default)("loading config saves config", async (t) => {
@@ -111,21 +111,18 @@ function mockListLanguages(languages) {
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig returns undefined before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig(tmpDir, logger);
t.not(config2, undefined);
if (config2 !== undefined) {
t.deepEqual(config1, config2);
}
t.deepEqual(config1, config2);
});
});
(0, ava_1.default)("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
try {
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -138,7 +135,7 @@ function mockListLanguages(languages) {
// no filename given, just a repo
const configFile = "octo-org/codeql-config@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -152,7 +149,7 @@ function mockListLanguages(languages) {
const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile)));
try {
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -218,13 +215,10 @@ function mockListLanguages(languages) {
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: "my-artifact",
debugDatabaseName: "my-db",
injectedMlQueries: false,
};
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
@@ -260,7 +254,7 @@ function mockListLanguages(languages) {
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, [
@@ -303,7 +297,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
@@ -336,7 +330,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file.
@@ -368,7 +362,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled
@@ -394,7 +388,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
@@ -433,7 +427,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries,
// once for each of additional1 and additional2,
@@ -472,7 +466,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
try {
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.fail("initConfig did not throw error");
}
catch (err) {
@@ -515,7 +509,7 @@ function queriesToResolvedQueryForm(queries) {
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript";
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.assert(spyGetContents.called);
});
});
@@ -525,7 +519,7 @@ function queriesToResolvedQueryForm(queries) {
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -541,7 +535,7 @@ function queriesToResolvedQueryForm(queries) {
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -558,7 +552,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
try {
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -570,7 +564,7 @@ function queriesToResolvedQueryForm(queries) {
return await util.withTmpDir(async (tmpDir) => {
const languages = "rubbish,english";
try {
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -598,9 +592,14 @@ function queriesToResolvedQueryForm(queries) {
const configFile = path.join(tmpDir, "codeql-config.yaml");
fs.writeFileSync(configFile, inputFileContents);
const languages = "javascript";
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, {
[languages_1.Language.javascript]: ["a/b@1.2.3"],
[languages_1.Language.javascript]: [
{
packName: "a/b",
version: (0, semver_1.clean)("1.2.3"),
},
],
});
});
});
@@ -632,10 +631,20 @@ function queriesToResolvedQueryForm(queries) {
fs.writeFileSync(configFile, inputFileContents);
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript,python,cpp";
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, {
[languages_1.Language.javascript]: ["a/b@1.2.3"],
[languages_1.Language.python]: ["c/d@1.2.3"],
[languages_1.Language.javascript]: [
{
packName: "a/b",
version: (0, semver_1.clean)("1.2.3"),
},
],
[languages_1.Language.python]: [
{
packName: "c/d",
version: (0, semver_1.clean)("1.2.3"),
},
],
});
t.deepEqual(queries, {
cpp: {
@@ -675,7 +684,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, "utf8");
try {
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -748,69 +757,52 @@ const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
/**
* Test macro for ensuring the packs block is valid
*/
const parsePacksMacro = ava_1.default.macro({
exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected),
title: (providedTitle = "") => `Parse Packs: ${providedTitle}`,
});
function parsePacksMacro(t, packsByLanguage, languages, expected) {
t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected);
}
parsePacksMacro.title = (providedTitle) => `Parse Packs: ${providedTitle}`;
/**
* Test macro for testing when the packs block is invalid
*/
const parsePacksErrorMacro = ava_1.default.macro({
exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), {
function parsePacksErrorMacro(t, packsByLanguage, languages, expected) {
t.throws(() => {
configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b");
}, {
message: expected,
}),
title: (providedTitle = "") => `Parse Packs Error: ${providedTitle}`,
});
});
}
parsePacksErrorMacro.title = (providedTitle) => `Parse Packs Error: ${providedTitle}`;
/**
* Test macro for testing when the packs block is invalid
*/
const invalidPackNameMacro = ava_1.default.macro({
exec: (t, name) => parsePacksErrorMacro.exec(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`)),
title: (_providedTitle, arg) => `Invalid pack string: ${arg}`,
});
function invalidPackNameMacro(t, name) {
parsePacksErrorMacro(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`));
}
invalidPackNameMacro.title = (_, arg) => `Invalid pack string: ${arg}`;
(0, ava_1.default)("no packs", parsePacksMacro, {}, [], {});
(0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
],
});
(0, ava_1.default)("two packs with spaces", parsePacksMacro, [" a/b ", " c/d@1.2.3 "], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
],
});
(0, ava_1.default)("two packs with language", parsePacksMacro, {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
}, [languages_1.Language.cpp, languages_1.Language.java, languages_1.Language.csharp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
});
(0, ava_1.default)("packs with other valid names", parsePacksMacro, [
// ranges are ok
"c/d@1.0",
"c/d@~1.0.0",
"c/d@~1.0.0:a/b",
"c/d@~1.0.0+abc:a/b",
"c/d@~1.0.0-abc:a/b",
"c/d:a/b",
// whitespace is removed
" c/d @ ~1.0.0 : b.qls ",
// and it is retained within a path
" c/d @ ~1.0.0 : b/a path with/spaces.qls ",
// this is valid. the path is '@'. It will probably fail when passed to the CLI
"c/d@1.2.3:@",
// this is valid, too. It will fail if it doesn't match a path
// (globbing is not done)
"c/d@1.2.3:+*)_(",
], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
"c/d@1.0",
"c/d@~1.0.0",
"c/d@~1.0.0:a/b",
"c/d@~1.0.0+abc:a/b",
"c/d@~1.0.0-abc:a/b",
"c/d:a/b",
"c/d@~1.0.0:b.qls",
"c/d@~1.0.0:b/a path with/spaces.qls",
"c/d@1.2.3:@",
"c/d@1.2.3:+*)_(",
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
],
[languages_1.Language.java]: [
{ packName: "d/e", version: undefined },
{ packName: "f/g", version: (0, semver_1.clean)("1.2.3") },
],
});
(0, ava_1.default)("no language", parsePacksErrorMacro, ["a/b@1.2.3"], [languages_1.Language.java, languages_1.Language.python], /The configuration file "\/a\/b" is invalid: property "packs" must split packages by language/);
@@ -820,14 +812,7 @@ const invalidPackNameMacro = ava_1.default.macro({
(0, ava_1.default)(invalidPackNameMacro, "c-/d");
(0, ava_1.default)(invalidPackNameMacro, "-c/d");
(0, ava_1.default)(invalidPackNameMacro, "c/d_d");
(0, ava_1.default)(invalidPackNameMacro, "c/d@@");
(0, ava_1.default)(invalidPackNameMacro, "c/d@1.0.0:");
(0, ava_1.default)(invalidPackNameMacro, "c/d:");
(0, ava_1.default)(invalidPackNameMacro, "c/d:/a");
(0, ava_1.default)(invalidPackNameMacro, "@1.0.0:a");
(0, ava_1.default)(invalidPackNameMacro, "c/d@../a");
(0, ava_1.default)(invalidPackNameMacro, "c/d@b/../a");
(0, ava_1.default)(invalidPackNameMacro, "c/d:z@1");
(0, ava_1.default)(invalidPackNameMacro, "c/d@x");
/**
* Test macro for testing the packs block and the packs input
*/
@@ -844,85 +829,44 @@ function parseInputAndConfigErrorMacro(t, packsFromConfig, packsFromInput, langu
}
parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and config Error: ${providedTitle}`;
(0, ava_1.default)("input only", parseInputAndConfigMacro, {}, " c/d ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["c/d"],
[languages_1.Language.cpp]: [{ packName: "c/d", version: undefined }],
});
(0, ava_1.default)("input only with multiple", parseInputAndConfigMacro, {}, "a/b , c/d@1.2.3", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: "1.2.3" },
],
});
(0, ava_1.default)("input only with +", parseInputAndConfigMacro, {}, " + a/b , c/d@1.2.3 ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: "1.2.3" },
],
});
(0, ava_1.default)("config only", parseInputAndConfigMacro, ["a/b", "c/d"], " ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["a/b", "c/d"],
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: undefined },
],
});
(0, ava_1.default)("input overrides", parseInputAndConfigMacro, ["a/b", "c/d"], " e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["e/f", "g/h@1.2.3"],
[languages_1.Language.cpp]: [
{ packName: "e/f", version: undefined },
{ packName: "g/h", version: "1.2.3" },
],
});
(0, ava_1.default)("input and config", parseInputAndConfigMacro, ["a/b", "c/d"], " +e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["e/f", "g/h@1.2.3", "a/b", "c/d"],
[languages_1.Language.cpp]: [
{ packName: "e/f", version: undefined },
{ packName: "g/h", version: "1.2.3" },
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: undefined },
],
});
(0, ava_1.default)("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], /No languages specified/);
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
const mlPoweredQueriesMacro = ava_1.default.macro({
exec: async (t, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async getVersion() {
return codeQLVersion;
},
async resolveQueries() {
return {
byLanguage: {
javascript: { "fake-query.ql": {} },
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
: []), (0, logging_1.getRunnerLogger)(true));
if (expectedVersionString !== undefined) {
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
`codeql/javascript-experimental-atm-queries@${expectedVersionString}`,
],
});
}
else {
t.deepEqual(packs, {});
}
});
},
title: (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => `ML-powered queries ${expectedVersionString !== undefined
? `${expectedVersionString} are`
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`,
});
// macro, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString
// Test that ML-powered queries aren't run on v2.7.4 of the CLI.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
// Test that ML-powered queries aren't run when the feature flag is off.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
// Test that the ~0.1.0 version of ML-powered queries is run on v2.8.3 of the CLI.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.3", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
// Test that ML-powered queries aren't run when the user hasn't specified that we should run the
// `security-extended` or `security-and-quality` query suite.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
// Test that ML-powered queries are run on non-Windows platforms running `security-extended` on
// versions of the CodeQL CLI prior to 2.9.0.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.2.0");
// Test that ML-powered queries are run on non-Windows platforms running `security-and-quality` on
// versions of the CodeQL CLI prior to 2.9.0.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-and-quality", process.platform === "win32" ? undefined : "~0.2.0");
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL CLI
// 2.9.0+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-extended", "~0.2.0");
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
// CLI 2.9.0+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-and-quality", "~0.2.0");
// Test that we don't inject an ML-powered query pack if the user has already specified one.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
// errors
// input with invalid pack name
//# sourceMappingURL=config-utils.test.js.map
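The run of test cases above pins down how the injected `codeql/javascript-experimental-atm-queries` pack version is expected to vary with CLI version and platform, assuming the ML-powered-queries feature flag is on and a `security-extended` or `security-and-quality` suite was requested. A minimal sketch reconstructed from those expectations alone (the helper name and the exact 2.8.4 cut-off are assumptions, not taken from this diff):

import * as semver from "semver";

// Sketch only: mirrors what the tests above assert, not the real implementation.
function expectedMlPoweredQueriesVersion(cliVersion: string): string | undefined {
  if (semver.lt(cliVersion, "2.7.5")) {
    return undefined; // CLI too old: no ML-powered query pack is injected
  }
  if (semver.lt(cliVersion, "2.8.4")) {
    // 2.8.3 is pinned to ~0.1.0 on non-Windows platforms; the 2.8.4 boundary is assumed.
    return process.platform === "win32" ? undefined : "~0.1.0";
  }
  if (semver.lt(cliVersion, "2.9.0")) {
    return process.platform === "win32" ? undefined : "~0.2.0";
  }
  return "~0.2.0"; // from 2.9.0 the pack is injected on every platform
}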

File diff suppressed because one or more lines are too long

51 lib/database-upload.js generated
View File

@@ -42,25 +42,52 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
return;
}
const client = (0, api_client_1.getApiClient)(apiDetails);
let useUploadDomain;
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
});
useUploadDomain = response.data["uploads_domain_enabled"];
}
catch (e) {
if (util.isHTTPError(e) && e.status === 404) {
logger.debug("Repository is not opted in to database uploads. Skipping upload.");
}
else {
console.log(e);
logger.info(`Skipping database upload due to unknown error: ${e}`);
}
return;
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
for (const language of config.languages) {
// Upload the database bundle.
// Although we are uploading arbitrary file contents to the API, it's worth
// noting that it's the API's job to validate that the contents are acceptable.
// This API method is available to anyone with write access to the repo.
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql, language));
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql));
try {
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
language,
name: `${language}-database`,
data: payload,
headers: {
authorization: `token ${apiDetails.auth}`,
"Content-Type": "application/zip",
},
});
if (useUploadDomain) {
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
language,
name: `${language}-database`,
data: payload,
headers: {
authorization: `token ${apiDetails.auth}`,
},
});
}
else {
await client.request(`PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
language,
data: payload,
});
}
logger.debug(`Successfully uploaded database for ${language}`);
}
catch (e) {
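In outline, the reworked upload first asks the code-scanning API whether the repository is opted in and whether the dedicated uploads domain should be used, then picks the matching endpoint for each language. A condensed sketch of that branching, reusing the routes from the hunk above; `owner`, `repo`, `language`, `config`, `codeql`, and `apiDetails` stand in for values already in scope in `uploadDatabases`, and error handling is omitted:

// Opt-in probe: a 404 here means the repository has not opted in to database uploads.
const optIn = await client.request(
  "GET /repos/:owner/:repo/code-scanning/codeql/databases",
  { owner, repo }
);
const useUploadDomain = optIn.data["uploads_domain_enabled"];

const payload = fs.readFileSync(await bundleDb(config, language, codeql));
if (useUploadDomain) {
  // Preferred path: POST the bundle to the uploads domain, named per language.
  await client.request(
    "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name",
    {
      owner,
      repo,
      language,
      name: `${language}-database`,
      data: payload,
      headers: { authorization: `token ${apiDetails.auth}` },
    }
  );
} else {
  // Fallback: PUT the bundle directly to the API host.
  await client.request(
    "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language",
    { owner, repo, language, data: payload }
  );
}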

View File

@@ -1 +1 @@
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,UAAU,CAAC,CAAC;IACxC,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,8BAA8B;QAC9B,2EAA2E;QAC3E,8EAA8E;QAC9E,wEAAwE;QACxE,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAC7B,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CACnD,CAAC;QACF,IAAI;YACF,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;gBACE,KAAK,EAAE,aAAa,CAAC,KAAK;gBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;gBACxB,QAAQ;gBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;gBAC5B,IAAI,EAAE,OAAO;gBACb,OAAO,EAAE;oBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;oBACzC,cAAc,EAAE,iBAAiB;iBAClC;aACF,CACF,CAAC;YACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAxDD,0CAwDC"}
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,UAAU,CAAC,CAAC;IACxC,IAAI,eAAwB,CAAC;IAC7B,IAAI;QACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,wDAAwD,EACxD;YACE,KAAK,EAAE,aAAa,CAAC,KAAK;YAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;SACzB,CACF,CAAC;QACF,eAAe,GAAG,QAAQ,CAAC,IAAI,CAAC,wBAAwB,CAAC,CAAC;KAC3D;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3C,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;SACH;aAAM;YACL,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,MAAM,CAAC,IAAI,CAAC,kDAAkD,CAAC,EAAE,CAAC,CAAC;SACpE;QACD,OAAO;KACR;IAED,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,8BAA8B;QAC9B,2EAA2E;QAC3E,8EAA8E;QAC9E,wEAAwE;QACxE,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC,CAAC;QAC1E,IAAI;YACF,IAAI,eAAe,EAAE;gBACnB,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;oBACE,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,IAAI,EAAE,OAAO;oBACb,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;qBAC1C;iBACF,CACF,CAAC;aACH;iBAAM;gBACL,MAAM,MAAM,CAAC,OAAO,CAClB,kEAAkE,EAClE;oBACE,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,OAAO;iBACd,CACF,CAAC;aACH;YACD,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAtFD,0CAsFC"}

View File

@@ -56,22 +56,60 @@ function getTestConfig(tmpDir) {
dbLocation: tmpDir,
packs: {},
debugMode: false,
debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
}
async function mockHttpRequests(databaseUploadStatusCode) {
function getRecordingLogger(messages) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatusCode) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request");
const url = "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name";
const databaseUploadSpy = requestSpy.withArgs(url);
if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql/databases");
if (optInStatusCode < 300) {
optInSpy.resolves({
status: optInStatusCode,
data: {
useUploadDomain,
},
headers: {},
url: "GET /repos/:owner/:repo/code-scanning/codeql/databases",
});
}
else {
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
optInSpy.throws(new util_1.HTTPError("some error message", optInStatusCode));
}
if (databaseUploadStatusCode !== undefined) {
const url = useUploadDomain
? "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name"
: "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language";
const databaseUploadSpy = requestSpy.withArgs(url);
if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
}
else {
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
}
}
sinon.stub(apiClient, "getApiClient").value(() => client);
}
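After this change `mockHttpRequests` takes the opt-in request status, whether the response should report the uploads domain as enabled, and optionally the status of the upload request itself. The calls used further down this file illustrate the shape:

mockHttpRequests(404);             // opt-in probe 404s: upload is skipped, no upload stub needed
mockHttpRequests(500);             // opt-in probe fails with an unknown error
mockHttpRequests(200, false, 201); // opted in, uploads domain off: PUT upload succeeds
mockHttpRequests(200, true, 201);  // opted in, uploads domain on: POST upload succeeds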
@@ -84,7 +122,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.returns("false");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined);
});
@@ -100,7 +138,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined);
});
@@ -116,7 +154,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHAE };
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined);
});
@@ -130,11 +168,53 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.") !== undefined);
});
});
(0, ava_1.default)("Abort database upload if opt-in request returns 404", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(404);
(0, codeql_1.setCodeQL)({
async databaseBundle() {
return;
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Repository is not opted in to database uploads. Skipping upload.") !== undefined);
});
});
(0, ava_1.default)("Abort database upload if opt-in request fails with something other than 404", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(500);
(0, codeql_1.setCodeQL)({
async databaseBundle() {
return;
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "info" &&
v.message ===
"Skipping database upload due to unknown error: Error: some error message") !== undefined);
});
});
(0, ava_1.default)("Don't crash if uploading a database fails", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -143,14 +223,14 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(500);
mockHttpRequests(200, false, 500);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "warning" &&
v.message ===
"Failed to upload database for javascript: Error: some error message") !== undefined);
@@ -164,14 +244,14 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
mockHttpRequests(200, false, 201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined);
});
@@ -184,14 +264,14 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
mockHttpRequests(200, true, 201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined);
});

File diff suppressed because one or more lines are too long

View File

@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20220527"
"bundleVersion": "codeql-bundle-20211207"
}

95 lib/feature-flags.js generated
View File

@@ -1,95 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFeatureFlags = exports.GitHubFeatureFlags = exports.FeatureFlag = void 0;
const api_client_1 = require("./api-client");
const util = __importStar(require("./util"));
var FeatureFlag;
(function (FeatureFlag) {
FeatureFlag["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
FeatureFlag["LuaTracerConfigEnabled"] = "lua_tracer_config_enabled";
})(FeatureFlag = exports.FeatureFlag || (exports.FeatureFlag = {}));
class GitHubFeatureFlags {
constructor(gitHubVersion, apiDetails, repositoryNwo, logger) {
this.gitHubVersion = gitHubVersion;
this.apiDetails = apiDetails;
this.repositoryNwo = repositoryNwo;
this.logger = logger;
}
async getValue(flag) {
const response = (await this.getApiResponse())[flag];
if (response === undefined) {
this.logger.debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
return false;
}
return response;
}
async getApiResponse() {
const loadApiResponse = async () => {
// Do nothing when not running against github.com
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
this.logger.debug("Not running against github.com. Disabling all feature flags.");
return {};
}
const client = (0, api_client_1.getApiClient)(this.apiDetails);
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql-action/features", {
owner: this.repositoryNwo.owner,
repo: this.repositoryNwo.repo,
});
return response.data;
}
catch (e) {
if (util.isHTTPError(e) && e.status === 403) {
this.logger.warning("This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. " +
"As a result, it will not be opted into any experimental features. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
}
else {
// Some feature flags, such as `ml_powered_queries_enabled` affect the produced alerts.
// Considering these feature flags disabled in the event of a transient error could
// therefore lead to alert churn. As a result, we crash if we cannot determine the value of
// the feature flags.
throw new Error(`Encountered an error while trying to load feature flags: ${e}`);
}
}
};
const apiResponse = this.cachedApiResponse || (await loadApiResponse());
this.cachedApiResponse = apiResponse;
return apiResponse;
}
}
exports.GitHubFeatureFlags = GitHubFeatureFlags;
/**
* Create a feature flags instance with the specified set of enabled flags.
*
 * This should only be used within tests.
*/
function createFeatureFlags(enabledFlags) {
return {
getValue: async (flag) => {
return enabledFlags.includes(flag);
},
};
}
exports.createFeatureFlags = createFeatureFlags;
//# sourceMappingURL=feature-flags.js.map
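As the docstring above notes, `createFeatureFlags` is a test-only stand-in for `GitHubFeatureFlags`: `getValue` answers purely from the list the helper was constructed with, with no API traffic. A minimal usage sketch inside an ava test body:

import { createFeatureFlags, FeatureFlag } from "./feature-flags";

const featureFlags = createFeatureFlags([FeatureFlag.MlPoweredQueriesEnabled]);
t.true(await featureFlags.getValue(FeatureFlag.MlPoweredQueriesEnabled));
t.false(await featureFlags.getValue(FeatureFlag.LuaTracerConfigEnabled));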

View File

@@ -1 +0,0 @@
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAGX;AAHD,WAAY,WAAW;IACrB,qEAAsD,CAAA;IACtD,mEAAoD,CAAA;AACtD,CAAC,EAHW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAGtB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,MAAM,QAAQ,GAAG,CAAC,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC;QACrD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;oBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;wBAC9F,oEAAoE;wBACpE,qFAAqF;wBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;iBACH;qBAAM;oBACL,uFAAuF;oBACvF,mFAAmF;oBACnF,2FAA2F;oBAC3F,qBAAqB;oBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;iBACH;aACF;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AAhED,gDAgEC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}

View File

@@ -1,92 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
ava_1.default.beforeEach(() => {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
});
const testApiDetails = {
auth: "1234",
url: "https://github.com",
};
const testRepositoryNwo = (0, repository_1.parseRepositoryNwo)("github/example");
const ALL_FEATURE_FLAGS_DISABLED_VARIANTS = [
{
description: "GHES",
gitHubVersion: { type: util_1.GitHubVariant.GHES, version: "3.0.0" },
},
{ description: "GHAE", gitHubVersion: { type: util_1.GitHubVariant.GHAE } },
];
for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
(0, ava_1.default)(`All feature flags are disabled if running against ${variant.description}`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags(variant.gitHubVersion, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
}
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Not running against github.com. Disabling all feature flags.") !== undefined);
});
});
}
(0, ava_1.default)("Feature flags are disabled if they're not returned in API response", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
}
for (const featureFlag of ["ml_powered_queries_enabled"]) {
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
`Feature flag '${featureFlag}' undefined in API response, considering it disabled.`) !== undefined);
}
});
});
(0, ava_1.default)("Feature flags exception is propagated if the API request errors", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(500, {});
await t.throwsAsync(async () => featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled), {
message: "Encountered an error while trying to load feature flags: Error: some error message",
});
});
});
const FEATURE_FLAGS = [
"ml_powered_queries_enabled",
"lua_tracer_config_enabled",
];
for (const featureFlag of FEATURE_FLAGS) {
(0, ava_1.default)(`Feature flag '${featureFlag}' is enabled if enabled in the API response`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
const expectedFeatureFlags = {};
for (const f of FEATURE_FLAGS) {
expectedFeatureFlags[f] = false;
}
expectedFeatureFlags[featureFlag] = true;
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureFlags);
const actualFeatureFlags = {
ml_powered_queries_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled),
lua_tracer_config_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.LuaTracerConfigEnabled),
};
t.deepEqual(actualFeatureFlags, expectedFeatureFlags);
});
});
}
//# sourceMappingURL=feature-flags.test.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI,CAAC,4BAA4B,CAAC,EAAE;YACxD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,uBAAuB,CAAC,EACtE;YACE,OAAO,EACL,oFAAoF;SACvF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,4BAA4B;IAC5B,2BAA2B;CAC5B,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAgC,EAAE,CAAC;YAC7D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAgC;gBACtD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACpC;gBACD,yBAAyB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACpD,2BAAW,CAAC,sBAAsB,CACnC;aACF,CAAC;YAEF
,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}

5 lib/fingerprints.js generated
View File

@@ -226,8 +226,9 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return the updated sarif file contents.
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifContents, sourceRoot, logger) {
var _a, _b, _c;
const sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile = {};
@@ -265,7 +266,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return JSON.stringify(sarif);
}
exports.addFingerprints = addFingerprints;
//# sourceMappingURL=fingerprints.js.map
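With this change `addFingerprints` takes the raw SARIF text and hands back the re-serialised (condensed) JSON string, leaving file I/O to the caller. A sketch of the resulting call pattern; `sarifPath` and `sourceRoot` are assumed inputs:

import * as fs from "fs";
import * as fingerprints from "./fingerprints";
import { getRunnerLogger } from "./logging";

async function fingerprintSarifFile(sarifPath: string, sourceRoot: string): Promise<void> {
  const contents = fs.readFileSync(sarifPath, "utf8");
  const updated = await fingerprints.addFingerprints(contents, sourceRoot, getRunnerLogger(true));
  fs.writeFileSync(sarifPath, updated); // condensed JSON, not prettified
}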

File diff suppressed because one or more lines are too long

View File

@@ -169,24 +169,30 @@ function testResolveUriToFile(uri, index, artifactsURIs) {
});
(0, ava_1.default)("addFingerprints", async (t) => {
// Run an end-to-end test on a test file
const input = JSON.parse(fs
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString());
const expected = JSON.parse(fs
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
.toString());
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);
});
(0, ava_1.default)("missingRegions", async (t) => {
// Run an end-to-end test on a test file
const input = JSON.parse(fs
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString());
const expected = JSON.parse(fs
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
.toString());
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);

File diff suppressed because one or more lines are too long

20 lib/init-action.js generated
View File

@@ -22,9 +22,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
@@ -49,21 +47,20 @@ async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
}
if (queriesInput !== undefined) {
queriesInput = queriesInput.startsWith("+")
? queriesInput.slice(1)
? queriesInput.substr(1)
: queriesInput;
queries.push(...queriesInput.split(","));
}
const statusReport = {
...statusReportBase,
disable_default_queries: disableDefaultQueries,
languages,
ml_powered_javascript_queries: (0, util_1.getMlPoweredJsQueriesStatus)(config),
workflow_languages: workflowLanguages || "",
paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries.join(","),
tools_input: (0, actions_util_1.getOptionalInput)("tools") || "",
tools_resolved_version: toolsVersion,
workflow_languages: workflowLanguages || "",
};
await (0, actions_util_1.sendStatusReport)(statusReport);
}
@@ -71,7 +68,6 @@ async function run() {
const startedAt = new Date();
const logger = (0, logging_1.getActionsLogger)();
(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
await (0, util_1.checkActionVersion)(pkg.version);
let config;
let codeql;
let toolsVersion;
@@ -80,10 +76,8 @@ async function run() {
externalRepoAuth: (0, actions_util_1.getOptionalInput)("external-repository-token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
try {
const workflowErrors = await (0, actions_util_1.validateWorkflow)();
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
@@ -93,7 +87,7 @@ async function run() {
codeql = initCodeQLResult.codeql;
toolsVersion = initCodeQLResult.toolsVersion;
await (0, util_1.enrichEnvironment)(util_1.Mode.actions, codeql);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("debug") === "true", (0, actions_util_1.getOptionalInput)("debug-artifact-name") || util_1.DEFAULT_DEBUG_ARTIFACT_NAME, (0, actions_util_1.getOptionalInput)("debug-database-name") || util_1.DEFAULT_DEBUG_DATABASE_NAME, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, featureFlags, logger);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("debug") === "true", (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
if (config.languages.includes(languages_1.Language.python) &&
(0, actions_util_1.getRequiredInput)("setup-python-dependencies") === "true") {
try {
@@ -128,7 +122,7 @@ async function run() {
(0, util_1.getMemoryFlagValue)((0, actions_util_1.getOptionalInput)("ram")).toString());
core.exportVariable("CODEQL_THREADS", (0, util_1.getThreadsFlagValue)((0, actions_util_1.getOptionalInput)("threads"), logger).toString());
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", undefined, featureFlags);
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", undefined);
if (tracerConfig !== undefined) {
for (const [key, value] of Object.entries(tracerConfig.env)) {
core.exportVariable(key, value);
@@ -143,7 +137,7 @@ async function run() {
catch (error) {
core.setFailed(String(error));
console.log(error);
await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", (0, actions_util_1.getActionsStatus)(error), startedAt, String(error), error instanceof Error ? error.stack : undefined));
await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "failure", startedAt, String(error), error instanceof Error ? error.stack : undefined));
return;
}
await sendSuccessStatusReport(startedAt, config, toolsVersion);

File diff suppressed because one or more lines are too long

39 lib/init.js generated
View File

@@ -38,43 +38,24 @@ async function initCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant,
return { codeql, toolsVersion };
}
exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;
}
exports.initConfig = initConfig;
async function runInit(codeql, config, sourceRoot, processName, processLevel, featureFlags) {
var _a, _b;
async function runInit(codeql, config, sourceRoot, processName, processLevel) {
fs.mkdirSync(config.dbLocation, { recursive: true });
try {
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Init a database cluster
await codeql.databaseInitCluster(config, sourceRoot, processName, processLevel, featureFlags);
}
else {
for (const language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config, language), language, sourceRoot);
}
}
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Init a database cluster
await codeql.databaseInitCluster(config.dbLocation, config.languages, sourceRoot, processName, processLevel);
}
catch (e) {
// Handle the situation where init is called twice
// for the same database in the same job.
if (e instanceof Error &&
((_a = e.message) === null || _a === void 0 ? void 0 : _a.includes("Refusing to create databases")) &&
e.message.includes("exists and is not an empty directory.")) {
throw new util.UserError(`Is the "init" action called twice in the same job? ${e.message}`);
}
else if (e instanceof Error &&
((_b = e.message) === null || _b === void 0 ? void 0 : _b.includes("is not compatible with this CodeQL CLI"))) {
throw new util.UserError(e.message);
}
else {
throw e;
else {
for (const language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config, language), language, sourceRoot);
}
}
return await (0, tracer_config_1.getCombinedTracerConfig)(config, codeql);
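The net effect of this hunk is that `runInit` branches on the CLI version: CLIs new enough for the combined tracer get a single `databaseInitCluster` call covering every configured language, while older CLIs fall back to one `databaseInit` per language. A stripped-down sketch of that branch, using the five-argument cluster form shown in this hunk and omitting the error handling present on the other side of the diff:

fs.mkdirSync(config.dbLocation, { recursive: true });

if (await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING)) {
  // One database cluster covering all configured languages.
  await codeql.databaseInitCluster(config.dbLocation, config.languages, sourceRoot, processName, processLevel);
} else {
  // Older CLIs: initialise a separate database per language.
  for (const language of config.languages) {
    await codeql.databaseInit(getCodeQLDatabasePath(config, language), language, sourceRoot);
  }
}
return await getCombinedTracerConfig(config, codeql);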

View File

@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,YAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,YAAY,EACZ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA1CD,gCA0CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC,EAChC,YAA0B;;IAE1B,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,YAAY,EACZ,YAAY,CACb,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,kDAAkD;QAClD,yCAAyC;QACzC,IACE,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC,CAAA;YACnD,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,uCAAuC,CAAC,EAC3D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CACtB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;SACH;aAAM,IACL,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,wCAAwC,CAAC,CAAA,EAC7D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACrC;aAAM;YACL,MAAM,CAAC,CAAC;SACT;KACF;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnDD,0BAmDC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAA
C,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AApCD,gCAoCC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,0BAA0B;QAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;KACH;SAAM;QACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;SACH;KACF;IAED,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA9BD,0BA8BC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,
CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}

3 lib/languages.js generated
View File

@@ -11,7 +11,6 @@ var Language;
Language["javascript"] = "javascript";
Language["python"] = "python";
Language["ruby"] = "ruby";
Language["swift"] = "swift";
})(Language = exports.Language || (exports.Language = {}));
// Additional names for languages
const LANGUAGE_ALIASES = {
@@ -36,7 +35,7 @@ function parseLanguage(language) {
}
exports.parseLanguage = parseLanguage;
function isTracedLanguage(language) {
return (["cpp", "java", "csharp", "swift"].includes(language) ||
return (["cpp", "java", "csharp"].includes(language) ||
(process.env["CODEQL_EXTRACTOR_GO_BUILD_TRACING"] === "on" &&
language === Language.go));
}

View File

@@ -1 +1 @@
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QASX;AATD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EATW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QASnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CACL,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;QACrD,CAAC,OAAO,CAAC,GAAG,CAAC,mCAAmC,CAAC,KAAK,IAAI;YACxD,QAAQ,KAAK,QAAQ,CAAC,EAAE,CAAC,CAC5B,CAAC;AACJ,CAAC;AAND,4CAMC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QAQX;AARD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;AACf,CAAC,EARW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAQnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CACL,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;QAC5C,CAAC,OAAO,CAAC,GAAG,CAAC,mCAAmC,CAAC,KAAK,IAAI;YACxD,QAAQ,KAAK,QAAQ,CAAC,EAAE,CAAC,CAC5B,CAAC;AACJ,CAAC;AAND,4CAMC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}

11 lib/runner.js generated

@@ -18,20 +18,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const commander_1 = require("commander");
const del_1 = __importDefault(require("del"));
const analyze_1 = require("./analyze");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
@@ -138,7 +133,7 @@ program
const checkoutPath = cmd.checkoutPath || process.cwd();
// Wipe the temp dir
logger.info(`Cleaning temp directory ${tempDir}`);
await (0, del_1.default)(tempDir, { force: true });
fs.rmSync(tempDir, { recursive: true, force: true });
fs.mkdirSync(tempDir, { recursive: true });
const auth = await (0, util_1.getGitHubAuth)(logger, cmd.githubAuth, cmd.githubAuthStdin);
const apiDetails = {
@@ -164,9 +159,9 @@ program
}
await (0, util_1.enrichEnvironment)(util_1.Mode.runner, codeql);
const workspacePath = checkoutPath;
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, "", "", (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, logger);
const sourceRoot = checkoutPath;
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel(), (0, feature_flags_1.createFeatureFlags)([]));
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel());
if (tracerConfig === undefined) {
return;
}
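
Two themes run through the runner.js hunks above: the del package is replaced by Node's built-in fs.rmSync, and the feature-flag plumbing disappears from the initConfig and runInit calls. A minimal sketch of the fs-only temp-directory reset (the function name here is illustrative, not from the source):

import * as fs from "fs";

// rmSync with recursive + force behaves like `rm -rf`: it removes the tree and
// does not throw if the path is already absent, so no existence check is needed.
function resetTempDir(tempDir: string): void {
  fs.rmSync(tempDir, { recursive: true, force: true });
  fs.mkdirSync(tempDir, { recursive: true });
}

fs.rmSync with these options requires Node 14.14 or later, which is presumably what lets the extra dependency be dropped.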

File diff suppressed because one or more lines are too long

50 lib/testing-utils.js generated

@@ -19,12 +19,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
const github = __importStar(require("@actions/github"));
exports.setupActionsVars = exports.setupTests = void 0;
const sinon = __importStar(require("sinon"));
const apiClient = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql"));
const util_1 = require("./util");
function wrapOutput(context) {
// Function signature taken from Socket.write.
// Note there are two overloads:
@@ -90,51 +87,6 @@ exports.setupTests = setupTests;
function setupActionsVars(tempDir, toolsDir) {
process.env["RUNNER_TEMP"] = tempDir;
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
process.env["GITHUB_WORKSPACE"] = tempDir;
}
exports.setupActionsVars = setupActionsVars;
function getRecordingLogger(messages) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
exports.getRecordingLogger = getRecordingLogger;
/** Mock the HTTP request to the feature flags enablement API endpoint. */
function mockFeatureFlagApiEndpoint(responseStatusCode, response) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request");
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql-action/features");
if (responseStatusCode < 300) {
optInSpy.resolves({
status: responseStatusCode,
data: response,
headers: {},
url: "GET /repos/:owner/:repo/code-scanning/codeql-action/features",
});
}
else {
optInSpy.throws(new util_1.HTTPError("some error message", responseStatusCode));
}
sinon.stub(apiClient, "getApiClient").value(() => client);
}
exports.mockFeatureFlagApiEndpoint = mockFeatureFlagApiEndpoint;
//# sourceMappingURL=testing-utils.js.map
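
This hunk removes the getRecordingLogger and mockFeatureFlagApiEndpoint helpers (and the @actions/github import they needed) from the compiled testing-utils.js. For reference, a sketch of the stubbing pattern the removed helper used, closely following the deleted code; the error path is simplified here to a plain Error rather than the util_1.HTTPError the original threw:

import * as github from "@actions/github";
import * as sinon from "sinon";

// Stub one Octokit route so tests can fake the feature-flags API response
// without any network traffic.
function stubFeatureFlagsEndpoint(statusCode: number, payload: unknown) {
  const client = github.getOctokit("123"); // dummy token; never used for a real request
  const requestStub = sinon.stub(client, "request");
  const route = requestStub.withArgs(
    "GET /repos/:owner/:repo/code-scanning/codeql-action/features"
  );
  if (statusCode < 300) {
    route.resolves({
      status: statusCode,
      data: payload,
      headers: {},
      url: "GET /repos/:owner/:repo/code-scanning/codeql-action/features",
    });
  } else {
    route.throws(new Error(`feature flags request failed with status ${statusCode}`));
  }
  return client;
}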

lib/testing-utils.js.map

@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;IAC5C,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,OAAO,CAAC;AAC5C,CAAC;AAJD,4CAIC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8D
AA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CAAC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,6CAA+B;AAE/B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC"}

12 lib/toolcache.js generated

@@ -18,9 +18,6 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadTool = exports.findAllVersions = exports.find = exports.cacheDir = exports.extractTar = void 0;
const fs = __importStar(require("fs"));
@@ -30,7 +27,6 @@ const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const io = __importStar(require("@actions/io"));
const actionsToolcache = __importStar(require("@actions/tool-cache"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const del_1 = __importDefault(require("del"));
const semver = __importStar(require("semver"));
const uuid_1 = require("uuid");
const util_1 = require("./util");
@@ -127,7 +123,7 @@ async function cacheDir(sourceDir, tool, version, toolCacheDir, logger) {
throw new Error("sourceDir is not a directory");
}
// Create the tool dir
const destPath = await createToolPath(tool, version, arch, toolCacheDir, logger);
const destPath = createToolPath(tool, version, arch, toolCacheDir, logger);
// copy each child item. do not move. move can fail on Windows
// due to anti-virus software having an open handle on a file.
for (const itemName of fs.readdirSync(sourceDir)) {
@@ -236,12 +232,12 @@ function createExtractFolder(tempDir) {
}
return dest;
}
async function createToolPath(tool, version, arch, toolCacheDir, logger) {
function createToolPath(tool, version, arch, toolCacheDir, logger) {
const folderPath = path.join(toolCacheDir, tool, semver.clean(version) || version, arch || "");
logger.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
await (0, del_1.default)(folderPath, { force: true });
await (0, del_1.default)(markerPath, { force: true });
fs.rmSync(folderPath, { recursive: true, force: true });
fs.rmSync(markerPath, { recursive: true, force: true });
fs.mkdirSync(folderPath, { recursive: true });
return folderPath;
}
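
The toolcache.js hunks make createToolPath synchronous, again trading the awaited del calls for fs.rmSync, so cacheDir no longer needs to await it. Put together, the changed helper reads roughly like this sketch (logging dropped for brevity):

import * as fs from "fs";
import * as path from "path";
import * as semver from "semver";

// Prepare an empty tool-cache folder: remove any stale copy and its
// ".complete" marker, then recreate the directory tree.
function createToolPath(tool: string, version: string, arch: string, toolCacheDir: string): string {
  const folderPath = path.join(toolCacheDir, tool, semver.clean(version) || version, arch || "");
  const markerPath = `${folderPath}.complete`;
  fs.rmSync(folderPath, { recursive: true, force: true });
  fs.rmSync(markerPath, { recursive: true, force: true });
  fs.mkdirSync(folderPath, { recursive: true });
  return folderPath;
}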

Some files were not shown because too many files have changed in this diff.