Mirror of https://github.com/github/codeql-action.git (synced 2025-12-15 03:49:11 +08:00)

Compare commits: codeql-bun ... v1.1.6 (73 commits)
| SHA1 |
|---|
| 4c1021c504 |
| 9da34a6ec6 |
| f83be76fd8 |
| b45efc9e42 |
| 75743c96fc |
| 03a275bc11 |
| 28eead2408 |
| a4da970395 |
| 34a1681e50 |
| 8833977736 |
| bfe9e81020 |
| 1d58cc1f27 |
| d8576e34bf |
| f1060fbba0 |
| af34c6da92 |
| 282b607642 |
| f0e2f3c053 |
| 73ba7ffb48 |
| 2f4dd4bb41 |
| 8237e85158 |
| eea7cf19ff |
| fdc2a903c1 |
| c22162c09d |
| 01c72238c1 |
| 63b2636c23 |
| 0ed0799824 |
| 9e403590f4 |
| 45b96c3de6 |
| 9a709c116e |
| 43c9f26143 |
| b949e494e4 |
| 3d23aade46 |
| d625a00cee |
| 077ec096bb |
| 4d6e9c02ac |
| 839aa81918 |
| 6d1f0a0357 |
| 88db5e75ec |
| d068f5372a |
| 044f112dc1 |
| f7846479e6 |
| d0bd80897c |
| bed132dae4 |
| 9d26fe0cb3 |
| 6e57bbac6c |
| 85cfdb24f4 |
| df164705ad |
| da7944b165 |
| 33599909af |
| f143182488 |
| 0b037b4fcb |
| 1668e0a2bf |
| bd4757cd6b |
| 5fb01dd153 |
| 124e7d96a6 |
| b8f3a377bf |
| 4b465cb3ce |
| d76b18254a |
| 33f749f1c9 |
| ccda44cac5 |
| 81827d3fc6 |
| b386fd4443 |
| 2a7a517ea5 |
| ca5ed24270 |
| fb22523acc |
| b887a2ce1e |
| 686c3a37f0 |
| c9973ef56b |
| c3010cb18a |
| 5d72058994 |
| 76c3e91b1f |
| 5c2600e559 |
| a92e8775d8 |
.github/update-release-branch.py (vendored, 193 lines changed)

@@ -1,12 +1,9 @@
import argparse
import datetime
from github import Github
import random
import requests
import subprocess
import sys
import json
import datetime
import os
import subprocess

EMPTY_CHANGELOG = """# CodeQL Action and CodeQL Runner Changelog

@@ -16,12 +13,12 @@ No user facing changes.

"""

# The branch being merged from.
# This is the one that contains day-to-day development work.
MAIN_BRANCH = 'main'
# The branch being merged into.
# This is the release branch that users reference.
LATEST_RELEASE_BRANCH = 'v1'
# Value of the mode flag for a v1 release
V1_MODE = 'v1-release'

# Value of the mode flag for a v2 release
V2_MODE = 'v2-release'

# Name of the remote
ORIGIN = 'origin'

@@ -38,8 +35,8 @@ def run_git(*args):
def branch_exists_on_remote(branch_name):
return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''

# Opens a PR from the given branch to the release branch
def open_pr(repo, all_commits, short_main_sha, branch_name):
# Opens a PR from the given branch to the target branch
def open_pr(repo, all_commits, source_branch_short_sha, new_branch_name, source_branch, target_branch, conductor, is_v2_release, labels):
# Sort the commits into the pull requests that introduced them,
# and any commits that don't have a pull request
pull_requests = []

@@ -61,9 +58,8 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):

# Start constructing the body text
body = []
body.append('Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH)
body.append('Merging ' + source_branch_short_sha + ' into ' + target_branch)

conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
body.append('')
body.append('Conductor for this PR is @' + conductor)

@@ -80,43 +76,40 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
body.append('')
body.append('Contains the following commits not from a pull request:')
for commit in commits_without_pull_requests:
body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + ' (@' + commit.author.login + ')')
author_description = ' (@' + commit.author.login + ')' if commit.author is not None else ''
body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + author_description)

body.append('')
body.append('Please review the following:')
body.append(' - [ ] The CHANGELOG displays the correct version and date.')
body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
body.append(' - [ ] There are no unexpected commits being merged into the ' + LATEST_RELEASE_BRANCH + ' branch.')
body.append(' - [ ] There are no unexpected commits being merged into the ' + target_branch + ' branch.')
body.append(' - [ ] The docs team is aware of any documentation changes that need to be released.')
body.append(' - [ ] The mergeback PR is merged back into ' + MAIN_BRANCH + ' after this PR is merged.')
if is_v2_release:
body.append(' - [ ] The mergeback PR is merged back into ' + source_branch + ' after this PR is merged.')
body.append(' - [ ] The v1 release PR is merged after this PR is merged.')

title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
title = 'Merge ' + source_branch + ' into ' + target_branch

# Create the pull request
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft so that
# a maintainer can take the PR out of draft, thereby triggering the PR checks.
pr = repo.create_pull(title=title, body='\n'.join(body), head=branch_name, base=LATEST_RELEASE_BRANCH, draft=True)
pr = repo.create_pull(title=title, body='\n'.join(body), head=new_branch_name, base=target_branch, draft=True)
pr.add_to_labels(*labels)
print('Created PR #' + str(pr.number))

# Assign the conductor
pr.add_to_assignees(conductor)
print('Assigned PR to ' + conductor)

# Gets the person who should be in charge of the mergeback PR
def get_conductor(repo, pull_requests, other_commits):
# If there are any PRs then use whoever merged the last one
if len(pull_requests) > 0:
return get_merger_of_pr(repo, pull_requests[-1])

# Otherwise take the author of the latest commit
return other_commits[-1].author.login

# Gets a list of the SHAs of all commits that have happened on main
# since the release branched off.
# This will not include any commits that exist on the release branch
# that aren't on main.
def get_commit_difference(repo):
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '..' + ORIGIN + '/' + MAIN_BRANCH).strip().split('\n')
# Gets a list of the SHAs of all commits that have happened on the source branch
# since the last release to the target branch.
# This will not include any commits that exist on the target branch
# that aren't on the source branch.
def get_commit_difference(repo, source_branch, target_branch):
# Passing split nothing means that the empty string splits to nothing: compare `''.split() == []`
# to `''.split('\n') == ['']`.
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + target_branch + '..' + ORIGIN + '/' + source_branch).strip().split()

# Convert to full-fledged commit objects
commits = [repo.get_commit(c) for c in commits]

@@ -136,7 +129,7 @@ def get_truncated_commit_message(commit):
else:
return message

# Converts a commit into the PR that introduced it to the main branch.
# Converts a commit into the PR that introduced it to the source branch.
# Returns the PR object, or None if no PR could be found.
def get_pr_for_commit(repo, commit):
prs = commit.get_pulls()

@@ -179,29 +172,69 @@ def update_changelog(version):


def main():
if len(sys.argv) != 3:
raise Exception('Usage: update-release.branch.py <github token> <repository nwo>')
github_token = sys.argv[1]
repository_nwo = sys.argv[2]
parser = argparse.ArgumentParser('update-release-branch.py')

repo = Github(github_token).get_repo(repository_nwo)
parser.add_argument(
'--github-token',
type=str,
required=True,
help='GitHub token, typically from GitHub Actions.'
)
parser.add_argument(
'--repository-nwo',
type=str,
required=True,
help='The nwo of the repository, for example github/codeql-action.'
)
parser.add_argument(
'--mode',
type=str,
required=True,
choices=[V2_MODE, V1_MODE],
help=f"Which release to perform. '{V2_MODE}' uses main as the source branch and v2 as the target branch. " +
f"'{V1_MODE}' uses v2 as the source branch and v1 as the target branch."
)
parser.add_argument(
'--conductor',
type=str,
required=True,
help='The GitHub handle of the person who is conducting the release process.'
)

args = parser.parse_args()

if args.mode == V2_MODE:
source_branch = 'main'
target_branch = 'v2'
elif args.mode == V1_MODE:
source_branch = 'v2'
target_branch = 'v1'
else:
raise ValueError(f"Unexpected value for release mode: '{args.mode}'")

repo = Github(args.github_token).get_repo(args.repository_nwo)
version = get_current_version()

if args.mode == V1_MODE:
# Change the version number to a v1 equivalent
version = get_current_version()
version = f'1{version[1:]}'

# Print what we intend to go
print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
short_main_sha = run_git('rev-parse', '--short', ORIGIN + '/' + MAIN_BRANCH).strip()
print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)
print('Considering difference between ' + source_branch + ' and ' + target_branch)
source_branch_short_sha = run_git('rev-parse', '--short', ORIGIN + '/' + source_branch).strip()
print('Current head of ' + source_branch + ' is ' + source_branch_short_sha)

# See if there are any commits to merge in
commits = get_commit_difference(repo)
commits = get_commit_difference(repo=repo, source_branch=source_branch, target_branch=target_branch)
if len(commits) == 0:
print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
print('No commits to merge from ' + source_branch + ' to ' + target_branch)
return

# The branch name is based off of the name of branch being merged into
# and the SHA of the branch being merged from. Thus if the branch already
# exists we can assume we don't need to recreate it.
new_branch_name = 'update-v' + version + '-' + short_main_sha
new_branch_name = 'update-v' + version + '-' + source_branch_short_sha
print('Branch name is ' + new_branch_name)

# Check if the branch already exists. If so we can abort as this script
@@ -212,19 +245,79 @@ def main():

# Create the new branch and push it to the remote
print('Creating branch ' + new_branch_name)
run_git('checkout', '-b', new_branch_name, ORIGIN + '/' + MAIN_BRANCH)

if args.mode == V1_MODE:
# If we're performing a backport, start from the v1 branch
print(f'Creating {new_branch_name} from the {ORIGIN}/v1 branch')
run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/v1')

# Revert the commit that we made as part of the last release that updated the version number and
# changelog to refer to 1.x.x variants. This avoids merge conflicts in the changelog and
# package.json files when we merge in the v2 branch.
# This commit will not exist the first time we release the v1 branch from the v2 branch, so we
# use `git log --grep` to conditionally revert the commit.
print('Reverting the 1.x.x version number and changelog updates from the last release to avoid conflicts')
v1_update_commits = run_git('log', '--grep', '^Update version and changelog for v', '--format=%H').split()

if len(v1_update_commits) > 0:
print(f' Reverting {v1_update_commits[0]}')
# Only revert the newest commit as older ones will already have been reverted in previous
# releases.
run_git('revert', v1_update_commits[0], '--no-edit')

# Also revert the "Update checked-in dependencies" commit created by Actions.
update_dependencies_commit = run_git('log', '--grep', '^Update checked-in dependencies', '--format=%H').split()[0]
print(f' Reverting {update_dependencies_commit}')
run_git('revert', update_dependencies_commit, '--no-edit')

else:
print(' Nothing to revert.')

print(f'Merging {ORIGIN}/{source_branch} into the release prep branch')
run_git('merge', f'{ORIGIN}/{source_branch}', '--no-edit')

# Migrate the package version number from a v2 version number to a v1 version number
print(f'Setting version number to {version}')
subprocess.run(['npm', 'version', version, '--no-git-tag-version'])
run_git('add', 'package.json', 'package-lock.json')

# Migrate the changelog notes from v2 version numbers to v1 version numbers
print('Migrating changelog notes from v2 to v1')
subprocess.run(['sed', '-i', 's/^## 2\./## 1./g', 'CHANGELOG.md'])

# Remove changelog notes from v2 that don't apply to v1
subprocess.run(['sed', '-i', '/^- \[v2+ only\]/d', 'CHANGELOG.md'])

# Amend the commit generated by `npm version` to update the CHANGELOG
run_git('add', 'CHANGELOG.md')
run_git('commit', '-m', f'Update version and changelog for v{version}')
else:
# If we're performing a standard release, there won't be any new commits on the target branch,
# as these will have already been merged back into the source branch. Therefore we can just
# start from the source branch.
run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{source_branch}')

print('Updating changelog')
update_changelog(version)

# Create a commit that updates the CHANGELOG
run_git('add', 'CHANGELOG.md')
run_git('commit', '-m', version)
run_git('commit', '-m', f'Update changelog for v{version}')

run_git('push', ORIGIN, new_branch_name)

# Open a PR to update the branch
open_pr(repo, commits, short_main_sha, new_branch_name)
open_pr(
repo,
commits,
source_branch_short_sha,
new_branch_name,
source_branch=source_branch,
target_branch=target_branch,
conductor=args.conductor,
is_v2_release=args.mode == V2_MODE,
labels=['Update dependencies'] if args.mode == V1_MODE else [],
)

if __name__ == '__main__':
main()
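Two small details in the revised script are easy to miss. The comment in `get_commit_difference` refers to how an empty `git log` result is handled once `split('\n')` becomes `split()`, and v1-release mode derives its version number from the v2 one by swapping the leading digit. A minimal sketch of both in plain Python (illustrative only, not part of the diff):

```python
# Old behaviour: splitting an empty string on '\n' yields one bogus empty "SHA".
assert ''.strip().split('\n') == ['']
# New behaviour: splitting on whitespace yields a genuinely empty list,
# so `len(commits) == 0` correctly detects that there is nothing to merge.
assert ''.strip().split() == []

# v1-release mode turns a v2 version number into its v1 equivalent.
version = '2.1.6'
assert f'1{version[1:]}' == '1.1.6'
```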
.github/workflows/__analyze-ref-input.yml (generated, vendored, 2 lines changed)
@@ -65,7 +65,7 @@ jobs:
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: "Analyze: 'ref' and 'sha' from inputs"
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__debug-artifacts.yml (generated, vendored, 2 lines changed)
@@ -49,7 +49,7 @@ jobs:
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Debug artifact upload
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__extractor-ram-threads.yml (generated, vendored, 2 lines changed)
@@ -27,7 +27,7 @@ jobs:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
name: Extractor ram and threads options test
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__go-custom-queries.yml (generated, vendored, 2 lines changed)
@@ -65,7 +65,7 @@ jobs:
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: 'Go: Custom queries'
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__go-custom-tracing-autobuild.yml (generated, vendored, 2 lines changed)
@@ -49,7 +49,7 @@ jobs:
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: 'Go: Autobuild custom tracing'
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__go-custom-tracing.yml (generated, vendored, 2 lines changed)
@@ -65,7 +65,7 @@ jobs:
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: 'Go: Custom tracing'
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__javascript-source-root.yml (generated, vendored, 2 lines changed)
@@ -31,7 +31,7 @@ jobs:
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: Custom source root
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__multi-language-autodetect.yml (generated, vendored, 2 lines changed)
@@ -49,7 +49,7 @@ jobs:
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Multi-language repository
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__packaging-config-inputs-js.yml (generated, vendored, 2 lines changed)
@@ -29,7 +29,7 @@ jobs:
|
||||
- os: macos-latest
|
||||
version: nightly-20210831
|
||||
name: 'Packaging: Config and input'
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__packaging-config-js.yml (generated, vendored, 2 lines changed)
@@ -29,7 +29,7 @@ jobs:
|
||||
- os: macos-latest
|
||||
version: nightly-20210831
|
||||
name: 'Packaging: Config file'
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__packaging-inputs-js.yml (generated, vendored, 2 lines changed)
@@ -29,7 +29,7 @@ jobs:
|
||||
- os: macos-latest
|
||||
version: nightly-20210831
|
||||
name: 'Packaging: Action input'
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__remote-config.yml (generated, vendored, 2 lines changed)
@@ -65,7 +65,7 @@ jobs:
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: Remote config file
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__rubocop-multi-language.yml (generated, vendored, 2 lines changed)
@@ -37,7 +37,7 @@ jobs:
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: RuboCop multi-language
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__split-workflow.yml (generated, vendored, 2 lines changed)
@@ -29,7 +29,7 @@ jobs:
|
||||
- os: macos-latest
|
||||
version: nightly-20210831
|
||||
name: Split workflow
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__test-local-codeql.yml (generated, vendored, 2 lines changed)
@@ -27,7 +27,7 @@ jobs:
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: Local CodeQL bundle
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__test-proxy.yml (generated, vendored, 2 lines changed)
@@ -27,7 +27,7 @@ jobs:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
name: Proxy test
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__test-ruby.yml (generated, vendored, 2 lines changed)
@@ -37,7 +37,7 @@ jobs:
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Ruby analysis
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__unset-environment.yml (generated, vendored, 2 lines changed)
@@ -37,7 +37,7 @@ jobs:
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: Test unsetting environment variables
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__upload-ref-sha-input.yml (generated, vendored, 2 lines changed)
@@ -65,7 +65,7 @@ jobs:
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: "Upload-sarif: 'ref' and 'sha' from inputs"
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
||||
.github/workflows/__with-checkout-path.yml (generated, vendored, new file, 145 lines)
@@ -0,0 +1,145 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Use a custom `checkout_path`
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
with-checkout-path:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: Use a custom `checkout_path`
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
path: x/y/z/some-path
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
# it's enough to test one compiled language and one interpreted language
|
||||
languages: csharp,javascript
|
||||
source-path: x/y/z/some-path/tests/multi-language-repo
|
||||
debug: true
|
||||
- name: Build code (non-windows)
|
||||
shell: bash
|
||||
if: ${{ runner.os != 'Windows' }}
|
||||
run: |
|
||||
$CODEQL_RUNNER x/y/z/some-path/tests/multi-language-repo/build.sh
|
||||
- name: Build code (windows)
|
||||
shell: bash
|
||||
if: ${{ runner.os == 'Windows' }}
|
||||
run: |
|
||||
x/y/z/some-path/tests/multi-language-repo/build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||
ref: v1.1.0
|
||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
upload: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
ref: v1.1.0
|
||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Verify SARIF after upload
|
||||
shell: bash
|
||||
run: |
|
||||
EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
|
||||
EXPECTED_REF="v1.1.0"
|
||||
EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
|
||||
|
||||
ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
|
||||
ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
|
||||
ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
|
||||
|
||||
if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
|
||||
echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
|
||||
echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
|
||||
echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
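The verification step above cross-checks three fields of the uploaded payload with jq. The same check expressed in Python, as a sketch only (the `$RUNNER_TEMP/payload.json` path and the field names are taken from the workflow above):

```python
import json
import os
import sys

# Same fields the workflow checks with jq against $RUNNER_TEMP/payload.json.
expected = {
    "commit_oid": "474bbf07f9247ffe1856c6a0f94aeeb10e7afee6",
    "ref": "v1.1.0",
}
checkout_uri_suffix = "/x/y/z/some-path/tests/multi-language-repo"

with open(os.path.join(os.environ["RUNNER_TEMP"], "payload.json")) as f:
    payload = json.load(f)

for key, value in expected.items():
    if payload.get(key) != value:
        sys.exit(f"Invalid {key}. Expected: {value} Actual: {payload.get(key)}")
if not payload.get("checkout_uri", "").endswith(checkout_uri_suffix):
    sys.exit(f"Invalid checkout URI suffix: {payload.get('checkout_uri')}")
```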
.github/workflows/codeql.yml (vendored, 4 lines changed)

@@ -2,9 +2,9 @@ name: "CodeQL action"

on:
push:
branches: [main, v1]
branches: [main, v1, v2]
pull_request:
branches: [main, v1]
branches: [main, v1, v2]
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
.github/workflows/post-release-mergeback.yml (vendored, 16 lines changed)

@@ -15,6 +15,7 @@ on:
push:
branches:
- v1
- v2

jobs:
merge-back:
@@ -25,10 +26,13 @@ jobs:
HEAD_BRANCH: "${{ github.head_ref || github.ref }}"

steps:
- name: Dump GitHub Event context
- name: Dump environment
run: env

- name: Dump GitHub context
env:
GITHUB_EVENT_CONTEXT: "${{ toJson(github.event) }}"
run: echo "$GITHUB_EVENT_CONTEXT"
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"

- uses: actions/checkout@v2
- uses: actions/setup-node@v2
@@ -90,7 +94,7 @@ jobs:
git push origin --follow-tags "$VERSION"

- name: Create mergeback branch
if: steps.check.outputs.exists != 'true'
if: steps.check.outputs.exists != 'true' && contains(github.ref, 'v2')
env:
VERSION: "${{ steps.getVersion.outputs.version }}"
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
@@ -100,11 +104,13 @@ jobs:
PR_TITLE="Mergeback $VERSION $HEAD_BRANCH into $BASE_BRANCH"
PR_BODY="Updates version and changelog."

# Update the version number ready for the next release
npm version patch --no-git-tag-version

# Update the changelog
perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
git add .
git commit -m "Update changelog and version after $VERSION"
npm version patch

git push origin "$NEW_BRANCH"
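The perl one-liner above prepends a fresh `## [UNRELEASED]` section before line 3 of CHANGELOG.md, directly below the title and its following blank line. An equivalent sketch in Python, for readers less familiar with perl's `$.` line counter (illustrative only, not what the workflow runs):

```python
from pathlib import Path

HEADER = "## [UNRELEASED]\n\nNo user facing changes.\n\n"

changelog = Path("CHANGELOG.md")
lines = changelog.read_text().splitlines(keepends=True)
# perl's `if ($. == 3)` prepends the block to the third line of the file.
lines[2:2] = [HEADER]
changelog.write_text("".join(lines))
```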
.github/workflows/pr-checks.yml (vendored, 60 lines changed)
@@ -13,7 +13,7 @@ jobs:
|
||||
lint-js:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@@ -22,17 +22,45 @@ jobs:
|
||||
|
||||
check-js:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
node-types-version: [12.12, current]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Update version of @types/node
|
||||
if: matrix.node-types-version != 'current'
|
||||
env:
|
||||
NODE_TYPES_VERSION: ${{ matrix.node-types-version }}
|
||||
run: |
|
||||
# Export `NODE_TYPES_VERSION` so it's available to jq
|
||||
export NODE_TYPES_VERSION="${NODE_TYPES_VERSION}"
|
||||
contents=$(jq '.devDependencies."@types/node" = env.NODE_TYPES_VERSION' package.json)
|
||||
echo "${contents}" > package.json
|
||||
# Usually we run `npm install` on macOS to ensure that we pick up macOS-only dependencies.
|
||||
# However we're not checking in the updated lockfile here, so it's fine to run
|
||||
# `npm install` on Linux.
|
||||
npm install
|
||||
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
git config --global user.email "github-actions@github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
# The period in `git add --all .` ensures that we stage deleted files too.
|
||||
git add --all .
|
||||
git commit -m "Use @types/node=${NODE_TYPES_VERSION}"
|
||||
fi
|
||||
|
||||
- name: Check generated JS
|
||||
run: .github/workflows/script/check-js.sh
|
||||
|
||||
check-node-modules:
|
||||
name: Check modules up to date
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@@ -42,7 +70,7 @@ jobs:
|
||||
verify-pr-checks:
|
||||
name: Verify PR checks up to date
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@@ -64,7 +92,7 @@ jobs:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@@ -74,7 +102,7 @@ jobs:
|
||||
runner-analyze-javascript-ubuntu:
|
||||
name: Runner ubuntu JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -103,7 +131,7 @@ jobs:
|
||||
runner-analyze-javascript-windows:
|
||||
name: Runner windows JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
@@ -128,7 +156,7 @@ jobs:
|
||||
runner-analyze-javascript-macos:
|
||||
name: Runner macos JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: macos-latest
|
||||
|
||||
steps:
|
||||
@@ -153,7 +181,7 @@ jobs:
|
||||
runner-analyze-csharp-ubuntu:
|
||||
name: Runner ubuntu C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -193,7 +221,7 @@ jobs:
|
||||
needs: [check-js, check-node-modules]
|
||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||
# `windows-latest`.
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: windows-2019
|
||||
|
||||
steps:
|
||||
@@ -238,7 +266,7 @@ jobs:
|
||||
|
||||
runner-analyze-csharp-macos:
|
||||
name: Runner macos C# analyze
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: macos-latest
|
||||
|
||||
@@ -277,7 +305,7 @@ jobs:
|
||||
|
||||
runner-analyze-csharp-autobuild-ubuntu:
|
||||
name: Runner ubuntu autobuild C# analyze
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
@@ -313,7 +341,7 @@ jobs:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-autobuild-windows:
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
name: Runner windows autobuild C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||
@@ -356,7 +384,7 @@ jobs:
|
||||
name: Runner macos autobuild C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@@ -394,7 +422,7 @@ jobs:
|
||||
name: Runner upload sarif
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
|
||||
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
||||
|
||||
@@ -417,7 +445,7 @@ jobs:
|
||||
name: Runner ubuntu extractor RAM and threads options
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
.github/workflows/python-deps.yml (vendored, 2 lines changed)
@@ -10,7 +10,7 @@ on:
|
||||
|
||||
jobs:
|
||||
test-setup-python-scripts:
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
||||
.github/workflows/release-runner.yml (vendored, 2 lines changed)
@@ -9,7 +9,7 @@ on:
|
||||
|
||||
jobs:
|
||||
release-runner:
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
|
||||
|
||||
.github/workflows/split.yml (vendored, 2 lines changed)
@@ -26,7 +26,7 @@ on:
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
env:
|
||||
CLI_RELEASE: "${{ github.event.inputs.cli-release }}"
|
||||
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
|
||||
|
||||
.github/workflows/update-dependencies.yml (vendored, 2 lines changed)
@@ -6,7 +6,7 @@ on:
|
||||
jobs:
|
||||
update:
|
||||
name: Update dependencies
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 45
|
||||
runs-on: macos-latest
|
||||
if: contains(github.event.pull_request.labels.*.name, 'Update dependencies') && (github.event.pull_request.head.repo.full_name == 'github/codeql-action')
|
||||
steps:
|
||||
|
||||
.github/workflows/update-release-branch.yml (vendored, 43 lines changed)

@@ -1,18 +1,28 @@
name: Update release branch
on:
repository_dispatch:
# Example of how to trigger this:
# curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
# Replace <token> with a personal access token from this page: https://github.com/settings/tokens
types: [update-release-branch]
# You can trigger this workflow via workflow dispatch to start a release.
# This will open a PR to update the v2 release branch.
workflow_dispatch:

# When the v2 release is complete, this workflow will open a PR to update the v1 release branch.
push:
branches:
- v2

jobs:
update:
timeout-minutes: 30
timeout-minutes: 45
runs-on: ubuntu-latest
if: ${{ github.repository == 'github/codeql-action' }}
if: github.repository == 'github/codeql-action'
steps:
- name: Dump environment
run: env

- name: Dump GitHub context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"

- uses: actions/checkout@v2
with:
# Need full history so we calculate diffs
@@ -33,5 +43,20 @@ jobs:
git config --global user.email "github-actions@github.com"
git config --global user.name "github-actions[bot]"

- name: Update release branch
run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
- name: Update v2 release branch
if: github.event_name == 'workflow_dispatch'
run: |
python .github/update-release-branch.py \
--github-token ${{ secrets.GITHUB_TOKEN }} \
--repository-nwo ${{ github.repository }} \
--mode v2-release \
--conductor ${GITHUB_ACTOR}

- name: Update v1 release branch
if: github.event_name == 'push'
run: |
python .github/update-release-branch.py \
--github-token ${{ secrets.GITHUB_TOKEN }} \
--repository-nwo ${{ github.repository }} \
--mode v1-release \
--conductor ${GITHUB_ACTOR}
@@ -7,7 +7,7 @@ on:
jobs:
update-supported-enterprise-server-versions:
name: Update Supported Enterprise Server Versions
timeout-minutes: 30
timeout-minutes: 45
runs-on: ubuntu-latest
if: ${{ github.repository == 'github/codeql-action' }}
@@ -1,8 +1,9 @@
# CodeQL Action Changelog

## [UNRELEASED]
## 1.1.6 - 30 Mar 2022

No user facing changes.
- Update default CodeQL bundle version to 2.8.4. [#990](https://github.com/github/codeql-action/pull/990)
- Fix a bug where an invalid `commit_oid` was being sent to code scanning when a custom checkout path was being used. [#956](https://github.com/github/codeql-action/pull/956)

## 1.1.5 - 15 Mar 2022

@@ -19,7 +20,7 @@ No user facing changes.

## 1.1.3 - 23 Feb 2022

- Fix bug where the CLR traces can continue tracing even after tracing should be stopped. [#938](https://github.com/github/codeql-action/pull/938)
- Fix a bug where the CLR traces can continue tracing even after tracing should be stopped. [#938](https://github.com/github/codeql-action/pull/938)

## 1.1.2 - 17 Feb 2022
@@ -61,16 +61,22 @@ Here are a few things you can do that will increase the likelihood of your pull
## Releasing (write access required)

1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v1` release branch.
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v2` release branch.

A release is automatically started every Monday via a scheduled run of this workflow, however you can start a release manually by triggering a run via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
1. The workflow run will open a pull request titled "Merge main into v1". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
You can start a release by triggering this workflow via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
1. The workflow run will open a pull request titled "Merge main into v2". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
1. Review the checklist items in the pull request description.
Once you've checked off all but the last of these, approve the PR and automerge it.
1. When the "Merge main into v1" pull request is merged into the `v1` branch, the "Tag release and merge back" workflow will create a mergeback PR.
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into v1" pull request, and bumps the patch version of the CodeQL Action.
Once you've checked off all but the last two of these, approve the PR and automerge it.
1. When the "Merge main into v2" pull request is merged into the `v2` branch, the "Tag release and merge back" workflow will create a mergeback PR.
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into v2" pull request, and bumps the patch version of the CodeQL Action.

Approve the mergeback PR and automerge it. Once the mergeback has been merged into main, the release is complete.
Approve the mergeback PR and automerge it.
1. When the "Merge main into v2" pull request is merged into the `v2` branch, the "Update release branch" workflow will create a "Merge v2 into v1" pull request to merge the changes since the last release into the `v1` release branch.
This ensures we keep both the `v1` and `v2` release branches up to date and fully supported.

Review the checklist items in the pull request description.
Once you've checked off all the items, approve the PR and automerge it.
1. Once the mergeback has been merged to `main` and the "Merge v2 into v1" PR has been merged to `v1`, the release is complete.

## Keeping the PR checks up to date (admin access required)

@@ -85,12 +91,12 @@ To regenerate the PR jobs for the action:

CHECKS="$(gh api repos/github/codeql-action/commits/${SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or . == "Update dependencies" or . == "Update Supported Enterprise Server Versions" | not)]')"
echo "{\"contexts\": ${CHECKS}}" > checks.json
gh api -X "PATCH" repos/github/codeql-action/branches/main/protection/required_status_checks --input checks.json
gh api -X "PATCH" repos/github/codeql-action/branches/v2/protection/required_status_checks --input checks.json
gh api -X "PATCH" repos/github/codeql-action/branches/v1/protection/required_status_checks --input checks.json
````

2. Go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules have been updated.
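The `gh api` calls above hit the branch protection REST endpoint for each release branch. The same update expressed in Python, as a rough sketch (the token and the check list here are placeholders; the real list is the one written to `checks.json` by the jq filter above):

```python
import requests

TOKEN = "<personal access token>"   # placeholder
checks = ["CodeQL", "Lint"]         # placeholder; normally the generated list of required checks

for branch in ("main", "v2", "v1"):
    # PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks
    resp = requests.patch(
        f"https://api.github.com/repos/github/codeql-action/branches/{branch}"
        "/protection/required_status_checks",
        headers={
            "Authorization": f"Bearer {TOKEN}",
            "Accept": "application/vnd.github+json",
        },
        json={"contexts": checks},
    )
    resp.raise_for_status()
```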

## Resources

- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
lib/actions-util.js (generated, 15 lines changed)
@@ -76,7 +76,7 @@ exports.getToolCacheDirectory = getToolCacheDirectory;
|
||||
/**
|
||||
* Gets the SHA of the commit that is currently checked out.
|
||||
*/
|
||||
const getCommitOid = async function (ref = "HEAD") {
|
||||
const getCommitOid = async function (checkoutPath, ref = "HEAD") {
|
||||
// Try to use git to get the current commit SHA. If that fails then
|
||||
// log but otherwise silently fall back to using the SHA from the environment.
|
||||
// The only time these two values will differ is during analysis of a PR when
|
||||
@@ -96,6 +96,7 @@ const getCommitOid = async function (ref = "HEAD") {
|
||||
process.stderr.write(data);
|
||||
},
|
||||
},
|
||||
cwd: checkoutPath,
|
||||
}).exec();
|
||||
return commitOid.trim();
|
||||
}
|
||||
@@ -115,6 +116,7 @@ const determineMergeBaseCommitOid = async function () {
|
||||
return undefined;
|
||||
}
|
||||
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
const checkoutPath = (0, exports.getOptionalInput)("checkout_path");
|
||||
try {
|
||||
let commitOid = "";
|
||||
let baseOid = "";
|
||||
@@ -139,6 +141,7 @@ const determineMergeBaseCommitOid = async function () {
|
||||
process.stderr.write(data);
|
||||
},
|
||||
},
|
||||
cwd: checkoutPath,
|
||||
}).exec();
|
||||
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
|
||||
if (commitOid === mergeSha &&
|
||||
@@ -427,6 +430,9 @@ async function getRef() {
|
||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||
const refInput = (0, exports.getOptionalInput)("ref");
|
||||
const shaInput = (0, exports.getOptionalInput)("sha");
|
||||
const checkoutPath = (0, exports.getOptionalInput)("checkout_path") ||
|
||||
(0, exports.getOptionalInput)("source-root") ||
|
||||
(0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE");
|
||||
const hasRefInput = !!refInput;
|
||||
const hasShaInput = !!shaInput;
|
||||
// If one of 'ref' or 'sha' are provided, both are required
|
||||
@@ -448,15 +454,14 @@ async function getRef() {
|
||||
if (!pull_ref_regex.test(ref)) {
|
||||
return ref;
|
||||
}
|
||||
const head = await (0, exports.getCommitOid)("HEAD");
|
||||
const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
|
||||
// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
|
||||
// in actions/checkout@v1 this may not be true as it checks out the repository
|
||||
// using GITHUB_REF. There is a subtle race condition where
|
||||
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
||||
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
|
||||
const hasChangedRef = sha !== head &&
|
||||
(await (0, exports.getCommitOid)(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
|
||||
head;
|
||||
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
|
||||
if (hasChangedRef) {
|
||||
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
||||
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
|
||||
@@ -550,7 +555,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
|
||||
statusReport.runner_os_release = os.release();
|
||||
}
|
||||
if (codeQlCliVersion !== undefined) {
|
||||
statusReport.codeql_cli_version = codeQlCliVersion;
|
||||
statusReport.codeql_version = codeQlCliVersion;
|
||||
}
|
||||
return statusReport;
|
||||
}
|
||||
|
||||
File diff suppressed because one or more lines are too long
lib/actions-util.test.js (generated, 24 lines changed)
@@ -39,6 +39,8 @@ function errorCodes(actual, expected) {
|
||||
await t.throwsAsync(actionsutil.getRef);
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
@@ -49,7 +51,10 @@ function errorCodes(actual, expected) {
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||
@@ -61,17 +66,23 @@ function errorCodes(actual, expected) {
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
|
||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
|
||||
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
||||
@@ -86,7 +97,10 @@ function errorCodes(actual, expected) {
|
||||
callback.restore();
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
||||
await t.throwsAsync(async () => {
|
||||
@@ -97,7 +111,11 @@ function errorCodes(actual, expected) {
|
||||
});
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
process.env["GITHUB_WORKSPACE"] = "/tmp";
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
||||
await t.throwsAsync(async () => {
|
||||
@@ -108,6 +126,7 @@ function errorCodes(actual, expected) {
|
||||
});
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("computeAutomationID()", async (t) => {
|
||||
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
|
||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
|
||||
@@ -461,6 +480,7 @@ on: ["push"]
|
||||
});
|
||||
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const envFile = path.join(tmpDir, "event.json");
|
||||
fs.writeFileSync(envFile, JSON.stringify({
|
||||
repository: {
|
||||
|
||||
File diff suppressed because one or more lines are too long
lib/analysis-paths.test.js (generated, 3 lines changed)
@@ -45,6 +45,7 @@ const util = __importStar(require("./util"));
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||
@@ -69,6 +70,7 @@ const util = __importStar(require("./util"));
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
|
||||
@@ -94,6 +96,7 @@ const util = __importStar(require("./util"));
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||
@@ -1 +1 @@
(source map for analysis-paths.test.js regenerated)
5 lib/analyze.js generated
@@ -131,11 +131,12 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
try {
if (hasPackWithCustomQueries) {
if (hasPackWithCustomQueries &&
!(await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_CONFIG_FILES))) {
logger.info("Performing analysis with custom CodeQL Packs.");
logger.startGroup(`Downloading custom packs for ${language}`);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const results = await codeql.packDownload(packsWithVersion);
logger.info(`Downloaded packs: ${results.packs
.map((r) => `${r.name}@${r.version || "latest"}`)
File diff suppressed because one or more lines are too long
1 lib/analyze.test.js generated
@@ -128,6 +128,7 @@ const util = __importStar(require("./util"));
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,
File diff suppressed because one or more lines are too long
30 lib/codeql.js generated
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
|
||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_CONFIG_FILES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
@@ -76,7 +76,7 @@ const CODEQL_VERSION_GROUP_RULES = "2.5.5";
|
||||
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
|
||||
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
|
||||
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
|
||||
const CODEQL_VERSION_CONFIG_FILES = "2.8.2"; // Versions before 2.8.2 weren't tolerant to unknown properties
|
||||
exports.CODEQL_VERSION_CONFIG_FILES = "2.8.2"; // Versions before 2.8.2 weren't tolerant to unknown properties
|
||||
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
||||
/**
|
||||
* This variable controls using the new style of tracing from the CodeQL
|
||||
@@ -468,9 +468,27 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
extraArgs.push(`--trace-process-level=${processLevel || 3}`);
|
||||
}
|
||||
}
|
||||
if (await util.codeQlVersionAbove(codeql, CODEQL_VERSION_CONFIG_FILES)) {
|
||||
if (await util.codeQlVersionAbove(codeql, exports.CODEQL_VERSION_CONFIG_FILES)) {
|
||||
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
|
||||
fs.writeFileSync(configLocation, yaml.dump(config.originalUserInput));
|
||||
const augmentedConfig = config.originalUserInput;
|
||||
if (config.injectedMlQueries) {
|
||||
// We need to inject the ML queries into the original user input before
|
||||
// we pass this on to the CLI, to make sure these get run.
|
||||
let packString = util_1.ML_POWERED_JS_QUERIES_PACK.packName;
|
||||
if (util_1.ML_POWERED_JS_QUERIES_PACK.version)
|
||||
packString = `${packString}@${util_1.ML_POWERED_JS_QUERIES_PACK.version}`;
|
||||
if (augmentedConfig.packs === undefined)
|
||||
augmentedConfig.packs = [];
|
||||
if (Array.isArray(augmentedConfig.packs)) {
|
||||
augmentedConfig.packs.push(packString);
|
||||
}
|
||||
else {
|
||||
if (!augmentedConfig.packs.javascript)
|
||||
augmentedConfig.packs["javascript"] = [];
|
||||
augmentedConfig.packs["javascript"].push(packString);
|
||||
}
|
||||
}
|
||||
fs.writeFileSync(configLocation, yaml.dump(augmentedConfig));
|
||||
extraArgs.push(`--codescanning-config=${configLocation}`);
|
||||
}
|
||||
await runTool(cmd, [
|
||||
@@ -593,7 +611,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
if (extraSearchPath !== undefined) {
|
||||
codeqlArgs.push("--additional-packs", extraSearchPath);
|
||||
}
|
||||
if (!(await util.codeQlVersionAbove(this, CODEQL_VERSION_CONFIG_FILES))) {
|
||||
if (!(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_CONFIG_FILES))) {
|
||||
codeqlArgs.push(querySuitePath);
|
||||
}
|
||||
await runTool(cmd, codeqlArgs);
|
||||
@@ -622,7 +640,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||
}
|
||||
codeqlArgs.push(databasePath);
|
||||
if (!(await util.codeQlVersionAbove(this, CODEQL_VERSION_CONFIG_FILES))) {
|
||||
if (!(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_CONFIG_FILES))) {
|
||||
codeqlArgs.push(...querySuitePaths);
|
||||
}
|
||||
// capture stdout, which contains analysis summaries
|
||||
|
||||
File diff suppressed because one or more lines are too long
28 lib/config-utils.js generated
@@ -118,9 +118,11 @@ const builtinSuites = ["security-extended", "security-and-quality"];
|
||||
/**
|
||||
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
|
||||
* Throws an error if suiteName is not a valid builtin suite.
|
||||
* May inject ML queries, and the return value will declare if this was done.
|
||||
*/
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
|
||||
var _a;
|
||||
let injectedMlQueries = false;
|
||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||
if (!found) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
||||
@@ -137,9 +139,11 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
||||
packs.javascript = [];
|
||||
}
|
||||
packs.javascript.push(util_1.ML_POWERED_JS_QUERIES_PACK);
|
||||
injectedMlQueries = true;
|
||||
}
|
||||
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
|
||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
||||
return injectedMlQueries;
|
||||
}
|
||||
/**
|
||||
* Retrieve the set of queries at localQueryPath and add them to resultMap.
|
||||
@@ -196,6 +200,11 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
|
||||
* parsing the 'uses' actions in the workflow file. So it can handle
|
||||
* local paths starting with './', or references to remote repos, or
|
||||
* a finite set of hardcoded terms for builtin suites.
|
||||
*
|
||||
* This may inject ML queries into the packs to use, and the return value will
|
||||
* declare if this was done.
|
||||
*
|
||||
* @returns whether or not we injected ML queries into the packs
|
||||
*/
|
||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
|
||||
queryUses = queryUses.trim();
|
||||
@@ -205,15 +214,15 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
|
||||
// Check for the local path case before we start trying to parse the repository name
|
||||
if (queryUses.startsWith("./")) {
|
||||
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), workspacePath, configFile);
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
// Check for one of the builtin suites
|
||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||
await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
|
||||
return;
|
||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
|
||||
}
|
||||
// Otherwise, must be a reference to another repo
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||
return false;
|
||||
}
|
||||
// Regex validating stars in paths or paths-ignore entries.
|
||||
// The intention is to only allow ** to appear when immediately
|
||||
@@ -422,12 +431,15 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
|
||||
return parsedLanguages;
|
||||
}
|
||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
|
||||
let injectedMlQueries = false;
|
||||
queriesInput = queriesInput.trim();
|
||||
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
||||
queriesInput = queriesInput.replace(/^\+/, "");
|
||||
for (const query of queriesInput.split(",")) {
|
||||
await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
injectedMlQueries = injectedMlQueries || didInject;
|
||||
}
|
||||
return injectedMlQueries;
|
||||
}
|
||||
// Returns true if either no queries were provided in the workflow.
|
||||
// or if the queries in the workflow were provided in "additive" mode,
|
||||
@@ -454,8 +466,9 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
|
||||
}
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
|
||||
let injectedMlQueries = false;
|
||||
if (queriesInput) {
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
}
|
||||
return {
|
||||
languages,
|
||||
@@ -472,6 +485,7 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
injectedMlQueries,
|
||||
};
|
||||
}
|
||||
exports.getDefaultConfig = getDefaultConfig;
|
||||
@@ -524,8 +538,9 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
// they should take precedence over the queries in the config file
|
||||
// unless they're prefixed with "+", in which case they supplement those
|
||||
// in the config file.
|
||||
let injectedMlQueries = false;
|
||||
if (queriesInput) {
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
}
|
||||
if (shouldAddConfigFileQueries(queriesInput) &&
|
||||
QUERIES_PROPERTY in parsedYAML) {
|
||||
@@ -578,6 +593,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
injectedMlQueries,
|
||||
};
|
||||
}
|
||||
/**
|
||||
|
||||
File diff suppressed because one or more lines are too long
1 lib/config-utils.test.js generated
@@ -221,6 +221,7 @@ function mockListLanguages(languages) {
debugMode: false,
debugArtifactName: "my-artifact",
debugDatabaseName: "my-db",
injectedMlQueries: false,
};
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
File diff suppressed because one or more lines are too long
1 lib/database-upload.test.js generated
@@ -58,6 +58,7 @@ function getTestConfig(tmpDir) {
debugMode: false,
debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
}
async function mockHttpRequests(databaseUploadStatusCode) {
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20220311"
"bundleVersion": "codeql-bundle-20220322"
}
1 lib/testing-utils.js generated
@@ -90,6 +90,7 @@ exports.setupTests = setupTests;
function setupActionsVars(tempDir, toolsDir) {
process.env["RUNNER_TEMP"] = tempDir;
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
process.env["GITHUB_WORKSPACE"] = tempDir;
}
exports.setupActionsVars = setupActionsVars;
function getRecordingLogger(messages) {
@@ -1 +1 @@
(source map for testing-utils.js regenerated)
1 lib/tracer-config.test.js generated
@@ -47,6 +47,7 @@ function getTestConfig(tmpDir) {
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
}
// A very minimal setup
File diff suppressed because one or more lines are too long
7 lib/upload-lib.js generated
@@ -95,7 +95,10 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
// If in test mode we don't want to upload the results
const testMode = process.env["TEST_MODE"] === "true" || false;
if (testMode) {
logger.debug("In test mode. Results are not uploaded.");
const payloadSaveFile = path.join(actionsUtil.getTemporaryDirectory(), "payload.json");
logger.info(`In test mode. Results are not uploaded. Saving to ${payloadSaveFile}`);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return;
}
const client = api.getApiClient(apiDetails);
@@ -134,7 +137,7 @@ exports.findSarifFilesInDir = findSarifFilesInDir;
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function uploadFromActions(sarifPath, gitHubVersion, apiDetails, logger) {
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, logger);
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(actionsUtil.getRequiredInput("checkout_path")), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, logger);
}
exports.uploadFromActions = uploadFromActions;
// Uploads a single sarif file or a directory of sarif files
File diff suppressed because one or more lines are too long
1 lib/util.test.js generated
@@ -257,6 +257,7 @@ for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);
});
File diff suppressed because one or more lines are too long
5 node_modules/.package-lock.json generated vendored
@@ -3593,8 +3593,9 @@
}
},
"node_modules/minimist": {
"version": "1.2.5",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
"dev": true
},
"node_modules/ms": {
8 node_modules/minimist/index.js generated vendored
@@ -70,7 +70,7 @@ module.exports = function (args, opts) {
var o = obj;
for (var i = 0; i < keys.length-1; i++) {
var key = keys[i];
if (key === '__proto__') return;
if (isConstructorOrProto(o, key)) return;
if (o[key] === undefined) o[key] = {};
if (o[key] === Object.prototype || o[key] === Number.prototype
|| o[key] === String.prototype) o[key] = {};
@@ -79,7 +79,7 @@ module.exports = function (args, opts) {
}

var key = keys[keys.length - 1];
if (key === '__proto__') return;
if (isConstructorOrProto(o, key)) return;
if (o === Object.prototype || o === Number.prototype
|| o === String.prototype) o = {};
if (o === Array.prototype) o = [];
@@ -243,3 +243,7 @@ function isNumber (x) {
return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x);
}

function isConstructorOrProto (obj, key) {
return key === 'constructor' && typeof obj[key] === 'function' || key === '__proto__';
}
2 node_modules/minimist/package.json generated vendored
@@ -1,6 +1,6 @@
{
"name": "minimist",
"version": "1.2.5",
"version": "1.2.6",
"description": "parse argument options",
"main": "index.js",
"devDependencies": {
5 node_modules/minimist/readme.markdown generated vendored
@@ -34,7 +34,10 @@ $ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz
Previous versions had a prototype pollution bug that could cause privilege
escalation in some circumstances when handling untrusted user input.

Please use version 1.2.3 or later: https://snyk.io/vuln/SNYK-JS-MINIMIST-559764
Please use version 1.2.6 or later:

* https://security.snyk.io/vuln/SNYK-JS-MINIMIST-2429795 (version <=1.2.5)
* https://snyk.io/vuln/SNYK-JS-MINIMIST-559764 (version <=1.2.3)

# methods
16 node_modules/minimist/test/proto.js generated vendored
@@ -42,3 +42,19 @@ test('proto pollution (constructor)', function (t) {
t.equal(argv.y, undefined);
t.end();
});

test('proto pollution (constructor function)', function (t) {
var argv = parse(['--_.concat.constructor.prototype.y', '123']);
function fnToBeTested() {}
t.equal(fnToBeTested.y, undefined);
t.equal(argv.y, undefined);
t.end();
});

// powered by snyk - https://github.com/backstage/backstage/issues/10343
test('proto pollution (constructor function) snyk', function (t) {
var argv = parse('--_.constructor.constructor.prototype.foo bar'.split(' '));
t.equal((function(){}).foo, undefined);
t.equal(argv.y, undefined);
t.end();
})
10 package-lock.json generated
@@ -3646,8 +3646,9 @@
}
},
"node_modules/minimist": {
"version": "1.2.5",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
"dev": true
},
"node_modules/ms": {
@@ -8003,8 +8004,9 @@
}
},
"minimist": {
"version": "1.2.5",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
"dev": true
},
"ms": {
@@ -9,8 +9,7 @@
"test-debug": "ava src/**.test.ts --serial --verbose --timeout=20m",
"lint": "eslint --report-unused-disable-directives --max-warnings=0 . --ext .js,.ts",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --ext .js,.ts --fix",
"removeNPMAbsolutePaths": "removeNPMAbsolutePaths . --force",
"version": "cd runner && npm version patch && cd .. && npm run removeNPMAbsolutePaths && git add runner"
"removeNPMAbsolutePaths": "removeNPMAbsolutePaths . --force"
},
"ava": {
"typescript": {
75 pr-checks/checks/with-checkout-path.yml Normal file
@@ -0,0 +1,75 @@
|
||||
name: "Use a custom `checkout_path`"
|
||||
description: "Checks that a custom `checkout_path` will find the proper commit_oid"
|
||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||
# `windows-latest`.
|
||||
# Must test on all three platforms since this test does path manipulation
|
||||
os: [ubuntu-latest, macos-latest, windows-2019]
|
||||
steps:
|
||||
# Check out the actions repo again, but at a different location.
|
||||
# choose an arbitrary SHA so that we can later test that the commit_oid is not from main
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
path: x/y/z/some-path
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
# it's enough to test one compiled language and one interpreted language
|
||||
languages: csharp,javascript
|
||||
source-path: x/y/z/some-path/tests/multi-language-repo
|
||||
debug: true
|
||||
- name: Build code (non-windows)
|
||||
shell: bash
|
||||
if: ${{ runner.os != 'Windows' }}
|
||||
run: |
|
||||
$CODEQL_RUNNER x/y/z/some-path/tests/multi-language-repo/build.sh
|
||||
- name: Build code (windows)
|
||||
shell: bash
|
||||
if: ${{ runner.os == 'Windows' }}
|
||||
run: |
|
||||
x/y/z/some-path/tests/multi-language-repo/build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||
ref: v1.1.0
|
||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
upload: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
ref: v1.1.0
|
||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Verify SARIF after upload
|
||||
shell: bash
|
||||
run: |
|
||||
EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
|
||||
EXPECTED_REF="v1.1.0"
|
||||
EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
|
||||
|
||||
ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
|
||||
ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
|
||||
ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
|
||||
|
||||
if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
|
||||
echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
|
||||
echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
|
||||
echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
@@ -85,7 +85,7 @@ for file in os.listdir('checks'):
}
},
'name': checkSpecification['name'],
'timeout-minutes': 30,
'timeout-minutes': 45,
'runs-on': '${{ matrix.os }}',
'steps': steps
}
@@ -28,7 +28,7 @@ python3 -m pip install --user 'virtualenv<20.11'
python3 -m pip install --user poetry!=1.0.10
python3 -m pip install --user pipenv

if command -v python2 &> /dev/null; then
if command -v python2 >/dev/null 2>&1; then
# Setup Python 2 dependency installation tools.
# The Ubuntu 20.04 GHA environment does not come with a Python 2 pip
curl --location --fail https://bootstrap.pypa.io/pip/2.7/get-pip.py | python2
@@ -6,7 +6,7 @@ import * as yaml from "js-yaml";
import * as sinon from "sinon";

import * as actionsutil from "./actions-util";
import { setupTests } from "./testing-utils";
import { setupActionsVars, setupTests } from "./testing-utils";
import { getMode, initializeEnvironment, Mode, withTmpDir } from "./util";

function errorCodes(
|
||||
@@ -24,6 +24,8 @@ test("getRef() throws on the empty string", async (t) => {
|
||||
});
|
||||
|
||||
test("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
@@ -36,8 +38,11 @@ test("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t)
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||
@@ -51,21 +56,27 @@ test("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
|
||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
|
||||
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
|
||||
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
||||
@@ -83,8 +94,11 @@ test("getRef() returns ref provided as an input and ignores current HEAD", async
|
||||
callback.restore();
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
||||
|
||||
@@ -94,13 +108,18 @@ test("getRef() throws an error if only `ref` is provided as an input", async (t)
|
||||
},
|
||||
{
|
||||
instanceOf: Error,
|
||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
message:
|
||||
"Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
}
|
||||
);
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() throws an error if only `sha` is provided as an input", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
process.env["GITHUB_WORKSPACE"] = "/tmp";
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
||||
|
||||
@@ -110,11 +129,13 @@ test("getRef() throws an error if only `sha` is provided as an input", async (t)
|
||||
},
|
||||
{
|
||||
instanceOf: Error,
|
||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
message:
|
||||
"Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
}
|
||||
);
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("computeAutomationID()", async (t) => {
|
||||
let actualAutomationID = actionsutil.computeAutomationID(
|
||||
@@ -709,6 +730,7 @@ test("initializeEnvironment", (t) => {
|
||||
|
||||
test("isAnalyzingDefaultBranch()", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const envFile = path.join(tmpDir, "event.json");
|
||||
fs.writeFileSync(
|
||||
envFile,
|
||||
|
||||
@@ -66,7 +66,10 @@ export function getToolCacheDirectory(): string {
|
||||
/**
|
||||
* Gets the SHA of the commit that is currently checked out.
|
||||
*/
|
||||
export const getCommitOid = async function (ref = "HEAD"): Promise<string> {
|
||||
export const getCommitOid = async function (
|
||||
checkoutPath: string,
|
||||
ref = "HEAD"
|
||||
): Promise<string> {
|
||||
// Try to use git to get the current commit SHA. If that fails then
|
||||
// log but otherwise silently fall back to using the SHA from the environment.
|
||||
// The only time these two values will differ is during analysis of a PR when
|
||||
@@ -89,6 +92,7 @@ export const getCommitOid = async function (ref = "HEAD"): Promise<string> {
|
||||
process.stderr.write(data);
|
||||
},
|
||||
},
|
||||
cwd: checkoutPath,
|
||||
}
|
||||
).exec();
|
||||
return commitOid.trim();
|
||||
@@ -113,6 +117,7 @@ export const determineMergeBaseCommitOid = async function (): Promise<
|
||||
}
|
||||
|
||||
const mergeSha = getRequiredEnvParam("GITHUB_SHA");
|
||||
const checkoutPath = getOptionalInput("checkout_path");
|
||||
|
||||
try {
|
||||
let commitOid = "";
|
||||
@@ -140,6 +145,7 @@ export const determineMergeBaseCommitOid = async function (): Promise<
|
||||
process.stderr.write(data);
|
||||
},
|
||||
},
|
||||
cwd: checkoutPath,
|
||||
}
|
||||
).exec();
|
||||
|
||||
@@ -504,6 +510,10 @@ export async function getRef(): Promise<string> {
|
||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||
const refInput = getOptionalInput("ref");
|
||||
const shaInput = getOptionalInput("sha");
|
||||
const checkoutPath =
|
||||
getOptionalInput("checkout_path") ||
|
||||
getOptionalInput("source-root") ||
|
||||
getRequiredEnvParam("GITHUB_WORKSPACE");
|
||||
|
||||
const hasRefInput = !!refInput;
|
||||
const hasShaInput = !!shaInput;
|
||||
@@ -532,7 +542,7 @@ export async function getRef(): Promise<string> {
|
||||
return ref;
|
||||
}
|
||||
|
||||
const head = await getCommitOid("HEAD");
|
||||
const head = await getCommitOid(checkoutPath, "HEAD");
|
||||
|
||||
// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
|
||||
// in actions/checkout@v1 this may not be true as it checks out the repository
|
||||
@@ -541,8 +551,10 @@ export async function getRef(): Promise<string> {
|
||||
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
|
||||
const hasChangedRef =
|
||||
sha !== head &&
|
||||
(await getCommitOid(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
|
||||
head;
|
||||
(await getCommitOid(
|
||||
checkoutPath,
|
||||
ref.replace(/^refs\/pull\//, "refs/remotes/pull/")
|
||||
)) !== head;
|
||||
|
||||
if (hasChangedRef) {
|
||||
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
||||
@@ -611,7 +623,7 @@ export interface StatusReportBase {
|
||||
/** Action version (x.y.z from package.json). */
|
||||
action_version: string;
|
||||
/** CodeQL CLI version (x.y.z from the CLI). */
|
||||
codeql_cli_version?: string;
|
||||
codeql_version?: string;
|
||||
}
|
||||
|
||||
export function getActionsStatus(
|
||||
@@ -713,7 +725,7 @@ export async function createStatusReportBase(
|
||||
statusReport.runner_os_release = os.release();
|
||||
}
|
||||
if (codeQlCliVersion !== undefined) {
|
||||
statusReport.codeql_cli_version = codeQlCliVersion;
|
||||
statusReport.codeql_version = codeQlCliVersion;
|
||||
}
|
||||
|
||||
return statusReport;
|
||||
|
||||
@@ -25,6 +25,7 @@ test("emptyPaths", async (t) => {
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||
@@ -50,6 +51,7 @@ test("nonEmptyPaths", async (t) => {
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
|
||||
@@ -79,6 +81,7 @@ test("exclude temp dir", async (t) => {
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||
|
||||
@@ -122,6 +122,7 @@ test("status report fields and search path setting", async (t) => {
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
|
||||
recursive: true,
|
||||
|
||||
@@ -6,6 +6,7 @@ import * as yaml from "js-yaml";
|
||||
|
||||
import * as analysisPaths from "./analysis-paths";
|
||||
import {
|
||||
CODEQL_VERSION_CONFIG_FILES,
|
||||
CODEQL_VERSION_COUNTS_LINES,
|
||||
CODEQL_VERSION_NEW_TRACING,
|
||||
getCodeQL,
|
||||
@@ -235,12 +236,15 @@ export async function runQueries(
|
||||
);
|
||||
}
|
||||
|
||||
const codeql = await getCodeQL(config.codeQLCmd);
|
||||
try {
|
||||
if (hasPackWithCustomQueries) {
|
||||
if (
|
||||
hasPackWithCustomQueries &&
|
||||
!(await util.codeQlVersionAbove(codeql, CODEQL_VERSION_CONFIG_FILES))
|
||||
) {
|
||||
logger.info("Performing analysis with custom CodeQL Packs.");
|
||||
logger.startGroup(`Downloading custom packs for ${language}`);
|
||||
|
||||
const codeql = await getCodeQL(config.codeQLCmd);
|
||||
const results = await codeql.packDownload(packsWithVersion);
|
||||
logger.info(
|
||||
`Downloaded packs: ${results.packs
|
||||
|
||||
@@ -18,7 +18,7 @@ import { Logger } from "./logging";
|
||||
import * as toolcache from "./toolcache";
|
||||
import { toolrunnerErrorCatcher } from "./toolrunner-error-catcher";
|
||||
import * as util from "./util";
|
||||
import { isGoodVersion } from "./util";
|
||||
import { isGoodVersion, ML_POWERED_JS_QUERIES_PACK } from "./util";
|
||||
|
||||
type Options = Array<string | number | boolean>;
|
||||
|
||||
@@ -220,7 +220,7 @@ const CODEQL_VERSION_GROUP_RULES = "2.5.5";
|
||||
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
|
||||
export const CODEQL_VERSION_COUNTS_LINES = "2.6.2";
|
||||
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
|
||||
const CODEQL_VERSION_CONFIG_FILES = "2.8.2"; // Versions before 2.8.2 weren't tolerant to unknown properties
|
||||
export const CODEQL_VERSION_CONFIG_FILES = "2.8.2"; // Versions before 2.8.2 weren't tolerant to unknown properties
|
||||
export const CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
||||
|
||||
/**
|
||||
@@ -737,7 +737,23 @@ async function getCodeQLForCmd(
|
||||
}
|
||||
if (await util.codeQlVersionAbove(codeql, CODEQL_VERSION_CONFIG_FILES)) {
|
||||
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
|
||||
fs.writeFileSync(configLocation, yaml.dump(config.originalUserInput));
|
||||
const augmentedConfig = config.originalUserInput;
|
||||
if (config.injectedMlQueries) {
|
||||
// We need to inject the ML queries into the original user input before
|
||||
// we pass this on to the CLI, to make sure these get run.
|
||||
let packString = ML_POWERED_JS_QUERIES_PACK.packName;
|
||||
if (ML_POWERED_JS_QUERIES_PACK.version)
|
||||
packString = `${packString}@${ML_POWERED_JS_QUERIES_PACK.version}`;
|
||||
if (augmentedConfig.packs === undefined) augmentedConfig.packs = [];
|
||||
if (Array.isArray(augmentedConfig.packs)) {
|
||||
augmentedConfig.packs.push(packString);
|
||||
} else {
|
||||
if (!augmentedConfig.packs.javascript)
|
||||
augmentedConfig.packs["javascript"] = [];
|
||||
augmentedConfig.packs["javascript"].push(packString);
|
||||
}
|
||||
}
|
||||
fs.writeFileSync(configLocation, yaml.dump(augmentedConfig));
|
||||
extraArgs.push(`--codescanning-config=${configLocation}`);
|
||||
}
|
||||
await runTool(cmd, [
|
||||
|
||||
@@ -357,6 +357,7 @@ test("load non-empty input", async (t) => {
|
||||
debugMode: false,
|
||||
debugArtifactName: "my-artifact",
|
||||
debugDatabaseName: "my-db",
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
|
||||
const languages = "javascript";
|
||||
|
||||
@@ -147,6 +147,10 @@ export interface Config {
|
||||
* Specifies the name of the database in the debugging artifact.
|
||||
*/
|
||||
debugDatabaseName: string;
|
||||
/**
|
||||
* Whether we injected ML queries into this configuration.
|
||||
*/
|
||||
injectedMlQueries: boolean;
|
||||
}
|
||||
|
||||
export type Packs = Partial<Record<Language, PackWithVersion[]>>;
|
||||
@@ -274,6 +278,7 @@ const builtinSuites = ["security-extended", "security-and-quality"] as const;
|
||||
/**
|
||||
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
|
||||
* Throws an error if suiteName is not a valid builtin suite.
|
||||
* May inject ML queries, and the return value will declare if this was done.
|
||||
*/
|
||||
async function addBuiltinSuiteQueries(
|
||||
languages: string[],
|
||||
@@ -283,7 +288,8 @@ async function addBuiltinSuiteQueries(
|
||||
suiteName: string,
|
||||
featureFlags: FeatureFlags,
|
||||
configFile?: string
|
||||
) {
|
||||
): Promise<boolean> {
|
||||
let injectedMlQueries = false;
|
||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||
if (!found) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
||||
@@ -305,10 +311,12 @@ async function addBuiltinSuiteQueries(
|
||||
packs.javascript = [];
|
||||
}
|
||||
packs.javascript.push(ML_POWERED_JS_QUERIES_PACK);
|
||||
injectedMlQueries = true;
|
||||
}
|
||||
|
||||
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
|
||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
||||
return injectedMlQueries;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -410,6 +418,11 @@ async function addRemoteQueries(
|
||||
* parsing the 'uses' actions in the workflow file. So it can handle
|
||||
* local paths starting with './', or references to remote repos, or
|
||||
* a finite set of hardcoded terms for builtin suites.
|
||||
*
|
||||
* This may inject ML queries into the packs to use, and the return value will
|
||||
* declare if this was done.
|
||||
*
|
||||
* @returns whether or not we injected ML queries into the packs
|
||||
*/
|
||||
async function parseQueryUses(
|
||||
languages: string[],
|
||||
@@ -423,7 +436,7 @@ async function parseQueryUses(
|
||||
featureFlags: FeatureFlags,
|
||||
logger: Logger,
|
||||
configFile?: string
|
||||
) {
|
||||
): Promise<boolean> {
|
||||
queryUses = queryUses.trim();
|
||||
if (queryUses === "") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
@@ -438,12 +451,12 @@ async function parseQueryUses(
|
||||
workspacePath,
|
||||
configFile
|
||||
);
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for one of the builtin suites
|
||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||
await addBuiltinSuiteQueries(
|
||||
return await addBuiltinSuiteQueries(
|
||||
languages,
|
||||
codeQL,
|
||||
resultMap,
|
||||
@@ -452,7 +465,6 @@ async function parseQueryUses(
|
||||
featureFlags,
|
||||
configFile
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise, must be a reference to another repo
|
||||
@@ -465,6 +477,7 @@ async function parseQueryUses(
|
||||
logger,
|
||||
configFile
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Regex validating stars in paths or paths-ignore entries.
|
||||
@@ -822,13 +835,14 @@ async function addQueriesAndPacksFromWorkflow(
|
||||
apiDetails: api.GitHubApiExternalRepoDetails,
|
||||
featureFlags: FeatureFlags,
|
||||
logger: Logger
|
||||
) {
|
||||
): Promise<boolean> {
|
||||
let injectedMlQueries = false;
|
||||
queriesInput = queriesInput.trim();
|
||||
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
||||
queriesInput = queriesInput.replace(/^\+/, "");
|
||||
|
||||
for (const query of queriesInput.split(",")) {
|
||||
await parseQueryUses(
|
||||
const didInject = await parseQueryUses(
|
||||
languages,
|
||||
codeQL,
|
||||
resultMap,
|
||||
@@ -840,7 +854,9 @@ async function addQueriesAndPacksFromWorkflow(
|
||||
featureFlags,
|
||||
logger
|
||||
);
|
||||
injectedMlQueries = injectedMlQueries || didInject;
|
||||
}
|
||||
return injectedMlQueries;
|
||||
}
|
||||
|
||||
// Returns true if either no queries were provided in the workflow.
|
||||
@@ -892,8 +908,9 @@ export async function getDefaultConfig(
|
||||
}
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
const packs = parsePacksFromInput(packsInput, languages) ?? {};
|
||||
let injectedMlQueries = false;
|
||||
if (queriesInput) {
|
||||
await addQueriesAndPacksFromWorkflow(
|
||||
injectedMlQueries = await addQueriesAndPacksFromWorkflow(
|
||||
codeQL,
|
||||
queriesInput,
|
||||
languages,
|
||||
@@ -922,6 +939,7 @@ export async function getDefaultConfig(
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
injectedMlQueries,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1008,8 +1026,9 @@ async function loadConfig(
|
||||
// they should take precedence over the queries in the config file
|
||||
// unless they're prefixed with "+", in which case they supplement those
|
||||
// in the config file.
|
||||
let injectedMlQueries = false;
|
||||
if (queriesInput) {
|
||||
await addQueriesAndPacksFromWorkflow(
|
||||
injectedMlQueries = await addQueriesAndPacksFromWorkflow(
|
||||
codeQL,
|
||||
queriesInput,
|
||||
languages,
|
||||
@@ -1101,6 +1120,7 @@ async function loadConfig(
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
injectedMlQueries,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -56,6 +56,7 @@ function getTestConfig(tmpDir: string): Config {
|
||||
debugMode: false,
|
||||
debugArtifactName: DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20220311"
"bundleVersion": "codeql-bundle-20220322"
}
@@ -92,6 +92,7 @@ export function setupTests(test: TestFn<any>) {
|
||||
export function setupActionsVars(tempDir: string, toolsDir: string) {
|
||||
process.env["RUNNER_TEMP"] = tempDir;
|
||||
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
|
||||
process.env["GITHUB_WORKSPACE"] = tempDir;
|
||||
}
|
||||
|
||||
export interface LoggedMessage {
|
||||
|
||||
@@ -32,6 +32,7 @@ function getTestConfig(tmpDir: string): configUtils.Config {
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -100,7 +100,15 @@ async function uploadPayload(
|
||||
// If in test mode we don't want to upload the results
|
||||
const testMode = process.env["TEST_MODE"] === "true" || false;
|
||||
if (testMode) {
|
||||
logger.debug("In test mode. Results are not uploaded.");
|
||||
const payloadSaveFile = path.join(
|
||||
actionsUtil.getTemporaryDirectory(),
|
||||
"payload.json"
|
||||
);
|
||||
logger.info(
|
||||
`In test mode. Results are not uploaded. Saving to ${payloadSaveFile}`
|
||||
);
|
||||
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
|
||||
fs.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -165,7 +173,9 @@ export async function uploadFromActions(
|
||||
return await uploadFiles(
|
||||
getSarifFilePaths(sarifPath),
|
||||
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
|
||||
await actionsUtil.getCommitOid(),
|
||||
await actionsUtil.getCommitOid(
|
||||
actionsUtil.getRequiredInput("checkout_path")
|
||||
),
|
||||
await actionsUtil.getRef(),
|
||||
await actionsUtil.getAnalysisKey(),
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
|
||||
@@ -347,6 +347,7 @@ for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
|
||||
t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);