Compare commits

..

5 Commits

Author            SHA1        Message                                         Date
Andrew Eisenberg  065c7165e1  Add new file                                    2021-12-07 15:03:01 -08:00
Andrew Eisenberg  d914262770  Upload artifacts and delete build.sh            2021-12-07 14:45:19 -08:00
Andrew Eisenberg  ff5d52f66c  Only run runner-analyze-csharp-autobuild-macos  2021-12-07 14:38:18 -08:00
Andrew Eisenberg  e811ebfbb0  This should put the action into debug mode      2021-12-07 13:51:37 -08:00
Andrew Eisenberg  458b30ebc6  Initialize dotnet in pr-check                   2021-12-07 13:01:50 -08:00
4552 changed files with 193888 additions and 380787 deletions

View File

@@ -1,4 +1,5 @@
**/webpack.config.js
lib/**
runner/dist/**
src/testdata/**
tests/**

View File

@@ -10,8 +10,7 @@
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-requiring-type-checking",
"plugin:github/recommended",
"plugin:github/typescript",
"plugin:import/typescript"
"plugin:github/typescript"
],
"rules": {
"filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],

View File

@@ -1,62 +0,0 @@
name: Check Code-Scanning Config
description: |
Checks the code scanning configuration file generated by the
action to ensure it contains the expected contents
inputs:
languages:
required: false
description: The languages field passed to the init action.
packs:
required: false
description: The packs field passed to the init action.
queries:
required: false
description: The queries field passed to the init action.
config-file-test:
required: false
description: |
The location of the config file to use. If empty,
then no config file is used.
expected-config-file-contents:
required: true
description: |
A JSON string containing the exact contents of the config file.
tools:
required: true
description: |
The url of codeql to use.
runs:
using: composite
steps:
- uses: ./../action/init
with:
languages: ${{ inputs.languages }}
config-file: ${{ inputs.config-file-test }}
queries: ${{ inputs.queries }}
packs: ${{ inputs.packs }}
tools: ${{ inputs.tools }}
db-location: ${{ runner.temp }}/codescanning-config-cli-test
env:
CODEQL_ACTION_TEST_MODE: 'true'
- name: Install dependencies
shell: bash
run: npm install --location=global ts-node js-yaml
- name: Check config
working-directory: ${{ github.action_path }}
shell: bash
run: ts-node ./index.ts "${{ runner.temp }}/user-config.yaml" '${{ inputs.expected-config-file-contents }}'
- name: Clean up
shell: bash
if: always()
run: |
rm -rf ${{ runner.temp }}/codescanning-config-cli-test
rm -rf ${{ runner.temp }}/user-config.yaml

View File

@@ -1,39 +0,0 @@
import * as core from '@actions/core'
import * as yaml from 'js-yaml'
import * as fs from 'fs'
import * as assert from 'assert'
const actualConfig = loadActualConfig()
const rawExpectedConfig = process.argv[3].trim()
if (!rawExpectedConfig) {
core.info('No expected configuration provided')
} else {
core.startGroup('Expected generated user config')
core.info(yaml.dump(JSON.parse(rawExpectedConfig)))
core.endGroup()
}
const expectedConfig = rawExpectedConfig ? JSON.parse(rawExpectedConfig) : undefined;
assert.deepStrictEqual(
actualConfig,
expectedConfig,
'Expected configuration does not match actual configuration'
);
function loadActualConfig() {
if (!fs.existsSync(process.argv[2])) {
core.info('No configuration file found')
return undefined
} else {
const rawActualConfig = fs.readFileSync(process.argv[2], 'utf8')
core.startGroup('Actual generated user config')
core.info(rawActualConfig)
core.endGroup()
return yaml.load(rawActualConfig)
}
}
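
Standalone, this script reads the path of the generated config from its first argument and the expected contents as a JSON string from its second, matching the ts-node call in the composite action above. A hedged local invocation, with the JSON argument as a purely illustrative expected config:

# Assumes ts-node, @actions/core and js-yaml are installed; the JSON value is only an example.
ts-node ./index.ts "$RUNNER_TEMP/user-config.yaml" '{"queries":[{"uses":"security-extended"}]}'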

View File

@@ -1,20 +0,0 @@
name: Check SARIF
description: Checks a SARIF file to see if certain queries were run and others were not run.
inputs:
sarif-file:
required: true
description: The SARIF file to check
queries-run:
required: true
description: |
Comma separated list of query ids that should be included in this SARIF file.
queries-not-run:
required: true
description: |
Comma separated list of query ids that should NOT be included in this SARIF file.
runs:
using: node12
main: index.js

View File

@@ -1,43 +0,0 @@
'use strict'
const core = require('@actions/core')
const fs = require('fs')
const sarif = JSON.parse(fs.readFileSync(core.getInput('sarif-file'), 'utf8'))
const rules = sarif.runs[0].tool.extensions.flatMap(ext => ext.rules || [])
const ruleIds = rules.map(rule => rule.id)
// Check that all the expected queries ran
const expectedQueriesRun = getQueryIdsInput('queries-run')
const queriesThatShouldHaveRunButDidNot = expectedQueriesRun.filter(queryId => !ruleIds.includes(queryId))
if (queriesThatShouldHaveRunButDidNot.length > 0) {
core.setFailed(`The following queries were expected to run but did not: ${queriesThatShouldHaveRunButDidNot.join(', ')}`)
}
// Check that all the unexpected queries did not run
const expectedQueriesNotRun = getQueryIdsInput('queries-not-run')
const queriesThatShouldNotHaveRunButDid = expectedQueriesNotRun.filter(queryId => ruleIds.includes(queryId))
if (queriesThatShouldNotHaveRunButDid.length > 0) {
core.setFailed(`The following queries were NOT expected to have run but did: ${queriesThatShouldNotHaveRunButDid.join(', ')}`)
}
core.startGroup('All queries run')
rules.forEach(rule => {
core.info(`${rule.id}: ${(rule.properties && rule.properties.name) || rule.name}`)
})
core.endGroup()
core.startGroup('Full SARIF')
core.info(JSON.stringify(sarif, null, 2))
core.endGroup()
function getQueryIdsInput(name) {
return core.getInput(name)
.split(',')
.map(q => q.trim())
.filter(q => q.length > 0)
}

View File

@@ -22,17 +22,18 @@ runs:
run: |
if [[ ${{ inputs.version }} == "nightly-latest" ]]; then
export LATEST=`gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3`
echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST/codeql-bundle.tar.gz" >> $GITHUB_OUTPUT
echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == *"nightly"* ]]; then
export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz" >> $GITHUB_OUTPUT
echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == *"stable"* ]]; then
export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
echo "tools-url=https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz" >> $GITHUB_OUTPUT
echo "Hello $VERSION"
echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == "latest" ]]; then
echo "tools-url=latest" >> $GITHUB_OUTPUT
echo "::set-output name=tools-url::latest"
elif [[ ${{ inputs.version }} == "cached" ]]; then
echo "tools-url=" >> $GITHUB_OUTPUT
echo "::set-output name=tools-url::"
else
echo "::error Unrecognized version specified!"
fi
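
This hunk toggles between the two ways a step can publish its tools-url output: the deprecated ::set-output workflow command and the GITHUB_OUTPUT environment file. A minimal bash sketch of both styles, assuming a hypothetical step id get-url:

# Deprecated workflow command: the runner parses a magic string printed to stdout.
echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"

# Environment-file replacement: append a key=value line to the file named by $GITHUB_OUTPUT.
echo "tools-url=https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz" >> "$GITHUB_OUTPUT"

# In either case, a later step reads the value as ${{ steps.get-url.outputs.tools-url }}.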

View File

@@ -1,54 +0,0 @@
name: Query Filter Test
description: Runs a test of query filters using the check SARIF action
inputs:
sarif-file:
required: true
description: The SARIF file to check
queries-run:
required: true
description: |
Comma separated list of query ids that should be included in this SARIF file.
queries-not-run:
required: true
description: |
Comma separated list of query ids that should NOT be included in this SARIF file.
config-file:
required: true
description: |
The location of the codeql configuration file to use.
tools:
required: true
description: |
The url of codeql to use.
runs:
using: composite
steps:
- uses: ./../action/init
with:
languages: javascript
config-file: ${{ inputs.config-file }}
tools: ${{ inputs.tools }}
db-location: ${{ runner.temp }}/query-filter-test
env:
CODEQL_ACTION_TEST_MODE: "true"
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
upload: false
env:
CODEQL_ACTION_TEST_MODE: "true"
- name: Check SARIF
uses: ./../action/.github/check-sarif
with:
sarif-file: ${{ inputs.sarif-file }}
queries-run: ${{ inputs.queries-run}}
queries-not-run: ${{ inputs.queries-not-run}}
- name: Cleanup after test
shell: bash
run: rm -rf "$RUNNER_TEMP/results" "$RUNNER_TEMP/query-filter-test"

View File

@@ -1,11 +1,14 @@
import argparse
import datetime
from github import Github
import json
import os
import random
import requests
import subprocess
import sys
import json
import datetime
import os
EMPTY_CHANGELOG = """# CodeQL Action Changelog
EMPTY_CHANGELOG = """# CodeQL Action and CodeQL Runner Changelog
## [UNRELEASED]
@@ -13,25 +16,21 @@ No user facing changes.
"""
# Value of the mode flag for a v1 release
V1_MODE = 'v1-release'
# Value of the mode flag for a v2 release
V2_MODE = 'v2-release'
SOURCE_BRANCH_FOR_MODE = { V1_MODE: 'releases/v2', V2_MODE: 'main' }
TARGET_BRANCH_FOR_MODE = { V1_MODE: 'releases/v1', V2_MODE: 'releases/v2' }
# The branch being merged from.
# This is the one that contains day-to-day development work.
MAIN_BRANCH = 'main'
# The branch being merged into.
# This is the release branch that users reference.
LATEST_RELEASE_BRANCH = 'v1'
# Name of the remote
ORIGIN = 'origin'
# Runs git with the given args and returns the stdout.
# Raises an error if git does not exit successfully (unless passed
# allow_non_zero_exit_code=True).
def run_git(*args, allow_non_zero_exit_code=False):
# Raises an error if git does not exit successfully.
def run_git(*args):
cmd = ['git', *args]
p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if not allow_non_zero_exit_code and p.returncode != 0:
if (p.returncode != 0):
raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
return p.stdout.decode('ascii')
@@ -39,10 +38,8 @@ def run_git(*args, allow_non_zero_exit_code=False):
def branch_exists_on_remote(branch_name):
return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
# Opens a PR from the given branch to the target branch
def open_pr(
repo, all_commits, source_branch_short_sha, new_branch_name, source_branch, target_branch,
conductor, is_v2_release, labels, conflicted_files):
# Opens a PR from the given branch to the release branch
def open_pr(repo, all_commits, short_main_sha, branch_name):
# Sort the commits into the pull requests that introduced them,
# and any commits that don't have a pull request
pull_requests = []
@@ -64,10 +61,11 @@ def open_pr(
# Start constructing the body text
body = []
body.append('Merging ' + source_branch_short_sha + ' into ' + target_branch)
body.append('Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH)
conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
body.append('')
body.append(f'Conductor for this PR is @{conductor}.')
body.append('Conductor for this PR is @' + conductor)
# List all PRs merged
if len(pull_requests) > 0:
@@ -75,62 +73,50 @@ def open_pr(
body.append('Contains the following pull requests:')
for pr in pull_requests:
merger = get_merger_of_pr(repo, pr)
body.append(f'- #{pr.number} (@{merger})')
body.append('- #' + str(pr.number) + ' - ' + pr.title +' (@' + merger + ')')
# List all commits not part of a PR
if len(commits_without_pull_requests) > 0:
body.append('')
body.append('Contains the following commits not from a pull request:')
for commit in commits_without_pull_requests:
author_description = f' (@{commit.author.login})' if commit.author is not None else ''
body.append(f'- {commit.sha} - {get_truncated_commit_message(commit)}{author_description}')
body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + ' (@' + commit.author.login + ')')
body.append('')
body.append('Please do the following:')
if len(conflicted_files) > 0:
body.append(' - [ ] Ensure `package.json` file contains the correct version.')
body.append(' - [ ] Add commits to this branch to resolve the merge conflicts ' +
'in the following files:')
body.extend([f' - [ ] `{file}`' for file in conflicted_files])
body.append(' - [ ] Ensure another maintainer has reviewed the additional commits you added to this ' +
'branch to resolve the merge conflicts.')
body.append(' - [ ] Ensure the CHANGELOG displays the correct version and date.')
body.append(' - [ ] Ensure the CHANGELOG includes all relevant, user-facing changes since the last release.')
body.append(' - [ ] Check that there are not any unexpected commits being merged into the ' + target_branch + ' branch.')
body.append(' - [ ] Ensure the docs team is aware of any documentation changes that need to be released.')
body.append('Please review the following:')
body.append(' - [ ] The CHANGELOG displays the correct version and date.')
body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
body.append(' - [ ] There are no unexpected commits being merged into the ' + LATEST_RELEASE_BRANCH + ' branch.')
body.append(' - [ ] The docs team is aware of any documentation changes that need to be released.')
body.append(' - [ ] The mergeback PR is merged back into ' + MAIN_BRANCH + ' after this PR is merged.')
if not is_v2_release:
body.append(' - [ ] Remove and re-add the "Update dependencies" label to the PR to trigger just this workflow.')
body.append(' - [ ] Wait for the "Update dependencies" workflow to push a commit updating the dependencies.')
body.append(' - [ ] Mark the PR as ready for review to trigger the full set of PR checks.')
body.append(' - [ ] Approve and merge this PR. Make sure `Create a merge commit` is selected rather than `Squash and merge` or `Rebase and merge`.')
if is_v2_release:
body.append(' - [ ] Merge the mergeback PR that will automatically be created once this PR is merged.')
body.append(' - [ ] Merge the v1 release PR that will automatically be created once this PR is merged.')
title = 'Merge ' + source_branch + ' into ' + target_branch
title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
# Create the pull request
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft so that
# a maintainer can take the PR out of draft, thereby triggering the PR checks.
pr = repo.create_pull(title=title, body='\n'.join(body), head=new_branch_name, base=target_branch, draft=True)
pr.add_to_labels(*labels)
pr = repo.create_pull(title=title, body='\n'.join(body), head=branch_name, base=LATEST_RELEASE_BRANCH, draft=True)
print('Created PR #' + str(pr.number))
# Assign the conductor
pr.add_to_assignees(conductor)
print('Assigned PR to ' + conductor)
# Gets a list of the SHAs of all commits that have happened on the source branch
# since the last release to the target branch.
# This will not include any commits that exist on the target branch
# that aren't on the source branch.
def get_commit_difference(repo, source_branch, target_branch):
# Passing split nothing means that the empty string splits to nothing: compare `''.split() == []`
# to `''.split('\n') == ['']`.
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + target_branch + '..' + ORIGIN + '/' + source_branch).strip().split()
# Gets the person who should be in charge of the mergeback PR
def get_conductor(repo, pull_requests, other_commits):
# If there are any PRs then use whoever merged the last one
if len(pull_requests) > 0:
return get_merger_of_pr(repo, pull_requests[-1])
# Otherwise take the author of the latest commit
return other_commits[-1].author.login
# Gets a list of the SHAs of all commits that have happened on main
# since the release branched off.
# This will not include any commits that exist on the release branch
# that aren't on main.
def get_commit_difference(repo):
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '..' + ORIGIN + '/' + MAIN_BRANCH).strip().split('\n')
# Convert to full-fledged commit objects
commits = [repo.get_commit(c) for c in commits]
@@ -150,7 +136,7 @@ def get_truncated_commit_message(commit):
else:
return message
# Converts a commit into the PR that introduced it to the source branch.
# Converts a commit into the PR that introduced it to the main branch.
# Returns the PR object, or None if no PR could be found.
def get_pr_for_commit(repo, commit):
prs = commit.get_pulls()
@@ -193,65 +179,29 @@ def update_changelog(version):
def main():
parser = argparse.ArgumentParser('update-release-branch.py')
if len(sys.argv) != 3:
raise Exception('Usage: update-release.branch.py <github token> <repository nwo>')
github_token = sys.argv[1]
repository_nwo = sys.argv[2]
parser.add_argument(
'--github-token',
type=str,
required=True,
help='GitHub token, typically from GitHub Actions.'
)
parser.add_argument(
'--repository-nwo',
type=str,
required=True,
help='The nwo of the repository, for example github/codeql-action.'
)
parser.add_argument(
'--mode',
type=str,
required=True,
choices=[V2_MODE, V1_MODE],
help=f"Which release to perform. '{V2_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V2_MODE]} as the source " +
f"branch and {TARGET_BRANCH_FOR_MODE[V2_MODE]} as the target branch. " +
f"'{V1_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V1_MODE]} as the source branch and " +
f"{TARGET_BRANCH_FOR_MODE[V1_MODE]} as the target branch."
)
parser.add_argument(
'--conductor',
type=str,
required=True,
help='The GitHub handle of the person who is conducting the release process.'
)
args = parser.parse_args()
source_branch = SOURCE_BRANCH_FOR_MODE[args.mode]
target_branch = TARGET_BRANCH_FOR_MODE[args.mode]
repo = Github(args.github_token).get_repo(args.repository_nwo)
repo = Github(github_token).get_repo(repository_nwo)
version = get_current_version()
if args.mode == V1_MODE:
# Change the version number to a v1 equivalent
version = get_current_version()
version = f'1{version[1:]}'
# Print what we intend to go
print('Considering difference between ' + source_branch + ' and ' + target_branch)
source_branch_short_sha = run_git('rev-parse', '--short', ORIGIN + '/' + source_branch).strip()
print('Current head of ' + source_branch + ' is ' + source_branch_short_sha)
print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
short_main_sha = run_git('rev-parse', '--short', ORIGIN + '/' + MAIN_BRANCH).strip()
print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)
# See if there are any commits to merge in
commits = get_commit_difference(repo=repo, source_branch=source_branch, target_branch=target_branch)
commits = get_commit_difference(repo)
if len(commits) == 0:
print('No commits to merge from ' + source_branch + ' to ' + target_branch)
print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
return
# The branch name is based off of the name of branch being merged into
# and the SHA of the branch being merged from. Thus if the branch already
# exists we can assume we don't need to recreate it.
new_branch_name = 'update-v' + version + '-' + source_branch_short_sha
new_branch_name = 'update-v' + version + '-' + short_main_sha
print('Branch name is ' + new_branch_name)
# Check if the branch already exists. If so we can abort as this script
@@ -262,90 +212,19 @@ def main():
# Create the new branch and push it to the remote
print('Creating branch ' + new_branch_name)
# The process of creating the v1 release can run into merge conflicts. We commit the unresolved
# conflicts so a maintainer can easily resolve them (vs erroring and requiring maintainers to
# reconstruct the release manually)
conflicted_files = []
if args.mode == V1_MODE:
# If we're performing a backport, start from the target branch
print(f'Creating {new_branch_name} from the {ORIGIN}/{target_branch} branch')
run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{target_branch}')
# Revert the commit that we made as part of the last release that updated the version number and
# changelog to refer to 1.x.x variants. This avoids merge conflicts in the changelog and
# package.json files when we merge in the v2 branch.
# This commit will not exist the first time we release the v1 branch from the v2 branch, so we
# use `git log --grep` to conditionally revert the commit.
print('Reverting the 1.x.x version number and changelog updates from the last release to avoid conflicts')
v1_update_commits = run_git('log', '--grep', '^Update version and changelog for v', '--format=%H').split()
if len(v1_update_commits) > 0:
print(f' Reverting {v1_update_commits[0]}')
# Only revert the newest commit as older ones will already have been reverted in previous
# releases.
run_git('revert', v1_update_commits[0], '--no-edit')
# Also revert the "Update checked-in dependencies" commit created by Actions.
update_dependencies_commit = run_git('log', '--grep', '^Update checked-in dependencies', '--format=%H').split()[0]
print(f' Reverting {update_dependencies_commit}')
run_git('revert', update_dependencies_commit, '--no-edit')
else:
print(' Nothing to revert.')
print(f'Merging {ORIGIN}/{source_branch} into the release prep branch')
# Commit any conflicts (see the comment for `conflicted_files`)
run_git('merge', f'{ORIGIN}/{source_branch}', allow_non_zero_exit_code=True)
conflicted_files = run_git('diff', '--name-only', '--diff-filter', 'U').splitlines()
if len(conflicted_files) > 0:
run_git('add', '.')
run_git('commit', '--no-edit')
# Migrate the package version number from a v2 version number to a v1 version number
print(f'Setting version number to {version}')
subprocess.check_output(['npm', 'version', version, '--no-git-tag-version'])
run_git('add', 'package.json', 'package-lock.json')
# Migrate the changelog notes from v2 version numbers to v1 version numbers
print('Migrating changelog notes from v2 to v1')
subprocess.check_output(['sed', '-i', 's/^## 2\./## 1./g', 'CHANGELOG.md'])
# Remove changelog notes from v2 that don't apply to v1
subprocess.check_output(['sed', '-i', '/^- \[v2+ only\]/d', 'CHANGELOG.md'])
# Amend the commit generated by `npm version` to update the CHANGELOG
run_git('add', 'CHANGELOG.md')
run_git('commit', '-m', f'Update version and changelog for v{version}')
else:
# If we're performing a standard release, there won't be any new commits on the target branch,
# as these will have already been merged back into the source branch. Therefore we can just
# start from the source branch.
run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{source_branch}')
run_git('checkout', '-b', new_branch_name, ORIGIN + '/' + MAIN_BRANCH)
print('Updating changelog')
update_changelog(version)
# Create a commit that updates the CHANGELOG
run_git('add', 'CHANGELOG.md')
run_git('commit', '-m', f'Update changelog for v{version}')
run_git('commit', '-m', version)
run_git('push', ORIGIN, new_branch_name)
# Open a PR to update the branch
open_pr(
repo,
commits,
source_branch_short_sha,
new_branch_name,
source_branch=source_branch,
target_branch=target_branch,
conductor=args.conductor,
is_v2_release=args.mode == V2_MODE,
labels=['Update dependencies'] if args.mode == V1_MODE else [],
conflicted_files=conflicted_files
)
open_pr(repo, commits, short_main_sha, new_branch_name)
if __name__ == '__main__':
main()
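
The hunks above move update-release-branch.py between a positional-argument interface and an argparse interface. A rough sketch of the two invocations, with the token and the conductor handle as placeholders:

# Positional interface: <github token> <repository nwo>.
python3 update-release-branch.py "$GITHUB_TOKEN" github/codeql-action

# Flag-based interface: adds an explicit release mode and conductor.
python3 update-release-branch.py \
  --github-token "$GITHUB_TOKEN" \
  --repository-nwo github/codeql-action \
  --mode v2-release \
  --conductor some-maintainer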

View File

@@ -1,94 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: "PR Check - Analyze: 'ref' and 'sha' from inputs"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
analyze-ref-input:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20211005
- os: windows-2019
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: windows-2019
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: "Analyze: 'ref' and 'sha' from inputs"
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,68 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - autobuild-action
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
autobuild-action:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
name: autobuild-action
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: csharp
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/autobuild
env:
# Explicitly disable the CLR tracer.
COR_ENABLE_PROFILING: ''
COR_PROFILER: ''
COR_PROFILER_PATH_64: ''
CORECLR_ENABLE_PROFILING: ''
CORECLR_PROFILER: ''
CORECLR_PROFILER_PATH_64: ''
- uses: ./../action/analyze
- name: Check database
shell: bash
run: |
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d csharp ]]; then
echo "Did not find a C# database"
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,90 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Export file baseline information
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
export-file-baseline-information:
strategy:
matrix:
include:
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: Export file baseline information
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
# Windows doesn't support Swift, and only macOS latest and nightly-latest support Swift 5.7.1.
if: runner.os == 'Linux' || (runner.os == 'macOS' && matrix.version == 'cached')
with:
swift-version: '5.7'
- uses: ./../action/init
with:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
CODEQL_FILE_BASELINE_INFORMATION: true
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
env:
CODEQL_FILE_BASELINE_INFORMATION: true
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true
- name: Upload SARIF
uses: actions/upload-artifact@v3
with:
name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
retention-days: 7
- name: Check results
shell: bash
run: |
cd "$RUNNER_TEMP/results"
expected_baseline_languages="cpp cs go java js py rb swift"
for lang in ${expected_baseline_languages}; do
rule_name="${lang}/baseline/expected-extracted-files"
found_notification=$(jq --arg rule_name "${rule_name}" '[.runs[0].tool.driver.notifications |
select(. != null) | flatten | .[].id] | any(. == $rule_name)' javascript.sarif)
if [[ "${found_notification}" != "true" ]]; then
echo "Expected SARIF output to contain notification '${rule_name}', but found no such notification."
exit 1
else
echo "Found notification '${rule_name}'."
fi
done
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,66 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Extractor ram and threads options test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
extractor-ram-threads:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
name: Extractor ram and threads options test
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: java
ram: 230
threads: 1
- name: Assert Results
shell: bash
run: |
if [ "${CODEQL_RAM}" != "230" ]; then
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_THREADS}" != "1" ]; then
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,91 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Go: Custom queries'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
go-custom-queries:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20211005
- os: windows-2019
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: windows-2019
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: 'Go: Custom queries'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
languages: go
config-file: ./.github/codeql/custom-queries.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,88 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Go: tracing with autobuilder step'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
go-tracing-autobuilder:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: 'Go: tracing with autobuilder step'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/autobuild
- uses: ./../action/analyze
- shell: bash
run: |
if [[ "${CODEQL_ACTION_DID_AUTOBUILD_GOLANG}" != true ]]; then
echo "Expected the Go autobuilder to be run, but the" \
"CODEQL_ACTION_DID_AUTOBUILD_GOLANG environment variable was not true."
exit 1
fi
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d go ]]; then
echo "Did not find a Go database"
exit 1
fi
env:
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,92 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Go: tracing with custom build steps'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
go-tracing-custom-build-steps:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: 'Go: tracing with custom build steps'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: go build main.go
- uses: ./../action/analyze
- shell: bash
run: |
# Once we start running Bash 4.2 in all environments, we can replace the
# `! -z` flag with the more elegant `-v` which confirms that the variable
# is actually unset and not potentially set to a blank value.
if [[ ! -z "${CODEQL_ACTION_DID_AUTOBUILD_GOLANG}" ]]; then
echo "Expected the Go autobuilder not to be run, but the" \
"CODEQL_ACTION_DID_AUTOBUILD_GOLANG environment variable was set."
exit 1
fi
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d go ]]; then
echo "Did not find a Go database"
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,82 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Go: tracing with legacy workflow'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
go-tracing-legacy-workflow:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: 'Go: tracing with legacy workflow'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
- shell: bash
run: |
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d go ]]; then
echo "Did not find a Go database"
exit 1
fi
env:
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,79 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Packaging: Download using registries'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
init-with-registries:
strategy:
matrix:
include:
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: 'Packaging: Download using registries'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Init with registries
uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
config-file: ./.github/codeql/codeql-config-registries.yml
languages: javascript
registries: |
- url: "https://ghcr.io/v2/"
packages: "*/*"
token: "${{ secrets.GITHUB_TOKEN }}"
- name: Verify packages installed
shell: bash
run: |
PRIVATE_PACK="$HOME/.codeql/packages/dsp-testing/private-pack"
CODEQL_PACK1="$HOME/.codeql/packages/dsp-testing/codeql-pack1"
if [[ -d $PRIVATE_PACK ]]
then
echo "$PRIVATE_PACK was installed."
else
echo "::error $PRIVATE_PACK pack was not installed."
exit 1
fi
if [[ -d $CODEQL_PACK1 ]]
then
echo "$CODEQL_PACK1 was installed."
else
echo "::error $CODEQL_PACK1 pack was not installed."
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,68 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Custom source root
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
javascript-source-root:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
name: Custom source root
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Move codeql-action
shell: bash
run: |
mkdir ../new-source-root
mv * ../new-source-root
- uses: ./../action/init
with:
languages: javascript
source-root: ../new-source-root
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
skip-queries: true
upload: false
- name: Assert database exists
shell: bash
run: |
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d javascript ]]; then
echo "Did not find a JavaScript database"
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,135 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - ML-powered queries
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
ml-powered-queries:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: windows-2019
version: stable-20220120
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: ML-powered queries
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
languages: javascript
queries: security-extended
source-root: ./../action/tests/ml-powered-queries-repo
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v3
with:
name: ml-powered-queries-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
retention-days: 7
- name: Check sarif
uses: ./../action/.github/check-sarif
# Running on Windows requires CodeQL CLI 2.9.0+.
if: "!(matrix.version == 'stable-20220120' && runner.os == 'Windows')"
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: js/ml-powered/nosql-injection,js/ml-powered/path-injection,js/ml-powered/sql-injection,js/ml-powered/xss
queries-not-run: foo,bar
- name: Check results
env:
# Running on Windows requires CodeQL CLI 2.9.0+.
SHOULD_RUN_ML_POWERED_QUERIES: ${{ !(matrix.version == 'stable-20220120' &&
runner.os == 'Windows') }}
shell: bash
run: |
echo "Expecting ML-powered queries to be run: ${SHOULD_RUN_ML_POWERED_QUERIES}"
cd "$RUNNER_TEMP/results"
# We should run at least the ML-powered queries in `expected_rules`.
expected_rules="js/ml-powered/nosql-injection js/ml-powered/path-injection js/ml-powered/sql-injection js/ml-powered/xss"
for rule in ${expected_rules}; do
found_rule=$(jq --arg rule "${rule}" '[.runs[0].tool.extensions[].rules | select(. != null) |
flatten | .[].id] | any(. == $rule)' javascript.sarif)
echo "Did find rule '${rule}': ${found_rule}"
if [[ "${found_rule}" != "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
echo "Expected SARIF output to contain rule '${rule}', but found no such rule."
exit 1
elif [[ "${found_rule}" == "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
echo "Found rule '${rule}' in the SARIF output which shouldn't have been part of the analysis."
exit 1
fi
done
# We should have at least one alert from an ML-powered query.
num_alerts=$(jq '[.runs[0].results[] |
select(.properties.score != null and (.rule.id | startswith("js/ml-powered/")))] | length' \
javascript.sarif)
echo "Found ${num_alerts} alerts from ML-powered queries.";
if [[ "${num_alerts}" -eq 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
echo "Expected to find at least one alert from an ML-powered query but found ${num_alerts}."
exit 1
elif [[ "${num_alerts}" -ne 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
echo "Expected not to find any alerts from an ML-powered query but found ${num_alerts}."
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,143 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Multi-language repository
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
multi-language-autodetect:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: Multi-language repository
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
# Only macOS latest and nightly-latest support Swift 5.7.1
if: runner.os == 'Linux' || matrix.version == 'cached'
with:
swift-version: '5.7'
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
id: analysis
- name: Check language autodetect for all languages excluding Ruby, Swift
shell: bash
run: |
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for CPP, or created it in the wrong location."
exit 1
fi
CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for C Sharp, or created it in the wrong location."
exit 1
fi
GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Go, or created it in the wrong location."
exit 1
fi
JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Java, or created it in the wrong location."
exit 1
fi
JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Javascript, or created it in the wrong location."
exit 1
fi
PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Python, or created it in the wrong location."
exit 1
fi
- name: Check language autodetect for Ruby
if: (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version
== 'nightly-latest')
shell: bash
run: |
RUBY_DB=${{ fromJson(steps.analysis.outputs.db-locations).ruby }}
if [[ ! -d $RUBY_DB ]] || [[ ! $RUBY_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Ruby, or created it in the wrong location."
exit 1
fi
- name: Check language autodetect for Swift
if: "!startsWith(matrix.os, 'windows') && (matrix.version == 'cached' || matrix.version\
\ == 'latest' || matrix.version == 'nightly-latest')"
shell: bash
run: |
SWIFT_DB=${{ fromJson(steps.analysis.outputs.db-locations).swift }}
if [[ ! -d $SWIFT_DB ]] || [[ ! $SWIFT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Swift, or created it in the wrong location."
exit 1
fi
env:
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true' # Remove when Swift is GA.
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,94 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Packaging: Config and input passed to the CLI'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
packaging-codescanning-config-inputs-js:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: 'Packaging: Config and input passed to the CLI'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
packs: +dsp-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
- name: Check results
uses: ./../action/.github/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results
shell: bash
run: |
cd "$RUNNER_TEMP/results"
# We should have 4 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
echo "Found matching rules '$RULES'"
if [ "$RULES" != "$EXPECTED_RULES" ]; then
echo "Did not match expected rules '$EXPECTED_RULES'."
exit 1
fi
env:
CODEQL_PASS_CONFIG_TO_CLI: true
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,92 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Packaging: Config and input'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
packaging-config-inputs-js:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: 'Packaging: Config and input'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
packs: +dsp-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
- name: Check results
uses: ./../action/.github/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results
shell: bash
run: |
cd "$RUNNER_TEMP/results"
# We should have 4 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
echo "Found matching rules '$RULES'"
if [ "$RULES" != "$EXPECTED_RULES" ]; then
echo "Did not match expected rules '$EXPECTED_RULES'."
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -1,91 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Packaging: Config file'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
packaging-config-js:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: 'Packaging: Config file'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging.yml
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
- name: Check results
uses: ./../action/.github/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results
shell: bash
run: |
cd "$RUNNER_TEMP/results"
# We should have 4 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
echo "Found matching rules '$RULES'"
if [ "$RULES" != "$EXPECTED_RULES" ]; then
echo "Did not match expected rules '$EXPECTED_RULES'."
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true


@@ -1,92 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Packaging: Action input'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
packaging-inputs-js:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: 'Packaging: Action input'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging2.yml
languages: javascript
packs: dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2, dsp-testing/codeql-pack3:other-query.ql
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
- name: Check results
uses: ./../action/.github/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results
shell: bash
run: |
cd "$RUNNER_TEMP/results"
# We should have 4 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
echo "Found matching rules '$RULES'"
if [ "$RULES" != "$EXPECTED_RULES" ]; then
echo "Did not match expected rules '$EXPECTED_RULES'."
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true


@@ -1,91 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Remote config file
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
remote-config:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20211005
- os: windows-2019
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: windows-2019
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: Remote config file
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
CODEQL_ACTION_TEST_MODE: true


@@ -1,62 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - RuboCop multi-language
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
rubocop-multi-language:
strategy:
matrix:
include:
- os: ubuntu-latest
version: cached
name: RuboCop multi-language
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6
- name: Install Code Scanning integration
shell: bash
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
- name: Install dependencies
shell: bash
run: bundle install
- name: RuboCop run
shell: bash
run: |
bash -c "
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
[[ $? -ne 2 ]]
"
- uses: ./../action/upload-sarif
with:
sarif_file: rubocop.sarif
env:
CODEQL_ACTION_TEST_MODE: true

.github/workflows/__ruby.yml

@@ -1,66 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Ruby analysis
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
ruby:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: Ruby analysis
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: ruby
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
id: analysis
- name: Check database
shell: bash
run: |
RUBY_DB="${{ fromJson(steps.analysis.outputs.db-locations).ruby }}"
if [[ ! -d "$RUBY_DB" ]]; then
echo "Did not create a database for Ruby."
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true
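The db-locations output consumed by fromJson above is a JSON object mapping each analyzed language to its database path. Outside of workflow expressions the same check can be done with jq; a sketch, assuming the output has been exported into a DB_LOCATIONS environment variable (a name chosen here for illustration):

    # Equivalent of fromJson(steps.analysis.outputs.db-locations).ruby
    RUBY_DB="$(echo "$DB_LOCATIONS" | jq -r '.ruby')"
    [[ -d "$RUBY_DB" ]] && echo "Ruby database created at $RUBY_DB"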


@@ -1,91 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Split workflow
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
split-workflow:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: Split workflow
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
packs: +dsp-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
skip-queries: true
output: ${{ runner.temp }}/results
- name: Assert No Results
shell: bash
run: |
if [ "$(ls -A $RUNNER_TEMP/results)" ]; then
echo "Expected results directory to be empty after skipping query execution!"
exit 1
fi
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
- name: Assert Results
shell: bash
run: |
cd "$RUNNER_TEMP/results"
# We should have 4 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
echo "Found matching rules '$RULES'"
if [ "$RULES" != "$EXPECTED_RULES" ]; then
echo "Did not match expected rules '$EXPECTED_RULES'."
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true


@@ -1,67 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Swift analysis using autobuild
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
swift-autobuild:
strategy:
matrix:
include:
- os: macos-latest
version: latest
- os: macos-latest
version: cached
- os: macos-latest
version: nightly-latest
name: Swift analysis using autobuild
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
# Only macOS latest and nightly-latest support Swift 5.7.1
if: runner.os == 'Linux' || matrix.version == 'cached'
with:
swift-version: '5.7'
- uses: ./../action/init
with:
languages: swift
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/autobuild
- uses: ./../action/analyze
id: analysis
- name: Check database
shell: bash
run: |
SWIFT_DB="${{ fromJson(steps.analysis.outputs.db-locations).swift }}"
if [[ ! -d "$SWIFT_DB" ]]; then
echo "Did not create a database for Swift."
exit 1
fi
env:
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true'
CODEQL_ACTION_TEST_MODE: true


@@ -1,76 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Swift analysis using a custom build command
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
swift-custom-build:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: Swift analysis using a custom build command
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
# Only macOS latest and nightly-latest support Swift 5.7.1
if: runner.os == 'Linux' || matrix.version == 'cached'
with:
swift-version: '5.7'
- uses: ./../action/init
with:
languages: swift
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
id: analysis
- name: Check database
shell: bash
run: |
SWIFT_DB="${{ fromJson(steps.analysis.outputs.db-locations).swift }}"
if [[ ! -d "$SWIFT_DB" ]]; then
echo "Did not create a database for Swift."
exit 1
fi
env:
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true'
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
CODEQL_ACTION_TEST_MODE: true


@@ -1,65 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Autobuild working directory
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
test-autobuild-working-dir:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
name: Autobuild working directory
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Test setup
shell: bash
run: |
# Make sure that Gradle build succeeds in autobuild-dir ...
cp -a ../action/tests/java-repo autobuild-dir
# ... and fails if attempted in the current directory
echo > build.gradle
- uses: ./../action/init
with:
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/autobuild
with:
working-directory: autobuild-dir
- uses: ./../action/analyze
- name: Check database
shell: bash
run: |
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d java ]]; then
echo "Did not find a Java database"
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true


@@ -1,55 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Local CodeQL bundle
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
test-local-codeql:
strategy:
matrix:
include:
- os: ubuntu-latest
version: nightly-latest
name: Local CodeQL bundle
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Fetch a CodeQL bundle
shell: bash
env:
CODEQL_URL: ${{ steps.prepare-test.outputs.tools-url }}
run: |
wget "$CODEQL_URL"
- uses: ./../action/init
with:
tools: ./codeql-bundle.tar.gz
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
CODEQL_ACTION_TEST_MODE: true
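Before handing a locally downloaded bundle to the tools: input, it can be sanity-checked from a shell. A sketch, assuming the archive was saved as codeql-bundle.tar.gz in the current directory (a debugging aid, not a step in the workflow):

    # The bundle unpacks to a top-level codeql/ directory containing the CLI launcher.
    tar -xzf codeql-bundle.tar.gz
    ./codeql/codeql version --format=terse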

.github/workflows/__test-proxy.yml

@@ -1,56 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Proxy test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
test-proxy:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
name: Proxy test
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
env:
https_proxy: http://squid-proxy:3128
CODEQL_ACTION_TEST_MODE: true
container:
image: ubuntu:22.04
options: --dns 127.0.0.1
services:
squid-proxy:
image: ubuntu/squid:latest
ports:
- 3128:3128
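If this job fails, it can help to confirm that the squid sidecar is reachable from the job container before init runs. A debugging sketch, not part of the workflow, using the same hostname and port as the service definition above:

    # Send one HEAD request through the proxy; a non-zero exit code means the proxy is not usable.
    curl --silent --show-error --head --proxy http://squid-proxy:3128 https://github.com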


@@ -1,104 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Test unsetting environment variables
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
unset-environment:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
name: Test unsetting environment variables
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
- uses: ./../action/analyze
id: analysis
- shell: bash
run: |
CPP_DB="${{ fromJson(steps.analysis.outputs.db-locations).cpp }}"
if [[ ! -d "$CPP_DB" ]] || [[ ! "$CPP_DB" == "${RUNNER_TEMP}/customDbLocation/cpp" ]]; then
echo "::error::Did not create a database for CPP, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/cpp' but actual was '${CPP_DB}'"
exit 1
fi
CSHARP_DB="${{ fromJson(steps.analysis.outputs.db-locations).csharp }}"
if [[ ! -d "$CSHARP_DB" ]] || [[ ! "$CSHARP_DB" == "${RUNNER_TEMP}/customDbLocation/csharp" ]]; then
echo "::error::Did not create a database for C Sharp, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/csharp' but actual was '${CSHARP_DB}'"
exit 1
fi
GO_DB="${{ fromJson(steps.analysis.outputs.db-locations).go }}"
if [[ ! -d "$GO_DB" ]] || [[ ! "$GO_DB" == "${RUNNER_TEMP}/customDbLocation/go" ]]; then
echo "::error::Did not create a database for Go, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/go' but actual was '${GO_DB}'"
exit 1
fi
JAVA_DB="${{ fromJson(steps.analysis.outputs.db-locations).java }}"
if [[ ! -d "$JAVA_DB" ]] || [[ ! "$JAVA_DB" == "${RUNNER_TEMP}/customDbLocation/java" ]]; then
echo "::error::Did not create a database for Java, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/java' but actual was '${JAVA_DB}'"
exit 1
fi
JAVASCRIPT_DB="${{ fromJson(steps.analysis.outputs.db-locations).javascript }}"
if [[ ! -d "$JAVASCRIPT_DB" ]] || [[ ! "$JAVASCRIPT_DB" == "${RUNNER_TEMP}/customDbLocation/javascript" ]]; then
echo "::error::Did not create a database for Javascript, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/javascript' but actual was '${JAVASCRIPT_DB}'"
exit 1
fi
PYTHON_DB="${{ fromJson(steps.analysis.outputs.db-locations).python }}"
if [[ ! -d "$PYTHON_DB" ]] || [[ ! "$PYTHON_DB" == "${RUNNER_TEMP}/customDbLocation/python" ]]; then
echo "::error::Did not create a database for Python, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/python' but actual was '${PYTHON_DB}'"
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true


@@ -1,99 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: "PR Check - Upload-sarif: 'ref' and 'sha' from inputs"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
upload-ref-sha-input:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20211005
- os: windows-2019
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: windows-2019
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: "Upload-sarif: 'ref' and 'sha' from inputs"
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
upload: false
- uses: ./../action/upload-sarif
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
env:
CODEQL_ACTION_TEST_MODE: true
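The ref and sha inputs above are pinned to fixed values so the test is deterministic. In a workflow that uploads results for the current checkout instead, equivalent values can be derived from git; a sketch, not part of the test fixture:

    # Full symbolic ref (e.g. refs/heads/main) and the 40-character commit SHA of the current checkout.
    git symbolic-ref HEAD
    git rev-parse HEAD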


@@ -1,143 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Use a custom `checkout_path`
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
with-checkout-path:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20211005
- os: windows-2019
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: windows-2019
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: Use a custom `checkout_path`
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: actions/checkout@v3
with:
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
path: x/y/z/some-path
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
# it's enough to test one compiled language and one interpreted language
languages: csharp,javascript
source-path: x/y/z/some-path/tests/multi-language-repo
debug: true
- name: Build code (non-windows)
shell: bash
if: ${{ runner.os != 'Windows' }}
run: |
$CODEQL_RUNNER x/y/z/some-path/tests/multi-language-repo/build.sh
- name: Build code (windows)
shell: bash
if: ${{ runner.os == 'Windows' }}
run: |
x/y/z/some-path/tests/multi-language-repo/build.sh
- uses: ./../action/analyze
with:
checkout_path: x/y/z/some-path/tests/multi-language-repo
ref: v1.1.0
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
upload: false
- uses: ./../action/upload-sarif
with:
ref: v1.1.0
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
checkout_path: x/y/z/some-path/tests/multi-language-repo
- name: Verify SARIF after upload
shell: bash
run: |
EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
EXPECTED_REF="v1.1.0"
EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
echo "$RUNNER_TEMP/payload.json"
exit 1
fi
if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
echo "$RUNNER_TEMP/payload.json"
exit 1
fi
if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
echo "$RUNNER_TEMP/payload.json"
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true


@@ -1,25 +0,0 @@
name: Check Expected Release Files
on:
pull_request:
paths:
- .github/workflows/check-expected-release-files.yml
- src/defaults.json
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
jobs:
check-expected-release-files:
runs-on: ubuntu-latest
steps:
- name: Checkout CodeQL Action
uses: actions/checkout@v3
- name: Check Expected Release Files
run: |
bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"
set -x
for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz"; do
curl --location --fail --head --request GET "https://github.com/github/codeql-action/releases/download/$bundle_version/$expected_file" > /dev/null
done


@@ -1,91 +0,0 @@
name: "CodeQL action"
on:
push:
branches: [main, releases/v1, releases/v2]
pull_request:
branches: [main, releases/v1, releases/v2]
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
env:
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks
jobs:
# Identify the CodeQL tool versions to use in the analysis job.
check-codeql-versions:
runs-on: ubuntu-latest
outputs:
versions: ${{ steps.compare.outputs.versions }}
permissions:
security-events: write
steps:
- uses: actions/checkout@v3
- name: Init with default CodeQL bundle from the VM image
id: init-default
uses: ./init
with:
languages: javascript
- name: Remove empty database
# allows us to run init a second time
run: |
rm -rf "$RUNNER_TEMP/codeql_databases"
- name: Init with latest CodeQL bundle
id: init-latest
uses: ./init
with:
tools: latest
languages: javascript
- name: Compare default and latest CodeQL bundle versions
id: compare
env:
CODEQL_DEFAULT: ${{ steps.init-default.outputs.codeql-path }}
CODEQL_LATEST: ${{ steps.init-latest.outputs.codeql-path }}
run: |
CODEQL_VERSION_DEFAULT="$("$CODEQL_DEFAULT" version --format terse)"
CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)"
echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT"
echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST"
# If we're running on a pull request, run with both bundles, even if `tools: latest` would
# be the same as `tools: null`. This allows us to make the job for each of the bundles a
# required status check.
#
# If we're running on push, then we can skip running with `tools: latest` when it would be
# the same as running with `tools: null`.
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
VERSIONS_JSON='[null]'
else
VERSIONS_JSON='[null, "latest"]'
fi
# Output a JSON-encoded list with the distinct versions to test against.
echo "Suggested matrix config for analysis job: $VERSIONS_JSON"
echo "versions=${VERSIONS_JSON}" >> $GITHUB_OUTPUT
build:
needs: [check-codeql-versions]
strategy:
matrix:
os: [ubuntu-latest,windows-latest,macos-latest]
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
runs-on: ${{ matrix.os }}
permissions:
security-events: write
steps:
- uses: actions/checkout@v3
- uses: ./init
id: init
with:
languages: javascript
config-file: ./.github/codeql/codeql-config.yml
tools: ${{ matrix.tools }}
# confirm steps.init.outputs.codeql-path points to the codeql binary
- name: Print CodeQL Version
run: ${{steps.init.outputs.codeql-path}} version --format=json
- uses: ./analyze


@@ -1,219 +0,0 @@
# Tests that the generated code scanning config file contains the expected contents
name: Code-Scanning config CLI tests
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODEQL_PASS_CONFIG_TO_CLI: true
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
code-scanning-config-tests:
continue-on-error: true
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
# Code-Scanning config not created because environment variable is not set
name: Code Scanning Configuration tests
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Empty file
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: "{}"
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Packs from input
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"packs": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
}
languages: javascript
packs: dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Packs from input with +
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"packs": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
}
languages: javascript
packs: + dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries from input
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }]
}
languages: javascript
queries: ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries from input with +
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }]
}
languages: javascript
queries: + ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries and packs from input with +
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }],
"packs": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
}
languages: javascript
queries: + ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
packs: + dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries and packs from config
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/foo2/show_ifs.ql" }],
"packs": {
"javascript": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
}
}
languages: javascript
config-file-test: .github/codeql/queries-and-packs-config.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries and packs from config overridden by input
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }],
"packs": ["codeql/javascript-queries"]
}
languages: javascript
queries: ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
packs: codeql/javascript-queries
config-file-test: .github/codeql/queries-and-packs-config.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries and packs from config merging with input
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [
{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/foo2/show_ifs.ql" },
{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }
],
"packs": {
"javascript": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2", "codeql/javascript-queries" ]
}
}
languages: javascript
queries: + ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
packs: + codeql/javascript-queries
config-file-test: .github/codeql/queries-and-packs-config.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Multi-language packs from config
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"packs": {
"javascript": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ],
"ruby": ["codeql/ruby-queries"]
},
"queries": [
{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/foo2/show_ifs.ql" }
]
}
languages: javascript,ruby
config-file-test: .github/codeql/multi-language-packs-config.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Other config properties
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"name": "Config using all properties",
"packs": ["codeql/javascript-queries" ],
"disable-default-queries": true,
"paths-ignore": ["xxx"],
"paths": ["yyy"]
}
languages: javascript
packs: + codeql/javascript-queries
config-file-test: .github/codeql/other-config-properties.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Config not generated when env var is not set
if: success() || failure()
env:
CODEQL_PASS_CONFIG_TO_CLI: false
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: ""
languages: javascript
packs: + codeql/javascript-queries
config-file-test: .github/codeql/other-config-properties.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}


@@ -1,91 +0,0 @@
# Checks logs, SARIF, and database bundle debug artifacts exist
# when the analyze step fails.
name: PR Check - Debug artifacts after failure
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
upload-artifacts:
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
name: Upload debug artifacts after failure in analyze
continue-on-error: true
env:
CODEQL_ACTION_TEST_MODE: true
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Dump GitHub event
run: cat "${GITHUB_EVENT_PATH}"
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: latest
- uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: my-debug-artifacts
debug-database-name: my-db
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
id: analysis
with:
expect-error: true
ram: 1
download-and-check-artifacts:
name: Download and check debug artifacts after failure in analyze
needs: upload-artifacts
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v3
- name: Check expected artifacts exist
shell: bash
run: |
OPERATING_SYSTEMS="ubuntu-latest macos-latest"
LANGUAGES="cpp csharp go java javascript python"
for os in $OPERATING_SYSTEMS; do
pushd "./my-debug-artifacts-$os"
echo "Artifacts from run on $os:"
for language in $LANGUAGES; do
echo "- Checking $language"
if [[ ! -f "my-db-$language-partial.zip" ]] ; then
echo "Missing a partial database bundle for $language"
exit 1
fi
if [[ ! -d "log" ]] ; then
echo "Missing database initialization logs"
exit 1
fi
if [[ ! "$language" == "go" ]] && [[ ! -d "$language/log" ]] ; then
echo "Missing logs for $language"
exit 1
fi
done
popd
done
env:
GO111MODULE: auto


@@ -1,117 +0,0 @@
# Checks logs, SARIF, and database bundle debug artifacts exist.
name: PR Check - Debug artifact upload
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
upload-artifacts:
strategy:
matrix:
include:
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: Upload debug artifacts
env:
CODEQL_ACTION_TEST_MODE: true
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: my-debug-artifacts
debug-database-name: my-db
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
id: analysis
download-and-check-artifacts:
name: Download and check debug artifacts
needs: upload-artifacts
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v3
- name: Check expected artifacts exist
shell: bash
run: |
VERSIONS="stable-20211005 stable-20220120 stable-20220401 cached latest nightly-latest"
LANGUAGES="cpp csharp go java javascript python"
for version in $VERSIONS; do
if [[ "$version" =~ stable-(20211005|20220120|20210809) ]]; then
# Note the absence of the period in "ubuntu-2004": this is present in the image name
# but not the artifact name
OPERATING_SYSTEMS="ubuntu-2004 macos-latest"
else
OPERATING_SYSTEMS="ubuntu-latest macos-latest"
fi
for os in $OPERATING_SYSTEMS; do
pushd "./my-debug-artifacts-$os-$version"
echo "Artifacts from version $version on $os:"
for language in $LANGUAGES; do
echo "- Checking $language"
if [[ ! -f "$language.sarif" ]] ; then
echo "Missing a SARIF file for $language"
exit 1
fi
if [[ ! -f "my-db-$language.zip" ]] ; then
echo "Missing a database bundle for $language"
exit 1
fi
if [[ ! -d "$language/log" ]] ; then
echo "Missing logs for $language"
exit 1
fi
done
popd
done
done
env:
GO111MODULE: auto


@@ -1,47 +0,0 @@
name: Check queries that ran
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
expected-queries:
name: Expected Queries Tests
env:
CODEQL_ACTION_TEST_MODE: true
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: latest
- uses: ./../action/init
with:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
upload: false
- name: Check Sarif
uses: ./../action/.github/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: js/incomplete-hostname-regexp,js/path-injection
queries-not-run: foo,bar


@@ -1,148 +0,0 @@
# This workflow runs after a release of the action. For v2 releases, it merges any changes from the
# release back into the main branch. Typically, this is just a single commit that updates the
# changelog. For v2 and v1 releases, it then (a) tags the merge commit on the release branch that
# represents the new release with an `vx.y.z` tag and (b) updates the `vx` tag to refer to this
# commit.
name: Tag release and merge back
on:
workflow_dispatch:
inputs:
baseBranch:
description: 'The base branch to merge into'
default: main
required: false
push:
branches:
- releases/v1
- releases/v2
jobs:
merge-back:
runs-on: ubuntu-latest
if: github.repository == 'github/codeql-action'
env:
BASE_BRANCH: "${{ github.event.inputs.baseBranch || 'main' }}"
HEAD_BRANCH: "${{ github.head_ref || github.ref }}"
steps:
- name: Dump environment
run: env
- name: Dump GitHub context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "${GITHUB_CONTEXT}"
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
- name: Update git config
run: |
git config --global user.email "github-actions@github.com"
git config --global user.name "github-actions[bot]"
- name: Get version and new branch
id: getVersion
run: |
VERSION="v$(jq '.version' -r 'package.json')"
echo "version=${VERSION}" >> $GITHUB_OUTPUT
short_sha="${GITHUB_SHA:0:8}"
NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${short_sha}"
echo "newBranch=${NEW_BRANCH}" >> $GITHUB_OUTPUT
- name: Dump branches
env:
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
run: |
echo "BASE_BRANCH ${BASE_BRANCH}"
echo "HEAD_BRANCH ${HEAD_BRANCH}"
echo "NEW_BRANCH ${NEW_BRANCH}"
- name: Create mergeback branch
env:
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
run: |
git checkout -b "${NEW_BRANCH}"
- name: Check for tag
id: check
env:
VERSION: "${{ steps.getVersion.outputs.version }}"
run: |
set +e # don't fail on an errored command
git ls-remote --tags origin | grep "${VERSION}"
exists="$?"
if [ "${exists}" -eq 0 ]; then
echo "Tag ${VERSION} exists. Not going to re-release."
echo "exists=true" >> $GITHUB_OUTPUT
else
echo "Tag ${VERSION} does not exist yet."
fi
# we didn't tag the release during the update-release-branch workflow because the
# commit that actually makes it to the release branch is a merge commit,
# and not yet known during the first workflow. We tag now because we know the correct commit.
- name: Tag release
if: steps.check.outputs.exists != 'true'
env:
VERSION: ${{ steps.getVersion.outputs.version }}
run: |
# Unshallow the repo in order to allow pushes
git fetch --unshallow
# Create the `vx.y.z` tag
git tag --annotate "${VERSION}" --message "${VERSION}"
# Update the `vx` tag
major_version_tag=$(cut -d '.' -f1 <<< "${VERSION}")
# Use `--force` to overwrite the major version tag
git tag --annotate "${major_version_tag}" --message "${major_version_tag}" --force
# Push the tags, using:
# - `--atomic` to make sure we either update both tags or neither (an intermediate state,
# e.g. where we update the v2.x.y tag on the remote but not the v2 tag, could result in
# unwanted Dependabot updates, e.g. from v2 to v2.x.y)
# - `--force` since we're overwriting the `vx` tag
git push origin --atomic --force refs/tags/"${VERSION}" refs/tags/"${major_version_tag}"
- name: Create mergeback branch
if: steps.check.outputs.exists != 'true' && contains(github.ref, 'releases/v2')
env:
VERSION: "${{ steps.getVersion.outputs.version }}"
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
run: |
set -exu
pr_title="Mergeback ${VERSION} ${HEAD_BRANCH} into ${BASE_BRANCH}"
pr_body=$(cat << EOF
This PR bumps the version number and updates the changelog after the ${VERSION} release.
Please do the following:
- [ ] Remove and re-add the "Update dependencies" label to the PR to trigger just this workflow.
- [ ] Wait for the "Update dependencies" workflow to push a commit updating the dependencies.
- [ ] Mark the PR as ready for review to trigger the full set of PR checks.
- [ ] Approve and merge the PR. When merging the PR, make sure "Create a merge commit" is
selected rather than "Squash and merge" or "Rebase and merge".
EOF
)
# Update the version number ready for the next release
npm version patch --no-git-tag-version
# Update the changelog
perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
git add .
git commit -m "Update changelog and version after ${VERSION}"
git push origin "${NEW_BRANCH}"
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft
# so that a maintainer can take the PR out of draft, thereby triggering the PR checks.
gh pr create \
--head "${NEW_BRANCH}" \
--base "${BASE_BRANCH}" \
--title "${pr_title}" \
--label "Update dependencies" \
--body "${pr_body}" \
--assignee "${GITHUB_ACTOR}" \
--draft


@@ -1,8 +1,8 @@
name: PR Checks
name: PR Checks (Basic Checks and Runner)
on:
push:
branches: [main, releases/v1, releases/v2]
branches: [main, v1]
pull_request:
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
@@ -10,111 +10,50 @@ on:
workflow_dispatch:
jobs:
check-js:
name: Check JS
runs-on: ubuntu-latest
timeout-minutes: 45
strategy:
matrix:
node-types-version: [12.12, current]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Lint
run: npm run-script lint
- name: Update version of @types/node
if: matrix.node-types-version != 'current'
env:
NODE_TYPES_VERSION: ${{ matrix.node-types-version }}
run: |
# Export `NODE_TYPES_VERSION` so it's available to jq
export NODE_TYPES_VERSION="${NODE_TYPES_VERSION}"
contents=$(jq '.devDependencies."@types/node" = env.NODE_TYPES_VERSION' package.json)
echo "${contents}" > package.json
# Usually we run `npm install` on macOS to ensure that we pick up macOS-only dependencies.
# However we're not checking in the updated lockfile here, so it's fine to run
# `npm install` on Linux.
npm install
if [ ! -z "$(git status --porcelain)" ]; then
git config --global user.email "github-actions@github.com"
git config --global user.name "github-actions[bot]"
# The period in `git add --all .` ensures that we stage deleted files too.
git add --all .
git commit -m "Use @types/node=${NODE_TYPES_VERSION}"
fi
- name: Check generated JS
run: .github/workflows/script/check-js.sh
check-node-modules:
name: Check modules up to date
runner-analyze-csharp-autobuild-macos:
name: Runner macos autobuild C# analyze
runs-on: macos-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v3
- name: Check node modules up to date
run: .github/workflows/script/check-node-modules.sh
- uses: actions/checkout@v2
check-file-contents:
name: Check file contents
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Checkout
uses: actions/checkout@v3
# Checks for any conflict markers created by git. This check is primarily intended to validate that
# any merge conflicts in the v2 -> v1 backport PR are fixed before the PR is merged.
- name: Check for merge conflicts
- name: Move codeql-action
shell: bash
run: |
# Use `|| true` since grep returns exit code 1 if there are no matches, and we don't want
# this to fail the workflow.
FILES_WITH_CONFLICTS=$(grep --extended-regexp --ignore-case --line-number --recursive \
'^(<<<<<<<|>>>>>>>)' . || true)
if [[ "${FILES_WITH_CONFLICTS}" ]]; then
echo "Fail: Found merge conflict markers in the following files:"
echo ""
echo "${FILES_WITH_CONFLICTS}"
exit 1
else
echo "Success: Found no merge conflict markers."
fi
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Set up Python
uses: actions/setup-python@v3
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Initialize dotnet
run: dotnet restore
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
rm build.sh
../action/runner/dist/codeql-runner-macos autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- uses: actions/upload-artifact@v2
if: always()
with:
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install ruamel.yaml
# Ensure the generated PR check workflows are up to date.
- name: Verify PR checks up to date
run: .github/workflows/script/verify-pr-checks.sh
npm-test:
name: Unit Test
needs: [check-js, check-node-modules]
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
timeout-minutes: 45
steps:
- uses: actions/checkout@v3
- name: npm test
run: |
# Run any commands referenced in package.json using Bash, otherwise
# we won't be able to find them on Windows.
npm config set script-shell bash
npm test
name: my-artifact
path: |
bin
obj


@@ -1,190 +0,0 @@
name: Test Python Package Installation on Linux and Mac
on:
push:
branches: [main, releases/v1, releases/v2]
pull_request:
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
paths:
# Changes to this workflow.
- '.github/workflows/python-deps.yml'
# Changes to the Python package installation scripts and their tests.
- 'python-setup/**'
# Changes to the default CodeQL bundle version.
- '**/defaults.json'
schedule:
# Weekly on Monday.
- cron: '0 0 * * 1'
workflow_dispatch:
jobs:
test-setup-python-scripts:
timeout-minutes: 45
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, ubuntu-22.04, macos-latest]
python_deps_type: [pipenv, poetry, requirements, setup_py]
python_version: [2, 3]
exclude:
# Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
- python_version: 2
python_deps_type: poetry
# Python2 and pipenv are not supported since pipenv v2021.11.5
- python_version: 2
python_deps_type: pipenv
# Python2 is not available on ubuntu-22.04 by default -- see https://github.com/github/codeql-action/pull/1257
- python_version: 2
os: ubuntu-22.04
env:
PYTHON_DEPS_TYPE: ${{ matrix.python_deps_type }}
PYTHON_VERSION: ${{ matrix.python_version }}
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
- name: Initialize CodeQL
uses: ./init
id: init
with:
tools: latest
languages: python
setup-python-dependencies: false
- name: Test Auto Package Installation
run: |
set -x
$GITHUB_WORKSPACE/python-setup/install_tools.sh
cd $GITHUB_WORKSPACE/python-setup/tests/${PYTHON_DEPS_TYPE}/requests-${PYTHON_VERSION}
case ${{ matrix.os }} in
ubuntu-20.04*) basePath="/opt";;
ubuntu-22.04*) basePath="/opt";;
macos-latest*) basePath="/Users/runner";;
esac
echo ${basePath}
$GITHUB_WORKSPACE/python-setup/auto_install_packages.py "$(dirname ${{steps.init.outputs.codeql-path}})"
- name: Setup for extractor
run: |
echo $CODEQL_PYTHON
# only run if $CODEQL_PYTHON is set
if [ ! -z $CODEQL_PYTHON ]; then
$GITHUB_WORKSPACE/python-setup/tests/from_python_exe.py $CODEQL_PYTHON;
fi
- name: Verify packages installed
run: |
$GITHUB_WORKSPACE/python-setup/tests/check_requests_2_26_0.sh ${PYTHON_VERSION}
# This one shouldn't fail, but also won't install packages
test-setup-python-scripts-non-standard-location:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, ubuntu-22.04, macos-latest]
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
- name: Initialize CodeQL
uses: ./init
id: init
with:
tools: latest
languages: python
setup-python-dependencies: false
- name: Test Auto Package Installation
run: |
set -x
$GITHUB_WORKSPACE/python-setup/install_tools.sh
cd $GITHUB_WORKSPACE/python-setup/tests/requirements/non-standard-location
case ${{ matrix.os }} in
ubuntu-20.04*) basePath="/opt";;
ubuntu-22.04*) basePath="/opt";;
macos-latest*) basePath="/Users/runner";;
esac
echo ${basePath}
$GITHUB_WORKSPACE/python-setup/auto_install_packages.py "$(dirname ${{steps.init.outputs.codeql-path}})"
- name: Setup for extractor
run: |
echo $CODEQL_PYTHON
# only run if $CODEQL_PYTHON is set
if [ ! -z $CODEQL_PYTHON ]; then
$GITHUB_WORKSPACE/python-setup/tests/from_python_exe.py $CODEQL_PYTHON;
fi
- name: Verify packages installed
run: |
test -z $LGTM_INDEX_IMPORT_PATH
test-setup-python-scripts-windows:
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
python_deps_type: [pipenv, poetry, requirements, setup_py]
python_version: [2, 3]
exclude:
# Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
- python_version: 2
python_deps_type: poetry
# Python2 and pipenv are not supported since pipenv v2021.11.5
- python_version: 2
python_deps_type: pipenv
env:
CODEQL_ACTION_TEST_MODE: true
PYTHON_DEPS_TYPE: ${{ matrix.python_deps_type }}
PYTHON_VERSION: ${{ matrix.python_version }}
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
- uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python_version }}
- name: Initialize CodeQL
uses: ./init
with:
tools: latest
languages: python
setup-python-dependencies: false
- name: Test Auto Package Installation
run: |
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\install_tools.ps1"
powershell -File $cmd
cd $Env:GITHUB_WORKSPACE\\python-setup/tests/$Env:PYTHON_DEPS_TYPE/requests-$Env:PYTHON_VERSION
$DefaultsPath = Join-Path (Join-Path $Env:GITHUB_WORKSPACE "src") "defaults.json"
$CodeQLBundleName = (Get-Content -Raw -Path $DefaultsPath | ConvertFrom-Json).bundleVersion
$CodeQLVersion = "0.0.0-" + $CodeQLBundleName.split("-")[-1]
py -3 $Env:GITHUB_WORKSPACE\\python-setup\\auto_install_packages.py C:\\hostedtoolcache\\windows\\CodeQL\\$CodeQLVersion\\x64\\codeql
- name: Setup for extractor
run: |
echo $Env:CODEQL_PYTHON
py -3 $Env:GITHUB_WORKSPACE\\python-setup\\tests\\from_python_exe.py $Env:CODEQL_PYTHON
- name: Verify packages installed
run: |
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\tests\\check_requests_2_26_0.ps1"
powershell -File $cmd $Env:PYTHON_VERSION

View File

@@ -1,56 +0,0 @@
name: Query filters tests
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
query-filters:
name: Query Filters Tests
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: latest
- name: Check SARIF for default queries with Single include, Single exclude
uses: ./../action/.github/query-filter-test
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: js/zipslip
queries-not-run: js/path-injection
config-file: ./.github/codeql/codeql-config-query-filters1.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Check SARIF for query packs with Single include, Single exclude
uses: ./../action/.github/query-filter-test
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: js/zipslip,javascript/example/empty-or-one-block
queries-not-run: js/path-injection
config-file: ./.github/codeql/codeql-config-query-filters2.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Check SARIF for query packs and local queries with Single include, Single exclude
uses: ./../action/.github/query-filter-test
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: js/zipslip,javascript/example/empty-or-one-block,inrepo-javascript-querypack/show-ifs
queries-not-run: js/path-injection,complex-python-querypack/show-ifs,complex-python-querypack/foo/bar/show-ifs
config-file: ./.github/codeql/codeql-config-query-filters3.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
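The three steps above pass pre-built config files (`codeql-config-query-filters1.yml` and friends) that are not included in this comparison. For orientation only: a `query-filters` block of the kind these tests exercise is a list of `include`/`exclude` entries matched against query metadata. A minimal, purely illustrative sketch consistent with the first step's expectations might be:

```yaml
# Illustrative only — not the actual contents of codeql-config-query-filters1.yml.
# Keep the default query suite, re-include js/zipslip, and filter out js/path-injection.
query-filters:
  - include:
      id: js/zipslip
  - exclude:
      id: js/path-injection
```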

View File

@@ -14,7 +14,7 @@ npm run-script build
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: JavaScript files are not up to date. Run 'rm -rf lib && npm run-script build' to update"
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
git status
exit 1
fi

View File

@@ -7,10 +7,7 @@ if [ ! -z "$(git status --porcelain)" ]; then
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Pin npm to v8 since v9 doesn't support Node 12.
# When updating this, make sure to update the npm version in
# `.github/workflows/update-dependencies.yml` too.
sudo npm install --force -g npm@^8.19.3
sudo npm install --force -g npm@latest
# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
@@ -18,7 +15,7 @@ npm run removeNPMAbsolutePaths
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: node_modules are not up to date. Add the 'Update dependencies' label to your PR to update them. Note it is important that node modules are updated on macOS and not any other operating system as there is one dependency (fsevents) that is needed for macOS and may not be installed if dependencies are updated on a Windows or Linux machine."
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci && npm run removeNPMAbsolutePaths' on a macOS machine to update. Note it is important this command is run on macOS and not any other operating system as there is one dependency (fsevents) that is needed for macOS and may not be installed if the command is run on a Windows or Linux machine."
git status
exit 1
fi

View File

@@ -1,37 +0,0 @@
#!/usr/bin/env bash
# Update the required checks based on the current branch.
# Typically, this will be main.
if ! gh auth status 2>/dev/null; then
gh auth status
echo "Failed: Not authorized. This script requires admin access to github/codeql-action through the gh CLI."
exit 1
fi
if [ "$#" -eq 1 ]; then
# If we were passed an argument, use that as the SHA
GITHUB_SHA="$0"
elif [ "$#" -gt 1 ]; then
echo "Usage: $0 [SHA]"
echo "Update the required checks based on the SHA, or main."
exit 1
elif [ -z "$GITHUB_SHA" ]; then
# If we don't have a SHA, use main
GITHUB_SHA="$(git rev-parse main)"
fi
echo "Getting checks for $GITHUB_SHA"
# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") or contains("update") or contains("test-setup-python-scripts") | not)] | unique | sort')"
echo "$CHECKS" | jq
echo "{\"contexts\": ${CHECKS}}" > checks.json
for BRANCH in main releases/v2 releases/v1; do
echo "Updating $BRANCH"
gh api --silent -X "PATCH" "repos/github/codeql-action/branches/$BRANCH/protection/required_status_checks" --input checks.json
done
rm checks.json

View File

@@ -1,43 +0,0 @@
name: Update dependencies
on:
pull_request_target:
types: [opened, synchronize, reopened, ready_for_review, labeled]
jobs:
update:
name: Update dependencies
timeout-minutes: 45
runs-on: macos-latest
if: contains(github.event.pull_request.labels.*.name, 'Update dependencies') && (github.event.pull_request.head.repo.full_name == 'github/codeql-action')
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Remove PR label
env:
REPOSITORY: '${{ github.repository }}'
PR_NUMBER: '${{ github.event.pull_request.number }}'
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
run: |
gh api "repos/$REPOSITORY/issues/$PR_NUMBER/labels/Update%20dependencies" -X DELETE
- name: Push updated dependencies
env:
BRANCH: '${{ github.head_ref }}'
run: |
git fetch origin "$BRANCH" --depth=1
git checkout "origin/$BRANCH"
# Pin npm to v8 since v9 doesn't support Node 12.
# When updating this, make sure to update the npm version in
# `.github/workflows/script/check-node-modules.sh` too.
sudo npm install --force -g npm@^8.19.3
npm install
npm ci
npm run removeNPMAbsolutePaths
if [ ! -z "$(git status --porcelain)" ]; then
git config --global user.email "github-actions@github.com"
git config --global user.name "github-actions[bot]"
git add node_modules
git commit -am "Update checked-in dependencies"
git push origin "HEAD:$BRANCH"
fi

View File

@@ -1,62 +0,0 @@
name: Update release branch
on:
# You can trigger this workflow via workflow dispatch to start a release.
# This will open a PR to update the v2 release branch.
workflow_dispatch:
# When the v2 release is complete, this workflow will open a PR to update the v1 release branch.
push:
branches:
- releases/v2
jobs:
update:
timeout-minutes: 45
runs-on: ubuntu-latest
if: github.repository == 'github/codeql-action'
steps:
- name: Dump environment
run: env
- name: Dump GitHub context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v3
with:
# Need full history so we calculate diffs
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install PyGithub==1.55 requests
- name: Update git config
run: |
git config --global user.email "github-actions@github.com"
git config --global user.name "github-actions[bot]"
- name: Update v2 release branch
if: github.event_name == 'workflow_dispatch'
run: |
python .github/update-release-branch.py \
--github-token ${{ secrets.GITHUB_TOKEN }} \
--repository-nwo ${{ github.repository }} \
--mode v2-release \
--conductor ${GITHUB_ACTOR}
- name: Update v1 release branch
if: github.event_name == 'push'
run: |
python .github/update-release-branch.py \
--github-token ${{ secrets.GITHUB_TOKEN }} \
--repository-nwo ${{ github.repository }} \
--mode v1-release \
--conductor ${GITHUB_ACTOR}

View File

@@ -1,47 +0,0 @@
name: Update Supported Enterprise Server Versions
on:
schedule:
- cron: "0 0 * * *"
jobs:
update-supported-enterprise-server-versions:
name: Update Supported Enterprise Server Versions
timeout-minutes: 45
runs-on: ubuntu-latest
if: ${{ github.repository == 'github/codeql-action' }}
steps:
- name: Setup Python
uses: actions/setup-python@v3
with:
python-version: "3.7"
- name: Checkout CodeQL Action
uses: actions/checkout@v3
- name: Checkout Enterprise Releases
uses: actions/checkout@v3
with:
repository: github/enterprise-releases
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}
path: ${{ github.workspace }}/enterprise-releases/
- name: Update Supported Enterprise Server Versions
run: |
cd ./.github/workflows/update-supported-enterprise-server-versions/
python3 -m pip install pipenv
pipenv install
pipenv run ./update.py
rm --recursive "$ENTERPRISE_RELEASES_PATH"
npm run build
env:
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
- name: Commit Changes
uses: peter-evans/create-pull-request@c7f493a8000b8aeb17a1332e326ba76b57cb83eb # v3.4.1
with:
commit-message: Update supported GitHub Enterprise Server versions.
title: Update supported GitHub Enterprise Server versions.
body: ""
author: GitHub <noreply@github.com>
branch: update-supported-enterprise-server-versions
draft: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

4
.gitignore vendored
View File

@@ -1,2 +1,2 @@
# Ignore for example failing-tests.json from AVA
node_modules/.cache
/runner/dist/
/runner/node_modules/

1
.npmrc
View File

@@ -1 +0,0 @@
lockfile-version=3

View File

@@ -1,214 +1,8 @@
# CodeQL Action Changelog
# CodeQL Action and CodeQL Runner Changelog
## [UNRELEASED]
No user facing changes.
## 2.1.35 - 01 Dec 2022
No user facing changes.
## 2.1.34 - 25 Nov 2022
- Update default CodeQL bundle version to 2.11.4. [#1391](https://github.com/github/codeql-action/pull/1391)
- Fixed a bug where sometimes the `init` action and the `analyze` action would have different sets of experimental feature flags enabled. [#1384](https://github.com/github/codeql-action/pull/1384)
## 2.1.33 - 16 Nov 2022
- Go is now analyzed in the same way as other compiled languages such as C/C++, C#, and Java. This completes the rollout of the feature described in [CodeQL Action version 2.1.27](#2127---06-oct-2022). [#1322](https://github.com/github/codeql-action/pull/1322)
- Bump the minimum CodeQL bundle version to 2.6.3. [#1358](https://github.com/github/codeql-action/pull/1358)
## 2.1.32 - 14 Nov 2022
- Update default CodeQL bundle version to 2.11.3. [#1348](https://github.com/github/codeql-action/pull/1348)
- Update the ML-powered additional query pack for JavaScript to version 0.4.0. [#1351](https://github.com/github/codeql-action/pull/1351)
## 2.1.31 - 04 Nov 2022
- The `rb/weak-cryptographic-algorithm` Ruby query has been updated to no longer report uses of hash functions such as `MD5` and `SHA1` even if they are known to be weak. These hash algorithms are used very often in non-sensitive contexts, making the query too imprecise in practice. For more information, see the corresponding change in the [github/codeql repository](https://github.com/github/codeql/pull/11129). [#1344](https://github.com/github/codeql-action/pull/1344)
## 2.1.30 - 02 Nov 2022
- Improve the error message when using CodeQL bundle version 2.7.2 and earlier in a workflow that runs on a runner image such as `ubuntu-22.04` that uses glibc version 2.34 and later. [#1334](https://github.com/github/codeql-action/pull/1334)
## 2.1.29 - 26 Oct 2022
- Update default CodeQL bundle version to 2.11.2. [#1320](https://github.com/github/codeql-action/pull/1320)
## 2.1.28 - 18 Oct 2022
- Update default CodeQL bundle version to 2.11.1. [#1294](https://github.com/github/codeql-action/pull/1294)
- Replace uses of GitHub Actions command `set-output` because it is now deprecated. See more information in the [GitHub Changelog](https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/). [#1301](https://github.com/github/codeql-action/pull/1301)
## 2.1.27 - 06 Oct 2022
- We are rolling out a feature of the CodeQL Action in October 2022 that changes the way that Go code is analyzed to be more consistent with other compiled languages like C/C++, C#, and Java. You do not need to alter your code scanning workflows. If you encounter any problems, please [file an issue](https://github.com/github/codeql-action/issues) or open a private ticket with GitHub Support and request an escalation to engineering.
## 2.1.26 - 29 Sep 2022
- Update default CodeQL bundle version to 2.11.0. [#1267](https://github.com/github/codeql-action/pull/1267)
## 2.1.25 - 21 Sep 2022
- We will soon be rolling out a feature of the CodeQL Action that stores some information used to make future runs faster in the GitHub Actions cache. Initially, this will only be enabled on JavaScript repositories, but we plan to add more languages to this soon. The new feature can be disabled by passing the `trap-caching: false` option to your workflow's `init` step, for example if you are already using the GitHub Actions cache for a different purpose and are near the storage limit for it.
- Add support for Python automatic dependency installation with Poetry 1.2 [#1258](https://github.com/github/codeql-action/pull/1258).
## 2.1.24 - 16 Sep 2022
No user facing changes.
## 2.1.23 - 14 Sep 2022
- Allow CodeQL packs to be downloaded from GitHub Enterprise Server instances, using the new `registries` input for the `init` action. [#1221](https://github.com/github/codeql-action/pull/1221)
- Update default CodeQL bundle version to 2.10.5. [#1240](https://github.com/github/codeql-action/pull/1240)
## 2.1.22 - 01 Sep 2022
- Downloading CodeQL packs has been moved to the `init` step. Previously, CodeQL packs were downloaded during the `analyze` step. [#1218](https://github.com/github/codeql-action/pull/1218)
- Update default CodeQL bundle version to 2.10.4. [#1224](https://github.com/github/codeql-action/pull/1224)
- The newly released [Poetry 1.2](https://python-poetry.org/blog/announcing-poetry-1.2.0) is not yet supported. In the most common case where the CodeQL Action is automatically installing Python dependencies, it will continue to install and use Poetry 1.1 on its own. However, in certain cases such as with self-hosted runners, you may need to ensure Poetry 1.1 is installed yourself.
## 2.1.21 - 25 Aug 2022
- Improve error messages when the code scanning configuration file includes an invalid `queries` block or an invalid `query-filters` block. [#1208](https://github.com/github/codeql-action/pull/1208)
- Fix a bug where Go build tracing could fail on Windows. [#1209](https://github.com/github/codeql-action/pull/1209)
## 2.1.20 - 22 Aug 2022
No user facing changes.
## 2.1.19 - 17 Aug 2022
- Add the ability to filter queries from a code scanning run by using the `query-filters` option in the code scanning configuration file. [#1098](https://github.com/github/codeql-action/pull/1098)
- In debug mode, debug artifacts are now uploaded even if a step in the Actions workflow fails. [#1159](https://github.com/github/codeql-action/pull/1159)
- Update default CodeQL bundle version to 2.10.3. [#1178](https://github.com/github/codeql-action/pull/1178)
- The combination of python2 and Pipenv is no longer supported. [#1181](https://github.com/github/codeql-action/pull/1181)
## 2.1.18 - 03 Aug 2022
- Update default CodeQL bundle version to 2.10.2. [#1156](https://github.com/github/codeql-action/pull/1156)
## 2.1.17 - 28 Jul 2022
- Update default CodeQL bundle version to 2.10.1. [#1143](https://github.com/github/codeql-action/pull/1143)
## 2.1.16 - 13 Jul 2022
- You can now quickly debug a job that uses the CodeQL Action by re-running the job from the GitHub UI and selecting the "Enable debug logging" option. [#1132](https://github.com/github/codeql-action/pull/1132)
- You can now see diagnostic messages produced by the analysis in the logs of the `analyze` Action by enabling debug mode. To enable debug mode, pass `debug: true` to the `init` Action, or [enable step debug logging](https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging). This feature is available for CodeQL CLI version 2.10.0 and later. [#1133](https://github.com/github/codeql-action/pull/1133)
## 2.1.15 - 28 Jun 2022
- CodeQL query packs listed in the `packs` configuration field will be skipped if their target language is not being analyzed in the current Actions job. Previously, this would throw an error. [#1116](https://github.com/github/codeql-action/pull/1116)
- The combination of python2 and poetry is no longer supported. See <https://github.com/actions/setup-python/issues/374> for more details. [#1124](https://github.com/github/codeql-action/pull/1124)
- Update default CodeQL bundle version to 2.10.0. [#1123](https://github.com/github/codeql-action/pull/1123)
## 2.1.14 - 22 Jun 2022
No user facing changes.
## 2.1.13 - 21 Jun 2022
- Update default CodeQL bundle version to 2.9.4. [#1100](https://github.com/github/codeql-action/pull/1100)
## 2.1.12 - 01 Jun 2022
- Update default CodeQL bundle version to 2.9.3. [#1084](https://github.com/github/codeql-action/pull/1084)
## 2.1.11 - 17 May 2022
- Update default CodeQL bundle version to 2.9.2. [#1074](https://github.com/github/codeql-action/pull/1074)
## 2.1.10 - 10 May 2022
- Update default CodeQL bundle version to 2.9.1. [#1056](https://github.com/github/codeql-action/pull/1056)
- When `wait-for-processing` is enabled, the workflow will now fail if there were any errors that occurred during processing of the analysis results.
## 2.1.9 - 27 Apr 2022
- Add `working-directory` input to the `autobuild` action. [#1024](https://github.com/github/codeql-action/pull/1024)
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#1007](https://github.com/github/codeql-action/pull/1007)
- Update default CodeQL bundle version to 2.9.0.
- Fix a bug where [status reporting fails on Windows](https://github.com/github/codeql-action/issues/1041). [#1042](https://github.com/github/codeql-action/pull/1042)
## 2.1.8 - 08 Apr 2022
- Update default CodeQL bundle version to 2.8.5. [#1014](https://github.com/github/codeql-action/pull/1014)
- Fix error where the init action would fail due to a GitHub API request that was taking too long to complete [#1025](https://github.com/github/codeql-action/pull/1025)
## 2.1.7 - 05 Apr 2022
- A bug where additional queries specified in the workflow file would sometimes not be respected has been fixed. [#1018](https://github.com/github/codeql-action/pull/1018)
## 2.1.6 - 30 Mar 2022
- [v2+ only] The CodeQL Action now runs on Node.js v16. [#1000](https://github.com/github/codeql-action/pull/1000)
- Update default CodeQL bundle version to 2.8.4. [#990](https://github.com/github/codeql-action/pull/990)
- Fix a bug where an invalid `commit_oid` was being sent to code scanning when a custom checkout path was being used. [#956](https://github.com/github/codeql-action/pull/956)
## 1.1.5 - 15 Mar 2022
- Update default CodeQL bundle version to 2.8.3.
- The CodeQL runner is now deprecated and no longer being released. For more information, see [CodeQL runner deprecation](https://github.blog/changelog/2021-09-21-codeql-runner-deprecation/).
- Fix two bugs that cause action failures with GHES 3.3 or earlier. [#978](https://github.com/github/codeql-action/pull/978)
- Fix `not a permitted key` invalid requests with GHES 3.1 or earlier
- Fix `RUNNER_ARCH environment variable must be set` errors with GHES 3.3 or earlier
## 1.1.4 - 07 Mar 2022
- Update default CodeQL bundle version to 2.8.2. [#950](https://github.com/github/codeql-action/pull/950)
- Fix a bug where old results can be uploaded if the languages in a repository change when using a non-ephemeral self-hosted runner. [#955](https://github.com/github/codeql-action/pull/955)
## 1.1.3 - 23 Feb 2022
- Fix a bug where the CLR traces can continue tracing even after tracing should be stopped. [#938](https://github.com/github/codeql-action/pull/938)
## 1.1.2 - 17 Feb 2022
- Due to potential issues for GHES 3.13.3 customers who are using recent versions of the CodeQL Action via GHES Connect, the CodeQL Action now uses Node.js v12 rather than Node.js v16. [#937](https://github.com/github/codeql-action/pull/937)
## 1.1.1 - 17 Feb 2022
- The CodeQL CLI versions up to and including version 2.4.4 are not compatible with the CodeQL Action 1.1.1 and later. The Action will emit an error if it detects that it is being used by an incompatible version of the CLI. [#931](https://github.com/github/codeql-action/pull/931)
- Update default CodeQL bundle version to 2.8.1. [#925](https://github.com/github/codeql-action/pull/925)
## 1.1.0 - 11 Feb 2022
- The CodeQL Action now uses Node.js v16. [#909](https://github.com/github/codeql-action/pull/909)
- Beware that the CodeQL build tracer in this release (and in all earlier releases) is incompatible with Windows 11 and Windows Server 2022. This incompatibility affects database extraction for compiled languages: cpp, csharp, go, and java. As a result, analyzing these languages with the `windows-latest` or `windows-2022` Actions virtual environments is currently unsupported. If you use any of these languages, please use the `windows-2019` Actions virtual environment or otherwise avoid these specific Windows versions until a new release fixes this incompatibility.
## 1.0.32 - 07 Feb 2022
- Add `sarif-id` as an output for the `upload-sarif` and `analyze` actions. [#889](https://github.com/github/codeql-action/pull/889)
- Add `ref` and `sha` inputs to the `analyze` action, which override the defaults provided by the GitHub Action context. [#889](https://github.com/github/codeql-action/pull/889)
- Update default CodeQL bundle version to 2.8.0. [#911](https://github.com/github/codeql-action/pull/911)
## 1.0.31 - 31 Jan 2022
- Remove `experimental` message when using custom CodeQL packages. [#888](https://github.com/github/codeql-action/pull/888)
- Add a better warning message stating that experimental features will be disabled if the workflow has been triggered by a pull request from a fork or the `security-events: write` permission is not present. [#882](https://github.com/github/codeql-action/pull/882)
## 1.0.30 - 24 Jan 2022
- Display a better error message when encountering a workflow that runs the `codeql-action/init` action multiple times. [#876](https://github.com/github/codeql-action/pull/876)
- Update default CodeQL bundle version to 2.7.6. [#877](https://github.com/github/codeql-action/pull/877)
## 1.0.29 - 21 Jan 2022
- The feature to wait for SARIF processing to complete after upload has been disabled by default due to a bug in its interaction with pull requests from forks.
## 1.0.28 - 18 Jan 2022
- Update default CodeQL bundle version to 2.7.5. [#866](https://github.com/github/codeql-action/pull/866)
- Fix a bug where SARIF files were failing upload due to an invalid test for unique categories. [#872](https://github.com/github/codeql-action/pull/872)
## 1.0.27 - 11 Jan 2022
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#855](https://github.com/github/codeql-action/pull/855)
## 1.0.26 - 10 Dec 2021
- Update default CodeQL bundle version to 2.7.3. [#842](https://github.com/github/codeql-action/pull/842)
- Update default CodeQL bundle version to 2.7.3. [#837](https://github.com/github/codeql-action/pull/837)
## 1.0.25 - 06 Dec 2021

View File

@@ -1,3 +1 @@
**/* @github/codeql-action-reviewers
/python-setup/ @github/codeql-python @github/codeql-action-reviewers

View File

@@ -38,6 +38,10 @@ To see the effect of your changes and to test them, push your changes in a branc
As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you're making, you may want to add a test to this file or extend an existing one.
### Building the CodeQL runner
Navigate to the `runner` directory and run `npm install` to install dependencies needed only for compiling the CodeQL runner. Run `npm run build-runner` to output files to the `runner/dist` directory.
## Submitting a pull request
1. [Fork][fork] and clone the repository
@@ -57,30 +61,16 @@ Here are a few things you can do that will increase the likelihood of your pull
## Releasing (write access required)
1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `releases/v2` release branch.
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v1` release branch.
You can start a release by triggering this workflow via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
1. The workflow run will open a pull request titled "Merge main into releases/v2". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
   A release is automatically started every Monday via a scheduled run of this workflow; however, you can start a release manually by triggering a run via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
1. The workflow run will open a pull request titled "Merge main into v1". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
1. Review the checklist items in the pull request description.
Once you've checked off all but the last two of these, approve the PR and automerge it.
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Tag release and merge back" workflow will create a mergeback PR.
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.
Once you've checked off all but the last of these, approve the PR and automerge it.
1. When the "Merge main into v1" pull request is merged into the `v1` branch, the "Tag release and merge back" workflow will create a mergeback PR.
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into v1" pull request, and bumps the patch version of the CodeQL Action.
Approve the mergeback PR and automerge it.
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Update release branch" workflow will create a "Merge releases/v2 into releases/v1" pull request to merge the changes since the last release into the `releases/v1` release branch.
This ensures we keep both the `releases/v1` and `releases/v2` release branches up to date and fully supported.
Review the checklist items in the pull request description.
Once you've checked off all the items, approve the PR and automerge it.
1. Once the mergeback has been merged to `main` and the "Merge releases/v2 into releases/v1" PR has been merged to `releases/v1`, the release is complete.
## Keeping the PR checks up to date (admin access required)
Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. You can regenerate the checks automatically by running the [update-required-checks.sh](.github/workflows/script/update-required-checks.sh) script:
1. By default, this script retrieves the checks from the latest SHA on `main`, so make sure that your `main` branch is up to date.
2. Run the script. If there's a reason to, you can pass in a different SHA as a CLI argument.
3. After running, go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules for `main`, `v1`, and `v2` have been updated.
Approve the mergeback PR and automerge it. Once the mergeback has been merged into main, the release is complete.
## Resources

View File

@@ -52,22 +52,22 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java, ruby
# languages: go, javascript, csharp, python, cpp, java
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below).
- name: Autobuild
uses: github/codeql-action/autobuild@v2
uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following
# three lines and modify them (or add more) to build your code if your
@@ -78,14 +78,14 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v1
```
If you prefer to integrate this within an existing CI workflow, it should end up looking something like this:
```yaml
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v1
with:
languages: go, javascript
@@ -95,7 +95,7 @@ If you prefer to integrate this within an existing CI workflow, it should end up
make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v1
```
### Configuration file
@@ -103,7 +103,7 @@ If you prefer to integrate this within an existing CI workflow, it should end up
Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
```yaml
- uses: github/codeql-action/init@v2
- uses: github/codeql-action/init@v1
with:
config-file: ./.github/codeql/codeql-config.yml
```
@@ -111,7 +111,7 @@ Use the `config-file` parameter of the `init` action to enable the configuration
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
```yaml
- uses: github/codeql-action/init@v2
- uses: github/codeql-action/init@v1
with:
config-file: owner/repo/codeql-config.yml@branch
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
@@ -122,7 +122,7 @@ For information on how to write a configuration file, see "[Using a custom confi
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
```yaml
- uses: github/codeql-action/init@v2
- uses: github/codeql-action/init@v1
with:
queries: <local-or-remote-query>,<another-query>
```
@@ -130,7 +130,7 @@ If you only want to customise the queries used, you can specify them in your wor
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:
```yaml
- uses: github/codeql-action/init@v2
- uses: github/codeql-action/init@v1
with:
queries: +<local-or-remote-query>,<another-query>
```
@@ -139,3 +139,10 @@ By default, this will override any queries specified in a config file. If you wi
Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
### Note on "missing analysis" message
The very first time code scanning is run and if it is on a pull request, you will probably get a message mentioning a "missing analysis". This is expected.
After code scanning has analyzed the code in a pull request, it needs to compare the analysis of the topic branch (the merge commit of the branch you used to create the pull request) with the analysis of the base branch (the branch into which you want to merge the pull request). This allows code scanning to compute which alerts are newly introduced by the pull request, which alerts were already present in the base branch, and whether any existing alerts are fixed by the changes in the pull request. Initially, if you use a pull request to add code scanning to a repository, the base branch has not yet been analyzed, so it's not possible to compute these details. In this case, when you click through from the results check on the pull request you will see the "Missing analysis for base commit SHA-HASH" message.
For more information and other causes of this message, see [Reasons for the "missing analysis" message](https://docs.github.com/en/code-security/secure-coding/automatically-scanning-your-code-for-vulnerabilities-and-errors/setting-up-code-scanning-for-a-repository#reasons-for-the-missing-analysis-message)
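One way to avoid this message after the initial setup is to make sure the workflow also analyzes the base branch, so a baseline analysis exists to compare against. A minimal trigger block along those lines (assuming `main` is the default branch) could look like:

```yaml
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
```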

View File

@@ -45,12 +45,6 @@ inputs:
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
required: false
default: ${{ github.workspace }}
ref:
description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."
required: false
sha:
description: "The sha of the HEAD of the ref where results will be uploaded. If not provided, the Action will use the GITHUB_SHA environment variable. If provided, the ref input must be provided as well. This input is not available in pull requests from forks."
required: false
category:
description: String used by Code Scanning for matching the analyses
required: false
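The `category` input above is commonly used to keep analyses from different matrix jobs distinct so that their uploads do not overwrite each other. A typical, illustrative pattern, assuming a `language` matrix variable, is:

```yaml
- name: Perform CodeQL Analysis
  uses: github/codeql-action/analyze@v2
  with:
    category: "/language:${{ matrix.language }}"
```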
@@ -61,21 +55,14 @@ inputs:
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
default: "true"
default: "false"
token:
default: ${{ github.token }}
matrix:
default: ${{ toJson(matrix) }}
expect-error:
description: "[Internal] It is an error to use this input outside of integration testing of the codeql-action."
required: false
default: "false"
outputs:
db-locations:
description: A map from language to absolute path for each database created by CodeQL.
sarif-id:
description: The ID of the uploaded SARIF file.
runs:
using: "node16"
using: "node12"
main: "../lib/analyze-action.js"
post: "../lib/analyze-action-post.js"

View File

@@ -6,12 +6,6 @@ inputs:
default: ${{ github.token }}
matrix:
default: ${{ toJson(matrix) }}
working-directory:
description: >-
Run the autobuilder using this path (relative to $GITHUB_WORKSPACE) as
working directory. If this input is not set, the autobuilder runs with
$GITHUB_WORKSPACE as its working directory.
required: false
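As a rough usage sketch of the `working-directory` input described above (the `backend` subdirectory is a made-up example):

```yaml
- name: Autobuild
  uses: github/codeql-action/autobuild@v2
  with:
    working-directory: backend
```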
runs:
using: 'node16'
using: 'node12'
main: '../lib/autobuild-action.js'

View File

@@ -10,34 +10,9 @@ inputs:
description: The languages to be analysed
required: false
token:
description: GitHub token to use for authenticating with this instance of GitHub. To download custom packs from multiple registries, use the registries input.
default: ${{ github.token }}
required: false
registries:
description: |
Use this input only when you need to download CodeQL packages from another instance of GitHub. If you only need to download packages from this GitHub instance, use the token input instead.
A YAML string that defines the list of GitHub container registries to use for downloading packs. The string is in the following form (the | is required on the first line):
registries: |
- url: https://containers.GHEHOSTNAME1/v2/
packages:
- my-company/*
- my-company2/*
token: \$\{{ secrets.GHEHOSTNAME1_TOKEN }}
- url: https://ghcr.io/v2/
packages: */*
token: \$\{{ secrets.GHCR_TOKEN }}
The `url` property contains the URL to the container registry you want to connect to.
The `packages` property contains a single glob string or a list of glob strings, specifying which packages should be retrieved from this particular container registry. Order is important. Earlier entries will match before later entries.
      The `token` property contains a connection token for this registry.
    required: false
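Wired into a workflow step, the `registries` input described above might be passed along these lines (the host name and package glob reuse the placeholders from the description; this is a sketch, not a verified configuration):

```yaml
- uses: github/codeql-action/init@v2
  with:
    languages: java
    registries: |
      - url: https://containers.GHEHOSTNAME1/v2/
        packages: my-company/*
        token: ${{ secrets.GHEHOSTNAME1_TOKEN }}
```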
matrix:
default: ${{ toJson(matrix) }}
required: false
config-file:
description: Path of the config file to use
required: false
@@ -57,7 +32,7 @@ inputs:
analyses, you must specify packs in the codeql-config.yml file.
required: false
external-repository-token:
description: A token for fetching external config files and queries if they reside in a private repository in the same GitHub instance that is running this action.
description: A token for fetching external config files and queries if they reside in a private repository.
required: false
setup-python-dependencies:
description: Try to auto-install your python dependencies
@@ -81,30 +56,12 @@ inputs:
This input also sets the number of threads that can later be used by the "analyze" action.
required: false
debug:
description: >-
Enable debugging mode.
This will result in more output being produced which may be useful when debugging certain issues.
Debugging mode is enabled automatically when step debug logging is turned on.
description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
required: false
default: 'false'
debug-artifact-name:
description: >-
The name of the artifact to store debugging information in.
This is only used when debug mode is enabled.
required: false
debug-database-name:
description: >-
The name of the database uploaded to the debugging artifact.
This is only used when debug mode is enabled.
required: false
trap-caching:
description: >-
Explicitly enable or disable TRAP caching rather than respecting the feature flag for it.
required: false
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis
runs:
using: 'node16'
using: 'node12'
main: '../lib/init-action.js'
post: '../lib/init-action-post.js'

454
lib/actions-util.js generated
View File

@@ -19,24 +19,26 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const yaml = __importStar(require("js-yaml"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
const util_1 = require("./util");
const workflow_1 = require("./workflow");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
/**
* The utils in this module are meant to be run inside of the action only.
* Code paths from the runner should not enter this module.
*/
/**
* Wrapper around core.getInput for inputs that always have a value.
* Also see getOptionalInput.
*
* This allows us to get stronger type checking of required/optional inputs.
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
function getRequiredInput(name) {
return core.getInput(name, { required: true });
@@ -46,12 +48,13 @@ exports.getRequiredInput = getRequiredInput;
* Wrapper around core.getInput that converts empty inputs to undefined.
* Also see getRequiredInput.
*
* This allows us to get stronger type checking of required/optional inputs.
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
const getOptionalInput = function (name) {
function getOptionalInput(name) {
const value = core.getInput(name);
return value.length > 0 ? value : undefined;
};
}
exports.getOptionalInput = getOptionalInput;
function getTemporaryDirectory() {
const value = process.env["CODEQL_ACTION_TEMP"];
@@ -60,10 +63,17 @@ function getTemporaryDirectory() {
: (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
}
exports.getTemporaryDirectory = getTemporaryDirectory;
function getToolCacheDirectory() {
const value = process.env["CODEQL_ACTION_TOOL_CACHE"];
return value !== undefined && value !== ""
? value
: (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE");
}
exports.getToolCacheDirectory = getToolCacheDirectory;
/**
* Gets the SHA of the commit that is currently checked out.
*/
const getCommitOid = async function (checkoutPath, ref = "HEAD") {
const getCommitOid = async function (ref = "HEAD") {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
@@ -83,68 +93,235 @@ const getCommitOid = async function (checkoutPath, ref = "HEAD") {
process.stderr.write(data);
},
},
cwd: checkoutPath,
}).exec();
return commitOid.trim();
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment or input: ${e}`);
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
core.info(e.stack || "NO STACK");
return (0, exports.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
}
};
exports.getCommitOid = getCommitOid;
/**
* If the action was triggered by a pull request, determine the commit sha of the merge base.
* Returns undefined if run by other triggers or the merge base cannot be determined.
*/
const determineMergeBaseCommitOid = async function () {
if (workflowEventName() !== "pull_request") {
return undefined;
function isObject(o) {
return o !== null && typeof o === "object";
}
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
const checkoutPath = (0, exports.getOptionalInput)("checkout_path");
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
function escapeRegExp(string) {
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
}
function patternToRegExp(value) {
return new RegExp(`^${value
.toString()
.split(GLOB_PATTERN)
.reduce(function (arr, cur) {
if (cur === "**") {
arr.push(".*?");
}
else if (cur === "*") {
arr.push("[^/]*?");
}
else if (cur) {
arr.push(escapeRegExp(cur));
}
return arr;
}, [])
.join("")}$`);
}
// this function should return true if patternA is a superset of patternB
// e.g: * is a superset of main-* but main-* is not a superset of *.
function patternIsSuperset(patternA, patternB) {
return patternToRegExp(patternA).test(patternB);
}
exports.patternIsSuperset = patternIsSuperset;
function branchesToArray(branches) {
if (typeof branches === "string") {
return [branches];
}
if (Array.isArray(branches)) {
if (branches.length === 0) {
return "**";
}
return branches;
}
return "**";
}
function toCodedErrors(errors) {
return Object.entries(errors).reduce((acc, [key, value]) => {
acc[key] = { message: value, code: key };
return acc;
}, {});
}
// code to send back via status report
// message to add as a warning annotation to the run
exports.WorkflowErrors = toCodedErrors({
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
});
function getWorkflowErrors(doc) {
var _a, _b, _c, _d, _e;
const errors = [];
const jobName = process.env.GITHUB_JOB;
if (jobName) {
const job = (_a = doc === null || doc === void 0 ? void 0 : doc.jobs) === null || _a === void 0 ? void 0 : _a[jobName];
const steps = job === null || job === void 0 ? void 0 : job.steps;
if (Array.isArray(steps)) {
for (const step of steps) {
// this was advice that we used to give in the README
// we actually want to run the analysis on the merge commit
// to produce results that are more inline with expectations
// (i.e: this is what will happen if you merge this PR)
// and avoid some race conditions
if ((step === null || step === void 0 ? void 0 : step.run) === "git checkout HEAD^2") {
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
break;
}
}
}
}
let missingPush = false;
if (doc.on === undefined) {
// this is not a valid config
}
else if (typeof doc.on === "string") {
if (doc.on === "pull_request") {
missingPush = true;
}
}
else if (Array.isArray(doc.on)) {
const hasPush = doc.on.includes("push");
const hasPullRequest = doc.on.includes("pull_request");
if (hasPullRequest && !hasPush) {
missingPush = true;
}
}
else if (isObject(doc.on)) {
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
if (!hasPush && hasPullRequest) {
missingPush = true;
}
if (hasPush && hasPullRequest) {
const paths = (_b = doc.on.push) === null || _b === void 0 ? void 0 : _b.paths;
// if you specify paths or paths-ignore you can end up with commits that have no baseline
// if they didn't change any files
// currently we cannot go back through the history and find the most recent baseline
if (Array.isArray(paths) && paths.length > 0) {
errors.push(exports.WorkflowErrors.PathsSpecified);
}
const pathsIgnore = (_c = doc.on.push) === null || _c === void 0 ? void 0 : _c["paths-ignore"];
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
}
}
// if doc.on.pull_request is null that means 'all branches'
// if doc.on.pull_request is undefined that means 'off'
// we only want to check for mismatched branches if pull_request is on.
if (doc.on.pull_request !== undefined) {
const push = branchesToArray((_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.branches);
if (push !== "**") {
const pull_request = branchesToArray((_e = doc.on.pull_request) === null || _e === void 0 ? void 0 : _e.branches);
if (pull_request !== "**") {
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
if (difference.length > 0) {
// there are branches in pull_request that may not have a baseline
// because we are not building them on push
errors.push(exports.WorkflowErrors.MismatchedBranches);
}
}
else if (push.length > 0) {
// push is set up to run on a subset of branches
// and you could open a PR against a branch with no baseline
errors.push(exports.WorkflowErrors.MismatchedBranches);
}
}
}
}
if (missingPush) {
errors.push(exports.WorkflowErrors.MissingPushHook);
}
return errors;
}
exports.getWorkflowErrors = getWorkflowErrors;
async function validateWorkflow() {
let workflow;
try {
let commitOid = "";
let baseOid = "";
let headOid = "";
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["show", "-s", "--format=raw", mergeSha], {
silent: true,
listeners: {
stdline: (data) => {
if (data.startsWith("commit ") && commitOid === "") {
commitOid = data.substring(7);
}
else if (data.startsWith("parent ")) {
if (baseOid === "") {
baseOid = data.substring(7);
}
else if (headOid === "") {
headOid = data.substring(7);
}
}
},
stderr: (data) => {
process.stderr.write(data);
},
},
cwd: checkoutPath,
}).exec();
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
if (commitOid === mergeSha &&
headOid.length === 40 &&
baseOid.length === 40) {
return baseOid;
}
return undefined;
workflow = await getWorkflow();
}
catch (e) {
core.info(`Failed to call git to determine merge base. Continuing with data from environment: ${e}`);
core.info(e.stack || "NO STACK");
return `error: getWorkflow() failed: ${String(e)}`;
}
let workflowErrors;
try {
workflowErrors = getWorkflowErrors(workflow);
}
catch (e) {
return `error: getWorkflowErrors() failed: ${String(e)}`;
}
if (workflowErrors.length > 0) {
let message;
try {
message = formatWorkflowErrors(workflowErrors);
}
catch (e) {
return `error: formatWorkflowErrors() failed: ${String(e)}`;
}
core.warning(message);
}
return formatWorkflowCause(workflowErrors);
}
exports.validateWorkflow = validateWorkflow;
function formatWorkflowErrors(errors) {
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
const errorsList = errors.map((e) => e.message).join(" ");
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
}
exports.formatWorkflowErrors = formatWorkflowErrors;
function formatWorkflowCause(errors) {
if (errors.length === 0) {
return undefined;
}
};
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
return errors.map((e) => e.code).join(",");
}
exports.formatWorkflowCause = formatWorkflowCause;
async function getWorkflow() {
const relativePath = await getWorkflowPath();
const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
}
exports.getWorkflow = getWorkflow;
/**
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath() {
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
owner,
repo,
run_id,
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
return workflowResponse.data.path;
}
/**
* Get the workflow run ID.
*/
function getWorkflowRunID() {
const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
if (Number.isNaN(workflowRunID)) {
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
}
return workflowRunID;
}
exports.getWorkflowRunID = getWorkflowRunID;
/**
* Get the analysis key parameter for the current job.
*
@@ -158,7 +335,7 @@ async function getAnalysisKey() {
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await (0, workflow_1.getWorkflowPath)();
const workflowPath = await getWorkflowPath();
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
analysisKey = `${workflowPath}:${jobName}`;
core.exportVariable(analysisKeyEnvVar, analysisKey);
@@ -196,24 +373,8 @@ exports.computeAutomationID = computeAutomationID;
async function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const refInput = (0, exports.getOptionalInput)("ref");
const shaInput = (0, exports.getOptionalInput)("sha");
const checkoutPath = (0, exports.getOptionalInput)("checkout_path") ||
(0, exports.getOptionalInput)("source-root") ||
(0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE");
const hasRefInput = !!refInput;
const hasShaInput = !!shaInput;
// If one of 'ref' or 'sha' are provided, both are required
if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
throw new Error("Both 'ref' and 'sha' are required if one of them is provided.");
}
const ref = refInput || getRefFromEnv();
const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
// If the ref is a user-provided input, we have to skip logic
// and assume that it is really where they want to upload the results.
if (refInput) {
return refInput;
}
const ref = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
const sha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
// For pull request refs we want to detect whether the workflow
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
// than the 'merge' ref. If so, we want to convert the ref that
@@ -222,14 +383,15 @@ async function getRef() {
if (!pull_ref_regex.test(ref)) {
return ref;
}
const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
// in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
const head = await (0, exports.getCommitOid)("HEAD");
// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
// in actions/checkout@v1 this may not be true as it checks out the repository
// using GITHUB_REF. There is a subtle race condition where
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
// git rev-parse GITHUB_REF == git rev-parse HEAD instead.
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
const hasChangedRef = sha !== head &&
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
(await (0, exports.getCommitOid)(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
head;
if (hasChangedRef) {
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
@@ -240,35 +402,6 @@ async function getRef() {
}
}
exports.getRef = getRef;
function getRefFromEnv() {
// To workaround a limitation of Actions dynamic workflows not setting
// the GITHUB_REF in some cases, we accept also the ref within the
// CODE_SCANNING_REF variable. When possible, however, we prefer to use
// the GITHUB_REF as that is a protected variable and cannot be overwritten.
let refEnv;
try {
refEnv = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
}
catch (e) {
// If the GITHUB_REF is not set, we try to rescue by getting the
// CODE_SCANNING_REF.
const maybeRef = process.env["CODE_SCANNING_REF"];
if (maybeRef === undefined || maybeRef.length === 0) {
throw e;
}
refEnv = maybeRef;
}
return refEnv;
}
function getActionsStatus(error, otherFailureCause) {
if (error || otherFailureCause) {
return error instanceof util_1.UserError ? "user-error" : "failure";
}
else {
return "success";
}
}
exports.getActionsStatus = getActionsStatus;
/**
* Compose a StatusReport.
*
@@ -279,7 +412,7 @@ exports.getActionsStatus = getActionsStatus;
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = (0, exports.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
const commitOid = process.env["GITHUB_SHA"] || "";
const ref = await getRef();
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
let workflowRunID = -1;
@@ -294,15 +427,11 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
const actionRef = process.env["GITHUB_ACTION_REF"];
const testingEnvironment = process.env[sharedEnv.CODEQL_ACTION_TESTING_ENVIRONMENT] || "";
// re-export the testing environment variable so that it is available to subsequent steps,
// even if it was only set for this step
if (testingEnvironment !== "") {
core.exportVariable(sharedEnv.CODEQL_ACTION_TESTING_ENVIRONMENT, testingEnvironment);
}
// If running locally then the GITHUB_ACTION_REF cannot be trusted as it may be for the previous action
// See https://github.com/actions/runner/issues/803
const actionRef = isRunningLocalAction()
? undefined
: process.env["GITHUB_ACTION_REF"];
const statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
@@ -316,9 +445,6 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status,
testing_environment: testingEnvironment,
runner_os: runnerOs,
action_version: pkg.version,
};
// Add optional parameters
if (cause) {
@@ -327,32 +453,18 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
if (exception) {
statusReport.exception = exception;
}
if (status === "success" ||
status === "failure" ||
status === "aborted" ||
status === "user-error") {
if (status === "success" || status === "failure" || status === "aborted") {
statusReport.completed_at = new Date().toISOString();
}
const matrix = getRequiredInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
if ("RUNNER_ARCH" in process.env) {
// RUNNER_ARCH is available only in GHES 3.4 and later
// Values other than X86, X64, ARM, or ARM64 are discarded server side
statusReport.runner_arch = process.env["RUNNER_ARCH"];
}
if (runnerOs === "Windows" || runnerOs === "macOS") {
statusReport.runner_os_release = os.release();
}
if (codeQlCliVersion !== undefined) {
statusReport.codeql_version = codeQlCliVersion;
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
const GENERIC_404_MSG = "Not authorized to used the CodeQL code scanning feature on this repo.";
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
/**
@@ -367,14 +479,9 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
async function sendStatusReport(statusReport) {
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
// If in test mode we don't want to upload the results
if ((0, util_1.isInTestMode)()) {
core.debug("In test mode. Status reports are not uploaded.");
return true;
}
const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getApiClient();
const client = api.getActionsApiClient();
try {
await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
owner,
@@ -421,21 +528,9 @@ async function sendStatusReport(statusReport) {
}
}
exports.sendStatusReport = sendStatusReport;
function workflowEventName() {
// If the original event is dynamic, CODESCANNING_EVENT_NAME will contain the right info (push/pull_request)
if (process.env["GITHUB_EVENT_NAME"] === "dynamic") {
const value = process.env["CODESCANNING_EVENT_NAME"];
if (value === undefined || value.length === 0) {
return process.env["GITHUB_EVENT_NAME"];
}
return value;
}
return process.env["GITHUB_EVENT_NAME"];
}
exports.workflowEventName = workflowEventName;
// Was the workflow run triggered by a `push` event, for example as opposed to a `pull_request` event.
function workflowIsTriggeredByPushEvent() {
return workflowEventName() === "push";
return process.env["GITHUB_EVENT_NAME"] === "push";
}
// Is dependabot the actor that triggered the current workflow run.
function isDependabotActor() {
@@ -466,51 +561,22 @@ function getWorkflowEvent() {
throw new Error(`Unable to read workflow event JSON from ${eventJsonFile}: ${e}`);
}
}
function removeRefsHeadsPrefix(ref) {
return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
}
// Is the version of the repository we are currently analyzing from the default branch,
// or alternatively from another branch or a pull request.
async function isAnalyzingDefaultBranch() {
var _a;
// Get the current ref and trim any refs/heads/ prefix
let currentRef = await getRef();
currentRef = removeRefsHeadsPrefix(currentRef);
currentRef = currentRef.startsWith("refs/heads/")
? currentRef.substr("refs/heads/".length)
: currentRef;
const event = getWorkflowEvent();
let defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
if (process.env.GITHUB_EVENT_NAME === "schedule") {
defaultBranch = removeRefsHeadsPrefix((0, util_1.getRequiredEnvParam)("GITHUB_REF"));
}
const defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
return currentRef === defaultBranch;
}
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
async function printDebugLogs(config) {
for (const language of config.languages) {
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
const logsDirectory = path.join(databaseDirectory, "log");
if (!(0, util_1.doesDirectoryExist)(logsDirectory)) {
core.info(`Directory ${logsDirectory} does not exist.`);
continue; // Skip this language database.
function sanitizeArifactName(name) {
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
}
const walkLogFiles = (dir) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
if (entries.length === 0) {
core.info(`No debug logs found at directory ${logsDirectory}.`);
}
for (const entry of entries) {
if (entry.isFile()) {
const absolutePath = path.resolve(dir, entry.name);
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name} from file at path ${absolutePath}`);
process.stdout.write(fs.readFileSync(absolutePath));
core.endGroup();
}
else if (entry.isDirectory()) {
walkLogFiles(path.resolve(dir, entry.name));
}
}
};
walkLogFiles(logsDirectory);
}
}
exports.printDebugLogs = printDebugLogs;
exports.sanitizeArifactName = sanitizeArifactName;
//# sourceMappingURL=actions-util.js.map
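The getRef() change above is the heart of this hunk: the simpler variant reads GITHUB_REF and GITHUB_SHA directly (no ref/sha inputs, no checkout_path) and only rewrites a pull request "merge" ref to the corresponding "head" ref when the workflow appears to have run `git checkout HEAD^2`. A minimal TypeScript sketch of that rewriting step, assuming a caller-supplied OID lookup in place of getCommitOid and an assumed pull-ref regex (neither the exact regex nor the helper names below are taken from this diff):

type ResolveOid = (revision: string) => Promise<string>;

// Assumed shape of the action's pull_ref_regex.
const pullRefRegex = /^refs\/pull\/(\d+)\/merge$/;

async function resolveCheckedOutRef(
    githubRef: string,
    githubSha: string,
    resolveOid: ResolveOid
): Promise<string> {
    // Non-PR refs (e.g. refs/heads/main) are reported unchanged.
    if (!pullRefRegex.test(githubRef)) {
        return githubRef;
    }
    const head = await resolveOid("HEAD");
    // If HEAD matches neither GITHUB_SHA nor the remote merge ref, the job has
    // likely checked out HEAD^2, so report the PR head ref instead.
    const hasChangedRef =
        githubSha !== head &&
        (await resolveOid(
            githubRef.replace(/^refs\/pull\//, "refs/remotes/pull/")
        )) !== head;
    return hasChangedRef
        ? githubRef.replace(pullRefRegex, "refs/pull/$1/head")
        : githubRef;
}

With GITHUB_REF=refs/pull/1/merge, GITHUB_SHA set to one commit, and HEAD resolving to a different commit than both, this returns refs/pull/1/head, which is exactly the scenario the "returns head PR ref if GITHUB_REF no longer checked out" test below exercises.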

lib/actions-util.js.map generated
File diff suppressed because one or more lines are too long

427
lib/actions-util.test.js generated
View File

@@ -25,18 +25,20 @@ Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const yaml = __importStar(require("js-yaml"));
const sinon = __importStar(require("sinon"));
const actionsutil = __importStar(require("./actions-util"));
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
function errorCodes(actual, expected) {
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
}
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
process.env["GITHUB_REF"] = "";
await t.throwsAsync(actionsutil.getRef);
});
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["GITHUB_REF"] = expectedRef;
@@ -47,10 +49,7 @@ const util_1 = require("./util");
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
});
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = "b".repeat(40);
@@ -62,67 +61,16 @@ const util_1 = require("./util");
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
});
(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
const callback = sinon.stub(actionsutil, "getCommitOid");
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
callback.withArgs("HEAD").resolves("b".repeat(40));
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, "refs/pull/1/head");
callback.restore();
});
});
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
// These values are ignored
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
const callback = sinon.stub(actionsutil, "getCommitOid");
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
callback.withArgs("HEAD").resolves("b".repeat(40));
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, "refs/pull/2/merge");
callback.restore();
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
await t.throwsAsync(async () => {
await actionsutil.getRef();
}, {
instanceOf: Error,
message: "Both 'ref' and 'sha' are required if one of them is provided.",
});
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
process.env["GITHUB_WORKSPACE"] = "/tmp";
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
await t.throwsAsync(async () => {
await actionsutil.getRef();
}, {
instanceOf: Error,
message: "Both 'ref' and 'sha' are required if one of them is provided.",
});
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("computeAutomationID()", async (t) => {
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
@@ -139,13 +87,343 @@ const util_1 = require("./util");
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
});
(0, ava_1.default)("getWorkflowErrors() when on is empty", (t) => {
const errors = actionsutil.getWorkflowErrors({ on: {} });
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing push", (t) => {
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is valid", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request"],
});
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is a valid superset", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request", "schedule"],
});
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push should not have a path", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main"], paths: ["test/*"] },
pull_request: { branches: ["main"] },
},
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is a correct object", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
});
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
});
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.load(`
on:
push:
pull_request:
`));
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main"] },
pull_request: { branches: ["feature"] },
},
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is not mismatched", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main", "feature"] },
pull_request: { branches: ["main"] },
},
});
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main"] },
pull_request: { branches: ["main", "feature"] },
},
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() for a range of malformed workflows", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: {
push: 1,
pull_request: 1,
},
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: 1,
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: [1],
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { 1: 1 },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: 1 },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: [1] },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: { steps: 1 } },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: [undefined] },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: {
push: {
branches: 1,
},
pull_request: {
branches: 1,
},
},
}), []));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
push:
branches: ["main"]
pull_request:
`));
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["feature/*"] },
pull_request: { branches: "feature/moose" },
},
});
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["feature/moose"] },
pull_request: { branches: "feature/*" },
},
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
(0, ava_1.default)("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
process.env.GITHUB_JOB = "test";
const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request"],
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
});
(0, ava_1.default)("formatWorkflowErrors() when there is one error", (t) => {
const message = actionsutil.formatWorkflowErrors([
actionsutil.WorkflowErrors.CheckoutWrongHead,
]);
t.true(message.startsWith("1 issue was detected with this workflow:"));
});
(0, ava_1.default)("formatWorkflowErrors() when there are multiple errors", (t) => {
const message = actionsutil.formatWorkflowErrors([
actionsutil.WorkflowErrors.CheckoutWrongHead,
actionsutil.WorkflowErrors.PathsSpecified,
]);
t.true(message.startsWith("2 issues were detected with this workflow:"));
});
(0, ava_1.default)("formatWorkflowCause() with no errors", (t) => {
const message = actionsutil.formatWorkflowCause([]);
t.deepEqual(message, undefined);
});
(0, ava_1.default)("formatWorkflowCause()", (t) => {
const message = actionsutil.formatWorkflowCause([
actionsutil.WorkflowErrors.CheckoutWrongHead,
actionsutil.WorkflowErrors.PathsSpecified,
]);
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
});
(0, ava_1.default)("patternIsSuperset()", (t) => {
t.false(actionsutil.patternIsSuperset("main-*", "main"));
t.true(actionsutil.patternIsSuperset("*", "*"));
t.true(actionsutil.patternIsSuperset("*", "main-*"));
t.false(actionsutil.patternIsSuperset("main-*", "*"));
t.false(actionsutil.patternIsSuperset("main-*", "main"));
t.true(actionsutil.patternIsSuperset("main", "main"));
t.false(actionsutil.patternIsSuperset("*", "feature/*"));
t.true(actionsutil.patternIsSuperset("**", "feature/*"));
t.false(actionsutil.patternIsSuperset("feature-*", "**"));
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
});
(0, ava_1.default)("getWorkflowErrors() when branches contain dots", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.load(`
on:
push:
branches: [4.1, master]
pull_request:
# The branches below must be a subset of the branches above
branches: [4.1, master]
`));
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on.push has a trailing comma", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
push:
branches: [master, ]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
`));
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
process.env.GITHUB_JOB = "test";
const errors = actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
jobs:
test:
steps:
- run: "git checkout HEAD^2"
test2:
steps:
- run: "git checkout HEAD^2"
test3:
steps: []
`));
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
});
(0, ava_1.default)("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
process.env.GITHUB_JOB = "test3";
const errors = actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
jobs:
test:
steps:
- run: "git checkout HEAD^2"
test2:
steps:
- run: "git checkout HEAD^2"
test3:
steps: []
`));
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() when on is missing", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
`));
t.deepEqual(...errorCodes(errors, []));
});
(0, ava_1.default)("getWorkflowErrors() with a different on setup", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on: "workflow_dispatch"
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on: [workflow_dispatch]
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
workflow_dispatch: {}
`)), []));
});
(0, ava_1.default)("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on:
push:
branches: [master]
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
name: "CodeQL"
on: ["push"]
`)), []));
});
(0, ava_1.default)("initializeEnvironment", (t) => {
(0, util_1.initializeEnvironment)("1.2.3");
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
t.deepEqual((0, util_1.getMode)(), util_1.Mode.actions);
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
(0, util_1.initializeEnvironment)(util_1.Mode.runner, "4.5.6");
t.deepEqual((0, util_1.getMode)(), util_1.Mode.runner);
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "4.5.6");
});
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const envFile = path.join(tmpDir, "event.json");
fs.writeFileSync(envFile, JSON.stringify({
repository: {
@@ -160,31 +438,12 @@ const util_1 = require("./util");
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
process.env["GITHUB_REF"] = "feature";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
fs.writeFileSync(envFile, JSON.stringify({
schedule: "0 0 * * *",
}));
process.env["GITHUB_EVENT_NAME"] = "schedule";
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub
.withArgs("ref")
.resolves("refs/heads/something-else");
getAdditionalInputStub
.withArgs("sha")
.resolves("0000000000000000000000000000000000000000");
process.env["GITHUB_EVENT_NAME"] = "schedule";
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("workflowEventName()", async (t) => {
process.env["GITHUB_EVENT_NAME"] = "push";
t.deepEqual(actionsutil.workflowEventName(), "push");
process.env["GITHUB_EVENT_NAME"] = "dynamic";
t.deepEqual(actionsutil.workflowEventName(), "dynamic");
process.env["CODESCANNING_EVENT_NAME"] = "push";
t.deepEqual(actionsutil.workflowEventName(), "push");
(0, ava_1.default)("sanitizeArifactName", (t) => {
t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
t.deepEqual(actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
});
//# sourceMappingURL=actions-util.test.js.map
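The tests above exercise the workflow-validation helpers one at a time. As a rough composition sketch (hedged: the exported parameter types are not visible in this diff, hence the cast), the pieces fit together like this:

import * as yaml from "js-yaml";
import * as actionsutil from "./actions-util";

const workflow = yaml.load(`
name: "CodeQL"
on:
  push:
    branches: [main]
  pull_request:
    branches: [main, feature]
`);

// pull_request covers a branch that push does not, so this yields MismatchedBranches
// (see the "mismatched for pull_request" test above).
const errors = actionsutil.getWorkflowErrors(workflow as any);
if (errors.length > 0) {
    // e.g. "1 issue was detected with this workflow: ..."
    console.log(actionsutil.formatWorkflowErrors(errors));
    // formatWorkflowCause(errors) would give the short form, e.g. "MismatchedBranches".
}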

lib/actions-util.test.js.map generated
File diff suppressed because one or more lines are too long

11
lib/analysis-paths.js generated
View File

@@ -37,11 +37,11 @@ function buildIncludeExcludeEnvVar(paths) {
return paths.join("\n");
}
function printPathFiltersWarning(config, logger) {
// Index include/exclude/filters only work in javascript/python/ruby.
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript, Python, and Ruby');
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python');
}
}
exports.printPathFiltersWarning = printPathFiltersWarning;
@@ -58,11 +58,14 @@ function includeAndExcludeAnalysisPaths(config) {
}
// If the temporary or tools directory is in the working directory ignore that too.
const tempRelativeToWorking = path.relative(process.cwd(), config.tempDir);
const toolsRelativeToWorking = path.relative(process.cwd(), config.toolCacheDir);
let pathsIgnore = config.pathsIgnore;
if (!tempRelativeToWorking.startsWith("..") &&
!path.isAbsolute(tempRelativeToWorking)) {
if (!tempRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(tempRelativeToWorking);
}
if (!toolsRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(toolsRelativeToWorking);
}
if (pathsIgnore.length !== 0) {
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(pathsIgnore);
}

lib/analysis-paths.js.map generated
View File

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IACE,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC;QACvC,CAAC,IAAI,CAAC,UAAU,CAAC,qBAAqB,CAAC,EACvC;QACA,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AAjCD,wEAiCC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}

lib/analysis-paths.test.js generated
View File

@@ -37,20 +37,12 @@ const util = __importStar(require("./util"));
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: {
injectedMlQueries: false,
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -67,20 +59,12 @@ const util = __importStar(require("./util"));
pathsIgnore: ["path4", "path5", "path6/**"],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: {
injectedMlQueries: false,
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -89,6 +73,7 @@ const util = __importStar(require("./util"));
});
});
(0, ava_1.default)("exclude temp dir", async (t) => {
return await util.withTmpDir(async (toolCacheDir) => {
const tempDir = path.join(process.cwd(), "codeql-runner-temp");
const config = {
languages: [],
@@ -97,24 +82,17 @@ const util = __importStar(require("./util"));
paths: [],
originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: {
injectedMlQueries: false,
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
});
//# sourceMappingURL=analysis-paths.test.js.map

lib/analysis-paths.test.js.map generated
View File

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,sBAAsB,EAAE;gBACtB,iBAAiB,EAAE,KAAK;gBACxB,kBAAkB,EAAE,KAAK;gBACzB,oBAAoB,EAAE,KAAK;aAC5B;YACD,UAAU,EAAE,EAAE;YACd,qBAAqB,EAAE,CAAC;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,sBAAsB,EAAE;gBACtB,iBAAiB,EAAE,KAAK;gBACxB,kBAAkB,EAAE,KAAK;gBACzB,oBAAoB,EAAE,KAAK;aAC5B;YACD,UAAU,EAAE,EAAE;YACd,qBAAqB,EAAE,CAAC;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;IAC/D,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;QACrB,OAAO;QACP,SAAS,EAAE,EAAE;QACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;QACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;QACrD,KAAK,EAAE,EAAE;QACT,SAAS,EAAE,KAAK;QAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,sBAAsB,EAAE;YACtB,iBAAiB,EAAE,KAAK;YACxB,kBAAkB,EAAE,KAAK;YACzB,oBAAoB,EAAE,KAAK;SAC5B;QACD,UAAU,EAAE,EAAE;QACd,qBAAqB,EAAE,CAAC;KACzB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;IAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;SACjB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;SACjB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;SACjB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/analyze-action-env.test.js generated
View File

@@ -38,32 +38,21 @@ const util = __importStar(require("./util"));
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion,
languages: [],
packs: [],
trapCaches: {},
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
// When there are no action inputs for RAM and threads, the action uses
// environment variables (passed down from the init action) to set RAM and
// threads usage.

lib/analyze-action-env.test.js.map generated
View File

@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEnE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAA+D;AAC/D,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/analyze-action-input.test.js generated
View File

@@ -38,32 +38,21 @@ const util = __importStar(require("./util"));
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion,
languages: [],
packs: [],
trapCaches: {},
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";
process.env["CODEQL_RAM"] = "4992";
// Action inputs have precedence over environment variables.

lib/analyze-action-input.test.js.map generated
View File

@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAA+D;AAC/D,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/analyze-action-post-helper.js generated
View File

@@ -1,41 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.run = void 0;
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
async function run(uploadSarifDebugArtifact) {
const logger = (0, logging_1.getActionsLogger)();
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Did the 'init' action fail to start?");
}
// Upload Actions SARIF artifacts for debugging
if (config === null || config === void 0 ? void 0 : config.debugMode) {
core.info("Debug mode is on. Uploading available SARIF files as Actions debugging artifact...");
const outputDir = actionsUtil.getRequiredInput("output");
await uploadSarifDebugArtifact(config, outputDir);
}
}
exports.run = run;
//# sourceMappingURL=analyze-action-post-helper.js.map
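This deleted post-step helper takes an upload callback and only invokes it when the stored config has debugMode set, as the accompanying test file below demonstrates with stubs. A hypothetical caller would look roughly like this (the logging body is a placeholder, not the action's real artifact upload):

import * as analyzeActionPostHelper from "./analyze-action-post-helper";

async function runPostStep(): Promise<void> {
    await analyzeActionPostHelper.run(async (config, outputDir) => {
        // Reached only when config.debugMode is true; the real action uploads
        // the SARIF files under outputDir as an Actions debugging artifact.
        console.log(`Would upload SARIF from ${outputDir} for ${config.languages.length} language(s)`);
    });
}

void runPostStep();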

lib/analyze-action-post-helper.js.map generated
View File

@@ -1 +0,0 @@
{"version":3,"file":"analyze-action-post-helper.js","sourceRoot":"","sources":["../src/analyze-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,uCAA6C;AAEtC,KAAK,UAAU,GAAG,CAAC,wBAAkC;IAC1D,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;KACH;IAED,+CAA+C;IAC/C,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,IAAI,CAAC,IAAI,CACP,oFAAoF,CACrF,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,wBAAwB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;KACnD;AACH,CAAC;AAlBD,kBAkBC"}

lib/analyze-action-post-helper.test.js generated
View File

@@ -1,69 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyzeActionPostHelper = __importStar(require("./analyze-action-post-helper"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("post: analyze action with debug mode off", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
debugMode: false,
gitHubVersion,
languages: [],
packs: [],
});
const uploadSarifSpy = sinon.spy();
await analyzeActionPostHelper.run(uploadSarifSpy);
t.assert(uploadSarifSpy.notCalled);
});
});
(0, ava_1.default)("post: analyze action with debug mode on", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
debugMode: true,
gitHubVersion,
languages: [],
packs: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("output").returns("fake-output-dir");
const uploadSarifSpy = sinon.spy();
await analyzeActionPostHelper.run(uploadSarifSpy);
t.assert(uploadSarifSpy.called);
});
});
//# sourceMappingURL=analyze-action-post-helper.test.js.map

lib/analyze-action-post-helper.test.js.map
View File

@@ -1 +0,0 @@
{"version":3,"file":"analyze-action-post-helper.test.js","sourceRoot":"","sources":["../src/analyze-action-post-helper.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,sFAAwE;AACxE,4DAA8C;AAC9C,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,0CAA0C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC3D,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QAEpC,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,KAAK;YAChB,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QAEpC,MAAM,cAAc,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;QAEnC,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QAElD,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QAEpC,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,IAAI;YACf,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QAEpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;QAEhE,MAAM,cAAc,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;QAEnC,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QAElD,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;IAClC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/analyze-action-post.js generated
View File

@@ -1,40 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* This file is the entry point for the `post:` hook of `analyze-action.yml`.
* It will run after the all steps in this job, in reverse order in relation to
* other `post:` hooks.
*/
const core = __importStar(require("@actions/core"));
const analyzeActionPostHelper = __importStar(require("./analyze-action-post-helper"));
const debugArtifacts = __importStar(require("./debug-artifacts"));
async function runWrapper() {
try {
await analyzeActionPostHelper.run(debugArtifacts.uploadSarifDebugArtifact);
}
catch (error) {
core.setFailed(`analyze post-action step failed: ${error}`);
console.log(error);
}
}
void runWrapper();
//# sourceMappingURL=analyze-action-post.js.map

lib/analyze-action-post.js.map
View File

@@ -1 +0,0 @@
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,sFAAwE;AACxE,kEAAoD;AAEpD,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,wBAAwB,CAAC,CAAC;KAC5E;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,oCAAoC,KAAK,EAAE,CAAC,CAAC;QAC5D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

235
lib/analyze-action.js generated
View File

@@ -18,154 +18,83 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.runPromise = exports.sendStatusReport = void 0;
const fs = __importStar(require("fs"));
const path_1 = __importDefault(require("path"));
// We need to import `performance` on Node 12
const perf_hooks_1 = require("perf_hooks");
const path = __importStar(require("path"));
const artifact = __importStar(require("@actions/artifact"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const api_client_1 = require("./api-client");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const database_upload_1 = require("./database-upload");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const trap_caching_1 = require("./trap-caching");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger) {
const status = actionsUtil.getActionsStatus(error, stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language);
async function sendStatusReport(startedAt, stats, error) {
const status = (stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language) !== undefined || error !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error === null || error === void 0 ? void 0 : error.message, error === null || error === void 0 ? void 0 : error.stack);
const statusReport = {
...statusReportBase,
...(config
? {
ml_powered_javascript_queries: util.getMlPoweredJsQueriesStatus(config),
}
: {}),
...(stats || {}),
...(dbCreationTimings || {}),
};
if (config && didUploadTrapCaches) {
const trapCacheUploadStatusReport = {
...statusReport,
trap_cache_upload_duration_ms: Math.round(trapCacheUploadTime || 0),
trap_cache_upload_size_bytes: Math.round(await (0, trap_caching_1.getTotalCacheSize)(config.trapCaches, logger)),
};
await actionsUtil.sendStatusReport(trapCacheUploadStatusReport);
}
else {
await actionsUtil.sendStatusReport(statusReport);
}
}
exports.sendStatusReport = sendStatusReport;
// `expect-error` should only be set to a non-false value by the CodeQL Action PR checks.
function hasBadExpectErrorInput() {
return (actionsUtil.getOptionalInput("expect-error") !== "false" &&
!util.isInTestMode());
}
/**
* Returns whether any TRAP files exist under the `db-go` folder,
* indicating whether Go extraction has extracted at least one file.
*/
function doesGoExtractionOutputExist(config) {
const golangDbDirectory = util.getCodeQLDatabasePath(config, languages_1.Language.go);
const trapDirectory = path_1.default.join(golangDbDirectory, "trap", languages_1.Language.go);
return (fs.existsSync(trapDirectory) &&
fs
.readdirSync(trapDirectory)
.some((fileName) => [
".trap",
".trap.gz",
".trap.br",
".trap.tar.gz",
".trap.tar.br",
".trap.tar",
].some((ext) => fileName.endsWith(ext))));
}
/**
* We attempt to autobuild Go to preserve compatibility for users who have
* set up Go using a legacy scanning style CodeQL workflow, i.e. one without
* an autobuild step or manual build steps.
*
* - We detect whether an autobuild step is present by checking the
* `util.DID_AUTOBUILD_GO_ENV_VAR_NAME` environment variable, which is set
* when the autobuilder is invoked.
* - We detect whether the Go database has already been finalized in case it
* has been manually set in a prior Action step.
* - We approximate whether manual build steps are present by looking at
* whether any extraction output already exists for Go.
*/
async function runAutobuildIfLegacyGoWorkflow(config, logger) {
if (!config.languages.includes(languages_1.Language.go)) {
return;
}
if (process.env[util.DID_AUTOBUILD_GO_ENV_VAR_NAME] === "true") {
logger.debug("Won't run Go autobuild since it has already been run.");
return;
}
if ((0, analyze_1.dbIsFinalized)(config, languages_1.Language.go, logger)) {
logger.debug("Won't run Go autobuild since there is already a finalized database for Go.");
return;
}
// This captures whether a user has added manual build steps for Go
if (doesGoExtractionOutputExist(config)) {
logger.debug("Won't run Go autobuild since at least one file of Go code has already been extracted.");
// If the user has run the manual build step, and has set the `CODEQL_EXTRACTOR_GO_BUILD_TRACING`
// variable, we suggest they remove it from their workflow.
if ("CODEQL_EXTRACTOR_GO_BUILD_TRACING" in process.env) {
logger.warning(`The CODEQL_EXTRACTOR_GO_BUILD_TRACING environment variable has no effect on workflows with manual build steps, so we recommend that you remove it from your workflow.`);
}
return;
}
await (0, autobuild_1.runAutobuild)(languages_1.Language.go, config, logger);
}
async function run() {
const startedAt = new Date();
let uploadResult = undefined;
let runStats = undefined;
let config = undefined;
let trapCacheUploadTime = undefined;
let dbCreationTimings = undefined;
let didUploadTrapCaches = false;
util.initializeEnvironment(pkg.version);
await util.checkActionVersion(pkg.version);
const logger = (0, logging_1.getActionsLogger)();
util.initializeEnvironment(util.Mode.actions, pkg.version);
try {
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
return;
}
const logger = (0, logging_1.getActionsLogger)();
config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
if (hasBadExpectErrorInput()) {
throw new Error("`expect-error` input parameter is for internal use only. It should only be set by codeql-action or a fork.");
}
await util.enrichEnvironment(await (0, codeql_1.getCodeQL)(config.codeQLCmd));
const apiDetails = (0, api_client_1.getApiDetails)();
await util.enrichEnvironment(util.Mode.actions, await (0, codeql_1.getCodeQL)(config.codeQLCmd));
const apiDetails = {
auth: actionsUtil.getRequiredInput("token"),
url: util.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
const outputDir = actionsUtil.getRequiredInput("output");
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
await runAutobuildIfLegacyGoWorkflow(config, logger);
dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger, features);
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
if (config.debugMode) {
// Upload the SARIF files as an Actions artifact for debugging
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir);
}
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (config.debugMode) {
// Upload the logs as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages) {
toUpload.push(...listFolder(path.resolve(util.getCodeQLDatabasePath(config, language), "log")));
}
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Multilanguage tracing: there are additional logs in the root of the cluster
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
}
await uploadDebugArtifacts(toUpload, config.dbLocation);
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir);
}
}
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
@@ -176,60 +105,96 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadResult = await upload_lib.uploadFromActions(outputDir, logger);
core.setOutput("sarif-id", uploadResult.sarifID);
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
}
else {
logger.info("Not uploading results");
}
// Possibly upload the database bundles for remote queries
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
// Possibly upload the TRAP caches for later re-use
const trapCacheUploadStartTime = perf_hooks_1.performance.now();
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
didUploadTrapCaches = await (0, trap_caching_1.uploadTrapCaches)(codeql, config, logger);
trapCacheUploadTime = perf_hooks_1.performance.now() - trapCacheUploadStartTime;
// We don't upload results in test mode, so don't wait for processing
if (util.isInTestMode()) {
core.debug("In test mode. Waiting for processing is disabled.");
}
else if (uploadResult !== undefined &&
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger); // Possibly upload the database bundles for remote queries
if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
// If we did not throw an error yet here, but we expect one, throw it.
if (actionsUtil.getOptionalInput("expect-error") === "true") {
core.setFailed(`expect-error input was set to true but no error was thrown.`);
if (config.debugMode) {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages)
toUpload.push(await (0, util_1.bundleDb)(config, language, codeql));
await uploadDebugArtifacts(toUpload, config.dbLocation);
}
}
catch (origError) {
const error = origError instanceof Error ? origError : new Error(String(origError));
if (actionsUtil.getOptionalInput("expect-error") !== "true" ||
hasBadExpectErrorInput()) {
core.setFailed(error.message);
}
console.log(error);
if (error instanceof analyze_1.CodeQLAnalysisError) {
const stats = { ...error.queriesStatusReport };
await sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
await sendStatusReport(startedAt, stats, error);
}
else {
await sendStatusReport(startedAt, config, undefined, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
await sendStatusReport(startedAt, undefined, error);
}
return;
}
finally {
if (core.isDebug() && config !== undefined) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
const databaseDirectory = util.getCodeQLDatabasePath(config, language);
const logsDirectory = path.join(databaseDirectory, "log");
const walkLogFiles = (dir) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile()) {
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
core.endGroup();
}
else if (entry.isDirectory()) {
walkLogFiles(path.resolve(dir, entry.name));
}
}
};
walkLogFiles(logsDirectory);
}
}
}
if (runStats && uploadResult) {
await sendStatusReport(startedAt, config, {
await sendStatusReport(startedAt, {
...runStats,
...uploadResult.statusReport,
}, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
});
}
else if (runStats) {
await sendStatusReport(startedAt, config, { ...runStats }, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
await sendStatusReport(startedAt, { ...runStats });
}
else {
await sendStatusReport(startedAt, config, undefined, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
await sendStatusReport(startedAt, undefined);
}
}
async function uploadDebugArtifacts(toUpload, rootDir) {
let suffix = "";
const matrix = actionsUtil.getRequiredInput("matrix");
if (matrix !== undefined && matrix !== "null") {
for (const entry of Object.entries(JSON.parse(matrix)).sort())
suffix += `-${entry[1]}`;
}
await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${util_1.DEBUG_ARTIFACT_NAME}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
}
function listFolder(dir) {
const entries = fs.readdirSync(dir, { withFileTypes: true });
const files = [];
for (const entry of entries) {
if (entry.isFile()) {
files.push(path.resolve(dir, entry.name));
}
else if (entry.isDirectory()) {
files.push(...listFolder(path.resolve(dir, entry.name)));
}
}
return files;
}
exports.runPromise = run();
async function runWrapper() {
try {
@@ -237,8 +202,8 @@ async function runWrapper() {
}
catch (error) {
core.setFailed(`analyze action failed: ${error}`);
console.log(error);
}
await (0, util_1.checkForTimeout)();
}
void runWrapper();
//# sourceMappingURL=analyze-action.js.map
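
To make the artifact naming in uploadDebugArtifacts (shown in the diff above) concrete: the matrix input is parsed as JSON, its entries are sorted, and each value is appended to the artifact name before sanitization. A standalone TypeScript sketch; the matrix value and base name below are illustrative assumptions, not taken from the diff:

// Mirrors the suffix construction in uploadDebugArtifacts.
function debugArtifactName(baseName: string, matrixInput: string | undefined): string {
  let suffix = "";
  if (matrixInput !== undefined && matrixInput !== "null") {
    for (const entry of Object.entries(JSON.parse(matrixInput)).sort()) {
      suffix += `-${entry[1]}`;
    }
  }
  return `${baseName}${suffix}`;
}

// An assumed matrix of {"language":"cpp","os":"ubuntu-latest"} with an assumed
// base name of "debug-artifacts" yields "debug-artifacts-cpp-ubuntu-latest".
console.log(
  debugArtifactName("debug-artifacts", '{"language":"cpp","os":"ubuntu-latest"}')
);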

File diff suppressed because one or more lines are too long

219
lib/analyze.js generated
View File

@@ -18,23 +18,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.validateQueryFilters = exports.runCleanup = exports.runFinalize = exports.createQuerySuiteContents = exports.convertPackToQuerySuiteEntry = exports.runQueries = exports.dbIsFinalized = exports.createdDBForScannedLanguages = exports.CodeQLAnalysisError = void 0;
exports.runCleanup = exports.runFinalize = exports.runQueries = exports.CodeQLAnalysisError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const perf_hooks_1 = require("perf_hooks"); // We need to import `performance` on Node 12
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const count_loc_1 = require("./count-loc");
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
class CodeQLAnalysisError extends Error {
constructor(queriesStatusReport, message) {
@@ -69,10 +63,11 @@ async function setupPythonExtractor(logger) {
logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`);
process.env["LGTM_PYTHON_SETUP_VERSION"] = output;
}
async function createdDBForScannedLanguages(codeql, config, logger) {
async function createdDBForScannedLanguages(config, logger) {
// Insert the LGTM_INDEX_X env vars at this point so they are set when
// we extract any scanned languages.
analysisPaths.includeAndExcludeAnalysisPaths(config);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
for (const language of config.languages) {
if ((0, languages_1.isScannedLanguage)(language) &&
!dbIsFinalized(config, language, logger)) {
@@ -80,12 +75,11 @@ async function createdDBForScannedLanguages(codeql, config, logger) {
if (language === languages_1.Language.python) {
await setupPythonExtractor(logger);
}
await codeql.extractScannedLanguage(config, language);
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config, language), language);
logger.endGroup();
}
}
}
exports.createdDBForScannedLanguages = createdDBForScannedLanguages;
function dbIsFinalized(config, language, logger) {
const dbPath = util.getCodeQLDatabasePath(config, language);
try {
@@ -97,13 +91,9 @@ function dbIsFinalized(config, language, logger) {
return false;
}
}
exports.dbIsFinalized = dbIsFinalized;
async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger) {
await createdDBForScannedLanguages(config, logger);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const extractionStart = perf_hooks_1.performance.now();
await createdDBForScannedLanguages(codeql, config, logger);
const extractionTime = perf_hooks_1.performance.now() - extractionStart;
const trapImportStart = perf_hooks_1.performance.now();
for (const language of config.languages) {
if (dbIsFinalized(config, language, logger)) {
logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`);
@@ -114,56 +104,51 @@ async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger)
logger.endGroup();
}
}
const trapImportTime = perf_hooks_1.performance.now() - trapImportStart;
return {
scanned_language_extraction_duration_ms: Math.round(extractionTime),
trap_import_duration_ms: Math.round(trapImportTime),
};
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, featureEnablement) {
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger) {
const statusReport = {};
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
await util.logCodeScanningConfigInCli(codeql, featureEnablement, logger);
let locPromise = Promise.resolve({});
const cliCanCountBaseline = await cliCanCountLoC();
const debugMode = process.env["INTERNAL_CODEQL_ACTION_DEBUG_LOC"] ||
process.env["ACTIONS_RUNNER_DEBUG"] ||
process.env["ACTIONS_STEP_DEBUG"];
if (!cliCanCountBaseline || debugMode) {
// count the number of lines in the background
locPromise = (0, count_loc_1.countLoc)(path.resolve(),
// config.paths specifies external directories. the current
// directory is included in the analysis by default. Replicate
// that here.
config.paths, config.pathsIgnore, config.languages, logger);
}
for (const language of config.languages) {
const queries = config.queries[language];
const queryFilters = validateQueryFilters(config.originalUserInput["query-filters"]);
const packsWithVersion = config.packs[language] || [];
const hasBuiltinQueries = (queries === null || queries === void 0 ? void 0 : queries.builtin.length) > 0;
const hasCustomQueries = (queries === null || queries === void 0 ? void 0 : queries.custom.length) > 0;
const hasPackWithCustomQueries = packsWithVersion.length > 0;
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
throw new Error(`Unable to analyze ${language} as no queries were selected for this language`);
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
try {
if (await util.useCodeScanningConfigInCli(codeql, featureEnablement)) {
// If we are using the code scanning config in the CLI,
// much of the work needed to generate the query suites
// is done in the CLI. We just need to make a single
// call to run all the queries for each language and
// another to interpret the results.
logger.startGroup(`Running queries for ${language}`);
const startTimeBuiltIn = new Date().getTime();
await runQueryGroup(language, "all", undefined, undefined);
// TODO should not be using `builtin` here. We should be using `all` instead.
// The status report does not support `all` yet.
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
new Date().getTime() - startTimeBuiltIn;
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
const analysisSummary = await runInterpretResults(language, undefined, sarifFile, config.debugMode);
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
if (hasPackWithCustomQueries) {
logger.info("*************");
logger.info("Performing analysis with custom QL Packs. QL Packs are an experimental feature.");
logger.info("And should not be used in production yet.");
logger.info("*************");
logger.startGroup(`Downloading custom packs for ${language}`);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const results = await codeql.packDownload(packsWithVersion);
logger.info(`Downloaded packs: ${results.packs
.map((r) => `${r.name}@${r.version || "latest"}`)
.join(", ")}`);
logger.endGroup();
logger.info(analysisSummary);
}
else {
logger.startGroup(`Running queries for ${language}`);
const querySuitePaths = [];
if (queries["builtin"].length > 0) {
const startTimeBuiltIn = new Date().getTime();
querySuitePaths.push((await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"], queryFilters), undefined)));
querySuitePaths.push(await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"]), undefined));
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
new Date().getTime() - startTimeBuiltIn;
}
@@ -171,12 +156,12 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
let ranCustom = false;
for (let i = 0; i < queries["custom"].length; ++i) {
if (queries["custom"][i].queries.length > 0) {
querySuitePaths.push((await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries, queryFilters), queries["custom"][i].searchPath)));
querySuitePaths.push(await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries), queries["custom"][i].searchPath));
ranCustom = true;
}
}
if (packsWithVersion.length > 0) {
querySuitePaths.push(await runQueryPacks(language, "packs", packsWithVersion, queryFilters));
querySuitePaths.push(await runQueryGroup(language, "packs", createPackSuiteContents(packsWithVersion), undefined));
ranCustom = true;
}
if (ranCustom) {
@@ -187,12 +172,16 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile, config.debugMode);
const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile);
if (!cliCanCountBaseline)
await injectLinesOfCode(sarifFile, language, locPromise);
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
}
if (!cliCanCountBaseline || debugMode)
printLinesOfCodeSummary(logger, language, await locPromise);
if (cliCanCountBaseline)
logger.info(await runPrintLinesOfCode(language));
}
catch (e) {
@@ -205,89 +194,51 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
}
}
return statusReport;
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
async function runInterpretResults(language, queries, sarifFile) {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId, featureEnablement);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId);
}
async function cliCanCountLoC() {
return await util.codeQlVersionAbove(await (0, codeql_1.getCodeQL)(config.codeQLCmd), codeql_1.CODEQL_VERSION_COUNTS_LINES);
}
async function runPrintLinesOfCode(language) {
const databasePath = util.getCodeQLDatabasePath(config, language);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
return await codeql.databasePrintBaseline(databasePath);
}
async function runQueryGroup(language, type, querySuiteContents, searchPath) {
const databasePath = util.getCodeQLDatabasePath(config, language);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuitePath = querySuiteContents
? `${databasePath}-queries-${type}.qls`
: undefined;
if (querySuiteContents && querySuitePath) {
const querySuitePath = `${databasePath}-queries-${type}.qls`;
fs.writeFileSync(querySuitePath, querySuiteContents);
logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, memoryFlag, threadsFlag);
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
return querySuitePath;
}
async function runQueryPacks(language, type, packs, queryFilters) {
const databasePath = util.getCodeQLDatabasePath(config, language);
for (const pack of packs) {
logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
}
// combine the list of packs into a query suite in order to run them all simultaneously.
const querySuite = packs.map(convertPackToQuerySuiteEntry).concat(queryFilters);
const querySuitePath = `${databasePath}-queries-${type}.qls`;
fs.writeFileSync(querySuitePath, yaml.dump(querySuite));
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
await codeql.databaseRunQueries(databasePath, undefined, querySuitePath, memoryFlag, threadsFlag);
return querySuitePath;
}
}
exports.runQueries = runQueries;
function convertPackToQuerySuiteEntry(packStr) {
var _a, _b, _c, _d;
const pack = configUtils.parsePacksSpecification(packStr);
return {
qlpack: !pack.path ? pack.name : undefined,
from: pack.path ? pack.name : undefined,
version: pack.version,
query: ((_a = pack.path) === null || _a === void 0 ? void 0 : _a.endsWith(".ql")) ? pack.path : undefined,
queries: !((_b = pack.path) === null || _b === void 0 ? void 0 : _b.endsWith(".ql")) && !((_c = pack.path) === null || _c === void 0 ? void 0 : _c.endsWith(".qls"))
? pack.path
: undefined,
apply: ((_d = pack.path) === null || _d === void 0 ? void 0 : _d.endsWith(".qls")) ? pack.path : undefined,
};
function createQuerySuiteContents(queries) {
return queries.map((q) => `- query: ${q}`).join("\n");
}
exports.convertPackToQuerySuiteEntry = convertPackToQuerySuiteEntry;
function createQuerySuiteContents(queries, queryFilters) {
return yaml.dump(queries.map((q) => ({ query: q })).concat(queryFilters));
function createPackSuiteContents(packsWithVersion) {
return packsWithVersion.map(packWithVersionToQuerySuiteEntry).join("\n");
}
function packWithVersionToQuerySuiteEntry(pack) {
let text = `- qlpack: ${pack.packName}`;
if (pack.version) {
text += `\n version: ${pack.version}`;
}
return text;
}
exports.createQuerySuiteContents = createQuerySuiteContents;
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
try {
await (0, del_1.default)(outputDir, { force: true });
}
catch (error) {
if ((error === null || error === void 0 ? void 0 : error.code) !== "ENOENT") {
throw error;
}
}
await fs.promises.mkdir(outputDir, { recursive: true });
const timings = await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
// WARNING: This does not _really_ end tracing, as the tracer will restore its
// critical environment variables and it'll still be active for all processes
// launched from this build step.
// However, it will stop tracing for all steps past the codeql-action/analyze
// step.
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Delete variables as specified by the end-tracing script
await (0, tracer_config_1.endTracingForCluster)(config);
}
else {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
}
return timings;
fs.mkdirSync(outputDir, { recursive: true });
await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
}
exports.runFinalize = runFinalize;
async function runCleanup(config, cleanupLevel, logger) {
@@ -300,28 +251,32 @@ async function runCleanup(config, cleanupLevel, logger) {
logger.endGroup();
}
exports.runCleanup = runCleanup;
// exported for testing
function validateQueryFilters(queryFilters) {
if (!queryFilters) {
return [];
}
if (!Array.isArray(queryFilters)) {
throw new Error(`Query filters must be an array of "include" or "exclude" entries. Found ${typeof queryFilters}`);
}
const errors = [];
for (const qf of queryFilters) {
const keys = Object.keys(qf);
if (keys.length !== 1) {
errors.push(`Query filter must have exactly one key: ${JSON.stringify(qf)}`);
}
if (!["exclude", "include"].includes(keys[0])) {
errors.push(`Only "include" or "exclude" filters are allowed:\n${JSON.stringify(qf)}`);
async function injectLinesOfCode(sarifFile, language, locPromise) {
var _a;
const lineCounts = await locPromise;
if (language in lineCounts) {
const sarif = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
if (Array.isArray(sarif.runs)) {
for (const run of sarif.runs) {
run.properties = run.properties || {};
run.properties.metricResults = run.properties.metricResults || [];
for (const metric of run.properties.metricResults) {
// Baseline is inserted when matching rule has tag lines-of-code
if (metric.rule && metric.rule.toolComponent) {
const matchingRule = run.tool.extensions[metric.rule.toolComponent.index].rules[metric.rule.index];
if ((_a = matchingRule.properties.tags) === null || _a === void 0 ? void 0 : _a.includes("lines-of-code")) {
metric.baseline = lineCounts[language];
}
}
if (errors.length) {
throw new Error(`Invalid query filter.\n${errors.join("\n")}`);
}
return queryFilters;
}
exports.validateQueryFilters = validateQueryFilters;
}
fs.writeFileSync(sarifFile, JSON.stringify(sarif));
}
}
function printLinesOfCodeSummary(logger, language, lineCounts) {
if (language in lineCounts) {
logger.info(`Counted a baseline of ${lineCounts[language]} lines of code for ${language}.`);
}
}
//# sourceMappingURL=analyze.js.map
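
One side of the diff above assembles a per-language packs query suite (written to <language>-queries-packs.qls next to the database) by concatenating one entry per configured pack. A TypeScript sketch mirroring that helper, using the cpp pack fixture from analyze.test.js further down:

// Mirrors packWithVersionToQuerySuiteEntry from the diff above.
interface PackWithVersion {
  packName: string;
  version?: string;
}

function packEntry(pack: PackWithVersion): string {
  let text = `- qlpack: ${pack.packName}`;
  if (pack.version) {
    text += `\n  version: ${pack.version}`;
  }
  return text;
}

// For the cpp fixture this produces a suite entry equivalent to:
//   - qlpack: a/b
//     version: 1.0.0
console.log(packEntry({ packName: "a/b", version: "1.0.0" }));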

File diff suppressed because one or more lines are too long

221
lib/analyze.test.js generated
View File

@@ -26,8 +26,11 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const yaml = __importStar(require("js-yaml"));
const semver_1 = require("semver");
const sinon = __importStar(require("sinon"));
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const count = __importStar(require("./count-loc"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
@@ -37,6 +40,12 @@ const util = __importStar(require("./util"));
// and correct case of builtin or custom. Also checks the correct search
// paths are set in the database analyze invocation.
(0, ava_1.default)("status report fields and search path setting", async (t) => {
const mockLinesOfCode = Object.values(languages_1.Language).reduce((obj, lang, i) => {
// use a different line count for each language
obj[lang] = i + 1;
return obj;
}, {});
sinon.stub(count, "countLoc").resolves(mockLinesOfCode);
let searchPathsUsed = [];
return await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -44,8 +53,18 @@ const util = __importStar(require("./util"));
const addSnippetsFlag = "";
const threadsFlag = "";
const packs = {
[languages_1.Language.cpp]: ["a/b@1.0.0"],
[languages_1.Language.java]: ["c/d@2.0.0"],
[languages_1.Language.cpp]: [
{
packName: "a/b",
version: (0, semver_1.clean)("1.0.0"),
},
],
[languages_1.Language.java]: [
{
packName: "c/d",
version: (0, semver_1.clean)("2.0.0"),
},
],
};
for (const language of Object.values(languages_1.Language)) {
(0, codeql_1.setCodeQL)({
@@ -90,7 +109,6 @@ const util = __importStar(require("./util"));
}));
return "";
},
databasePrintBaseline: async () => "",
});
searchPathsUsed = [];
const config = {
@@ -100,6 +118,7 @@ const util = __importStar(require("./util"));
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: {
type: util.GitHubVariant.DOTCOM,
@@ -107,15 +126,6 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs,
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: {
injectedMlQueries: false,
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,
@@ -124,7 +134,7 @@ const util = __importStar(require("./util"));
builtin: ["foo.ql"],
custom: [],
};
const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([]));
const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
const hasPacks = language in packs;
const statusReportKeys = Object.keys(builtinStatusReport).sort();
if (hasPacks) {
@@ -150,7 +160,7 @@ const util = __importStar(require("./util"));
},
],
};
const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([]));
const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
t.deepEqual(Object.keys(customStatusReport).length, 2);
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
const expectedSearchPathsUsed = hasPacks
@@ -159,8 +169,32 @@ const util = __importStar(require("./util"));
t.deepEqual(searchPathsUsed, expectedSearchPathsUsed);
t.true(`interpret_results_${language}_duration_ms` in customStatusReport);
}
verifyLineCounts(tmpDir);
verifyQuerySuites(tmpDir);
});
function verifyLineCounts(tmpDir) {
// eslint-disable-next-line github/array-foreach
Object.keys(languages_1.Language).forEach((lang, i) => {
verifyLineCountForFile(path.join(tmpDir, `${lang}.sarif`), i + 1);
});
}
function verifyLineCountForFile(filePath, lineCount) {
const sarif = JSON.parse(fs.readFileSync(filePath, "utf8"));
t.deepEqual(sarif.runs[0].properties.metricResults, [
{
rule: {
index: 0,
toolComponent: {
index: 0,
},
},
value: 123,
baseline: lineCount,
},
]);
// when the rule doesn't exist, it should not be added
t.deepEqual(sarif.runs[1].properties.metricResults, []);
}
function verifyQuerySuites(tmpDir) {
const qlsContent = [
{
@@ -172,10 +206,32 @@ const util = __importStar(require("./util"));
query: "bar.ql",
},
];
const qlsPackContentCpp = [
{
qlpack: "a/b",
version: "1.0.0",
},
];
const qlsPackContentJava = [
{
qlpack: "c/d",
version: "2.0.0",
},
];
for (const lang of Object.values(languages_1.Language)) {
t.deepEqual(readContents(`${lang}-queries-builtin.qls`), qlsContent);
t.deepEqual(readContents(`${lang}-queries-custom-0.qls`), qlsContent);
t.deepEqual(readContents(`${lang}-queries-custom-1.qls`), qlsContent2);
const packSuiteName = `${lang}-queries-packs.qls`;
if (lang === languages_1.Language.cpp) {
t.deepEqual(readContents(packSuiteName), qlsPackContentCpp);
}
else if (lang === languages_1.Language.java) {
t.deepEqual(readContents(packSuiteName), qlsPackContentJava);
}
else {
t.false(fs.existsSync(path.join(tmpDir, "codeql_databases", packSuiteName)));
}
}
function readContents(name) {
const x = fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8");
@@ -184,141 +240,4 @@ const util = __importStar(require("./util"));
}
}
});
(0, ava_1.default)("validateQueryFilters", (t) => {
t.notThrows(() => (0, analyze_1.validateQueryFilters)([]));
t.notThrows(() => (0, analyze_1.validateQueryFilters)(undefined));
t.notThrows(() => {
return (0, analyze_1.validateQueryFilters)([
{
exclude: {
"problem.severity": "recommendation",
},
},
{
exclude: {
"tags contain": ["foo", "bar"],
},
},
{
include: {
"problem.severity": "something-to-think-about",
},
},
{
include: {
"tags contain": ["baz", "bop"],
},
},
]);
});
t.throws(() => {
return (0, analyze_1.validateQueryFilters)([
{
exclude: {
"tags contain": ["foo", "bar"],
},
include: {
"tags contain": ["baz", "bop"],
},
},
]);
}, { message: /Query filter must have exactly one key/ });
t.throws(() => {
return (0, analyze_1.validateQueryFilters)([{ xxx: "foo" }]);
}, { message: /Only "include" or "exclude" filters are allowed/ });
t.throws(() => {
return (0, analyze_1.validateQueryFilters)({ exclude: "foo" });
}, {
message: /Query filters must be an array of "include" or "exclude" entries/,
});
});
const convertPackToQuerySuiteEntryMacro = ava_1.default.macro({
exec: (t, packSpec, suiteEntry) => t.deepEqual((0, analyze_1.convertPackToQuerySuiteEntry)(packSpec), suiteEntry),
title: (_providedTitle, packSpec) => `Query Suite Entry: ${packSpec}`,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b", {
qlpack: "a/b",
from: undefined,
version: undefined,
query: undefined,
queries: undefined,
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3", {
qlpack: "a/b",
from: undefined,
version: "~1.2.3",
query: undefined,
queries: undefined,
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b:my/path", {
qlpack: undefined,
from: "a/b",
version: undefined,
query: undefined,
queries: "my/path",
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3:my/path", {
qlpack: undefined,
from: "a/b",
version: "~1.2.3",
query: undefined,
queries: "my/path",
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b:my/path/query.ql", {
qlpack: undefined,
from: "a/b",
version: undefined,
query: "my/path/query.ql",
queries: undefined,
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3:my/path/query.ql", {
qlpack: undefined,
from: "a/b",
version: "~1.2.3",
query: "my/path/query.ql",
queries: undefined,
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b:my/path/suite.qls", {
qlpack: undefined,
from: "a/b",
version: undefined,
query: undefined,
queries: undefined,
apply: "my/path/suite.qls",
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3:my/path/suite.qls", {
qlpack: undefined,
from: "a/b",
version: "~1.2.3",
query: undefined,
queries: undefined,
apply: "my/path/suite.qls",
});
(0, ava_1.default)("convertPackToQuerySuiteEntry Failure", (t) => {
t.throws(() => (0, analyze_1.convertPackToQuerySuiteEntry)("this-is-not-a-pack"));
});
(0, ava_1.default)("createQuerySuiteContents", (t) => {
const yamlResult = (0, analyze_1.createQuerySuiteContents)(["query1.ql", "query2.ql"], [
{
exclude: { "problem.severity": "recommendation" },
},
{
include: { "problem.severity": "recommendation" },
},
]);
const expected = `- query: query1.ql
- query: query2.ql
- exclude:
problem.severity: recommendation
- include:
problem.severity: recommendation
`;
t.deepEqual(yamlResult, expected);
});
//# sourceMappingURL=analyze.test.js.map

File diff suppressed because one or more lines are too long

56
lib/api-client.js generated
View File

@@ -22,12 +22,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getGitHubVersion = exports.getApiClientWithExternalAuth = exports.getApiClient = exports.getApiDetails = exports.DisallowedAPIVersionReason = void 0;
exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
const path = __importStar(require("path"));
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const retry = __importStar(require("@octokit/plugin-retry"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const actions_util_1 = require("./actions-util");
const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
@@ -36,44 +36,36 @@ var DisallowedAPIVersionReason;
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
})(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
const getApiClient = function (apiDetails, { allowExternal = false } = {}) {
const auth = (allowExternal && apiDetails.externalRepoAuth) || apiDetails.auth;
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
return new retryingOctokit(githubUtils.getOctokitOptions(auth, {
baseUrl: apiDetails.apiURL,
userAgent: `CodeQL-Action/${pkg.version}`,
baseUrl: getApiUrl(apiDetails.url),
userAgent: `CodeQL-${(0, util_1.getMode)()}/${pkg.version}`,
log: (0, console_log_level_1.default)({ level: "debug" }),
}));
};
exports.getApiClient = getApiClient;
function getApiUrl(githubUrl) {
const url = new URL(githubUrl);
// If we detect this is trying to connect to github.com
// then return with a fixed canonical URL.
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://api.github.com";
}
function getApiDetails() {
return {
// Add the /api/v3 API prefix
url.pathname = path.join(url.pathname, "api", "v3");
return url.toString();
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been converted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient() {
const apiDetails = {
auth: (0, actions_util_1.getRequiredInput)("token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
apiURL: (0, util_1.getRequiredEnvParam)("GITHUB_API_URL"),
};
return (0, exports.getApiClient)(apiDetails);
}
exports.getApiDetails = getApiDetails;
function getApiClient() {
return createApiClientWithDetails(getApiDetails());
}
exports.getApiClient = getApiClient;
function getApiClientWithExternalAuth(apiDetails) {
return createApiClientWithDetails(apiDetails, { allowExternal: true });
}
exports.getApiClientWithExternalAuth = getApiClientWithExternalAuth;
let cachedGitHubVersion = undefined;
/**
* Report the GitHub server version. This is a wrapper around
* util.getGitHubVersion() that automatically supplies GitHub API details using
* GitHub Action inputs.
*
* @returns GitHub version
*/
async function getGitHubVersion() {
if (cachedGitHubVersion === undefined) {
cachedGitHubVersion = await util.getGitHubVersion(getApiDetails());
}
return cachedGitHubVersion;
}
exports.getGitHubVersion = getGitHubVersion;
exports.getActionsApiClient = getActionsApiClient;
//# sourceMappingURL=api-client.js.map
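
For reference, the getApiUrl helper in the diff above normalizes a GitHub server URL to its REST API base: github.com and api.github.com map to the canonical https://api.github.com, while any other (GitHub Enterprise Server) URL gets an /api/v3 path suffix. A standalone TypeScript sketch of that mapping, exercised with the two URLs used by the tests below:

import * as path from "path";

// Mirrors the getApiUrl helper from the diff above.
function apiUrlFor(githubUrl: string): string {
  const url = new URL(githubUrl);
  // github.com and api.github.com collapse to the fixed canonical API URL.
  if (url.hostname === "github.com" || url.hostname === "api.github.com") {
    return "https://api.github.com";
  }
  // Everything else is treated as GitHub Enterprise Server: append /api/v3.
  url.pathname = path.join(url.pathname, "api", "v3");
  return url.toString();
}

console.log(apiUrlFor("https://github.com/some/invalid/url")); // https://api.github.com
console.log(apiUrlFor("http://hucairz")); // http://hucairz/api/v3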

lib/api-client.js.map
View File

@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,6CAA+B;AAC/B,iCAA4D;AAE5D,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAiBD,SAAS,0BAA0B,CACjC,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,UAAU,CAAC,MAAM;QAC1B,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;QACzC,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC;AAED,SAAgB,aAAa;IAC3B,OAAO;QACL,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;QAC7C,MAAM,EAAE,IAAA,0BAAmB,EAAC,gBAAgB,CAAC;KAC9C,CAAC;AACJ,CAAC;AAND,sCAMC;AAED,SAAgB,YAAY;IAC1B,OAAO,0BAA0B,CAAC,aAAa,EAAE,CAAC,CAAC;AACrD,CAAC;AAFD,oCAEC;AAED,SAAgB,4BAA4B,CAC1C,UAAoC;IAEpC,OAAO,0BAA0B,CAAC,UAAU,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;AACzE,CAAC;AAJD,oEAIC;AAED,IAAI,mBAAmB,GAA8B,SAAS,CAAC;AAE/D;;;;;;GAMG;AACI,KAAK,UAAU,gBAAgB;IACpC,IAAI,mBAAmB,KAAK,SAAS,EAAE;QACrC,mBAAmB,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC;KACpE;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AALD,4CAKC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,iCAAsD;AAEtD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,IAAA,cAAO,GAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,IAAA,oBAAY,EAAC,UAAU,CAAC,CAAC;AAClC,CAAC;AAPD,kDAOC"}

65
lib/api-client.test.js generated
View File

@@ -25,10 +25,9 @@ Object.defineProperty(exports, "__esModule", { value: true });
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
(0, testing_utils_1.setupTests)(ava_1.default);
@@ -38,23 +37,55 @@ ava_1.default.beforeEach(() => {
pluginStub = sinon.stub(githubUtils.GitHub, "plugin");
githubStub = sinon.stub();
pluginStub.returns(githubStub);
util.initializeEnvironment(pkg.version);
(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
});
(0, ava_1.default)("getApiClient", async (t) => {
sinon.stub(actionsUtil, "getRequiredInput").withArgs("token").returns("xyz");
const requiredEnvParamStub = sinon.stub(util, "getRequiredEnvParam");
requiredEnvParamStub
.withArgs("GITHUB_SERVER_URL")
.returns("http://github.localhost");
requiredEnvParamStub
.withArgs("GITHUB_API_URL")
.returns("http://api.github.localhost");
(0, api_client_1.getApiClient)();
t.assert(githubStub.calledOnceWithExactly({
(0, ava_1.default)("Get the client API", async (t) => {
doTest(t, {
auth: "xyz",
externalRepoAuth: "abc",
url: "http://hucairz",
}, undefined, {
auth: "token xyz",
baseUrl: "http://api.github.localhost",
log: sinon.match.any,
baseUrl: "http://hucairz/api/v3",
userAgent: `CodeQL-Action/${pkg.version}`,
}));
});
});
(0, ava_1.default)("Get the client API external", async (t) => {
doTest(t, {
auth: "xyz",
externalRepoAuth: "abc",
url: "http://hucairz",
}, { allowExternal: true }, {
auth: "token abc",
baseUrl: "http://hucairz/api/v3",
userAgent: `CodeQL-Action/${pkg.version}`,
});
});
(0, ava_1.default)("Get the client API external not present", async (t) => {
doTest(t, {
auth: "xyz",
url: "http://hucairz",
}, { allowExternal: true }, {
auth: "token xyz",
baseUrl: "http://hucairz/api/v3",
userAgent: `CodeQL-Action/${pkg.version}`,
});
});
(0, ava_1.default)("Get the client API with github url", async (t) => {
doTest(t, {
auth: "xyz",
url: "https://github.com/some/invalid/url",
}, undefined, {
auth: "token xyz",
baseUrl: "https://api.github.com",
userAgent: `CodeQL-Action/${pkg.version}`,
});
});
function doTest(t, clientArgs, clientOptions, expected) {
(0, api_client_1.getApiClient)(clientArgs, clientOptions);
const firstCallArgs = githubStub.args[0];
// log is a function, so we don't need to test for equality of it
delete firstCallArgs[0].log;
t.deepEqual(firstCallArgs, [expected]);
}
//# sourceMappingURL=api-client.test.js.map

lib/api-client.test.js.map
View File

@@ -1 +1 @@
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,6CAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,IAAI,CAAC,qBAAqB,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;AAC1C,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,cAAc,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/B,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC7E,MAAM,oBAAoB,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,qBAAqB,CAAC,CAAC;IACrE,oBAAoB;SACjB,QAAQ,CAAC,mBAAmB,CAAC;SAC7B,OAAO,CAAC,yBAAyB,CAAC,CAAC;IACtC,oBAAoB;SACjB,QAAQ,CAAC,gBAAgB,CAAC;SAC1B,OAAO,CAAC,6BAA6B,CAAC,CAAC;IAE1C,IAAA,yBAAY,GAAE,CAAC;IAEf,CAAC,CAAC,MAAM,CACN,UAAU,CAAC,qBAAqB,CAAC;QAC/B,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,6BAA6B;QACtC,GAAG,EAAE,KAAK,CAAC,KAAK,CAAC,GAAG;QACpB,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CAAC,CACH,CAAC;AACJ,CAAC,CAAC,CAAC"}
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,6CAA+B;AAE/B,6CAA4C;AAC5C,mDAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;AACnD,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,IAAA,yBAAY,EAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"}


@@ -1 +1 @@
{ "maximumVersion": "3.8", "minimumVersion": "3.3" }
{ "maximumVersion": "3.3", "minimumVersion": "3.0" }

lib/autobuild-action.js generated

@@ -21,17 +21,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const autobuild_1 = require("./autobuild");
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const config_utils = __importStar(require("./config-utils"));
const logging_1 = require("./logging");
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
(0, util_1.initializeEnvironment)(pkg.version);
const status = (0, actions_util_1.getActionsStatus)(cause, failingLanguage);
(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
const status = failingLanguage !== undefined || cause !== undefined
? "failure"
: "success";
const statusReportBase = await (0, actions_util_1.createStatusReportBase)("autobuild", status, startedAt, cause === null || cause === void 0 ? void 0 : cause.message, cause === null || cause === void 0 ? void 0 : cause.stack);
const statusReport = {
...statusReportBase,
@@ -41,44 +41,29 @@ async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguag
await (0, actions_util_1.sendStatusReport)(statusReport);
}
async function run() {
const startedAt = new Date();
const logger = (0, logging_1.getActionsLogger)();
await (0, util_1.checkActionVersion)(pkg.version);
let currentLanguage = undefined;
let languages = undefined;
const startedAt = new Date();
let language = undefined;
try {
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
return;
}
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
const config = await configUtils.getConfig((0, actions_util_1.getTemporaryDirectory)(), logger);
const config = await config_utils.getConfig((0, actions_util_1.getTemporaryDirectory)(), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
languages = await (0, autobuild_1.determineAutobuildLanguages)(config, logger);
if (languages !== undefined) {
const workingDirectory = (0, actions_util_1.getOptionalInput)("working-directory");
if (workingDirectory) {
logger.info(`Changing autobuilder working directory to ${workingDirectory}`);
process.chdir(workingDirectory);
}
for (const language of languages) {
currentLanguage = language;
language = (0, autobuild_1.determineAutobuildLanguage)(config, logger);
if (language !== undefined) {
await (0, autobuild_1.runAutobuild)(language, config, logger);
if (language === languages_1.Language.go) {
core.exportVariable(util_1.DID_AUTOBUILD_GO_ENV_VAR_NAME, "true");
}
}
}
}
catch (error) {
core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error instanceof Error ? error.message : String(error)}`);
console.log(error);
await sendCompletedStatusReport(startedAt, languages !== null && languages !== void 0 ? languages : [], currentLanguage, error instanceof Error ? error : new Error(String(error)));
await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error instanceof Error ? error : new Error(String(error)));
return;
}
await sendCompletedStatusReport(startedAt, languages !== null && languages !== void 0 ? languages : []);
await sendCompletedStatusReport(startedAt, language ? [language] : []);
}
async function runWrapper() {
try {

lib/autobuild-action.js.map generated

@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,6CAAgD;AAChD,2CAAwE;AACxE,4DAA8C;AAC9C,2CAAuC;AACvC,uCAA6C;AAC7C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IAEnC,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,oCAA6B,EAAE,MAAM,CAAC,CAAC;iBAC5D;aACF;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,SAAS,aAAT,SAAS,cAAT,SAAS,GAAI,EAAE,EACf,eAAe,EACf,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,SAAS,aAAT,SAAS,cAAT,SAAS,GAAI,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

lib/autobuild.js generated

@@ -1,75 +1,28 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.runAutobuild = exports.determineAutobuildLanguages = void 0;
exports.runAutobuild = exports.determineAutobuildLanguage = void 0;
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
async function determineAutobuildLanguages(config, logger) {
function determineAutobuildLanguage(config, logger) {
// Attempt to find a language to autobuild
    // We want to pick the dominant language in the repo from the ones we're able to build.
    // The languages are sorted in the order specified by the user, or by lines of code if we got
    // them from the GitHub API, so try to build the first language on the list.
const autobuildLanguages = config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l));
if (!autobuildLanguages) {
const autobuildLanguages = config.languages.filter(languages_1.isTracedLanguage);
const language = autobuildLanguages[0];
if (!language) {
logger.info("None of the languages in this project require extra build steps");
return undefined;
}
/**
* Additionally autobuild Go in the autobuild Action to ensure backwards
* compatibility for users performing a multi-language build within a single
* job.
*
* For example, consider a user with the following workflow file:
*
* ```yml
* - uses: github/codeql-action/init@v2
* with:
* languages: go, java
* - uses: github/codeql-action/autobuild@v2
* - uses: github/codeql-action/analyze@v2
* ```
*
* - With Go extraction disabled, we will run the Java autobuilder in the
* autobuild Action, ensuring we extract both Java and Go code.
* - With Go extraction enabled, taking the previous behavior we'd run the Go
* autobuilder, since Go is first on the list of languages. We wouldn't run
* the Java autobuilder at all and so we'd only extract Go code.
*
* We therefore introduce a special case here such that we'll autobuild Go
* in addition to the primary non-Go traced language in the autobuild Action.
*
* This special case behavior should be removed as part of the next major
* version of the CodeQL Action.
*/
const autobuildLanguagesWithoutGo = autobuildLanguages.filter((l) => l !== languages_1.Language.go);
const languages = [];
// First run the autobuilder for the first non-Go traced language, if one
// exists.
if (autobuildLanguagesWithoutGo[0] !== undefined) {
languages.push(autobuildLanguagesWithoutGo[0]);
}
// If Go is requested, run the Go autobuilder last to ensure it doesn't
// interfere with the other autobuilder.
if (autobuildLanguages.length !== autobuildLanguagesWithoutGo.length) {
languages.push(languages_1.Language.go);
}
logger.debug(`Will autobuild ${languages.join(" and ")}.`);
// In general the autobuilders for other traced languages may conflict with
// each other. Therefore if a user has requested more than one non-Go traced
// language, we ask for manual build steps.
// Matrixing the build would also work, but that would change the SARIF
// categories, potentially leading to a "stale tips" situation where alerts
// that should be fixed remain on a repo since they are linked to SARIF
// categories that are no longer updated.
if (autobuildLanguagesWithoutGo.length > 1) {
logger.warning(`We will only automatically build ${languages.join(" and ")} code. If you wish to scan ${autobuildLanguagesWithoutGo
logger.debug(`Detected dominant traced language: ${language}`);
if (autobuildLanguages.length > 1) {
logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages
.slice(1)
.join(" and ")}, you must replace the autobuild step of your workflow with custom build steps. ` +
"For more information, see " +
"https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-the-codeql-workflow-for-compiled-languages#adding-build-steps-for-a-compiled-language");
.join(" and ")}, you must replace this call with custom build steps.`);
}
return languages;
return language;
}
exports.determineAutobuildLanguages = determineAutobuildLanguages;
exports.determineAutobuildLanguage = determineAutobuildLanguage;
async function runAutobuild(language, config, logger) {
logger.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = await (0, codeql_1.getCodeQL)(config.codeQLCmd);

lib/autobuild.js.map generated

@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;AAAA,qCAAqC;AAErC,2CAAyD;AAGlD,KAAK,UAAU,2BAA2B,CAC/C,MAA0B,EAC1B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,CAAC,CACpB,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE;QACvB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE;QAChD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;KAChD;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE;QACpE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;KAC7B;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,4BAA4B;YAC5B,0NAA0N,CAC7N,CAAC;KACH;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAtFD,kEAsFC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}

lib/codeql.js generated

@@ -22,32 +22,26 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CODEQL_VERSION_CONFIG_FILES = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = exports.CommandInvocationError = void 0;
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
const yaml = __importStar(require("js-yaml"));
const query_string_1 = __importDefault(require("query-string"));
const semver = __importStar(require("semver"));
const uuid_1 = require("uuid");
const actions_util_1 = require("./actions-util");
const api = __importStar(require("./api-client"));
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
const error_matcher_1 = require("./error-matcher");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const toolcache = __importStar(require("./toolcache"));
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
const trap_caching_1 = require("./trap-caching");
const util = __importStar(require("./util"));
const util_1 = require("./util");
class CommandInvocationError extends Error {
constructor(cmd, args, exitCode, error, output) {
constructor(cmd, args, exitCode, error) {
super(`Failure invoking ${cmd} with arguments ${args}.\n
Exit code ${exitCode} and error was:\n
${error}`);
this.output = output;
}
}
exports.CommandInvocationError = CommandInvocationError;
@@ -57,26 +51,28 @@ exports.CommandInvocationError = CommandInvocationError;
*/
let cachedCodeQL = undefined;
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
exports.CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
/**
* The oldest version of CodeQL that the Action will run with. This should be
* at least three minor versions behind the current version and must include the
* CLI versions shipped with each supported version of GHES.
* at least three minor versions behind the current version. The version flags
* below can be used to conditionally enable certain features on versions newer
* than this. Please record the reason we cannot support an older version.
*
* The version flags below can be used to conditionally enable certain features
* on versions newer than this.
* Reason: Changes to how the tracing environment is set up.
*/
const CODEQL_MINIMUM_VERSION = "2.6.3";
const CODEQL_MINIMUM_VERSION = "2.3.1";
/**
* Versions of CodeQL that version-flag certain functionality in the Action.
* For convenience, please keep these in descending order. Once a version
* flag is older than the oldest supported version above, it may be removed.
*/
const CODEQL_VERSION_RAM_FINALIZE = "2.5.8";
const CODEQL_VERSION_DIAGNOSTICS = "2.5.6";
const CODEQL_VERSION_METRICS = "2.5.5";
const CODEQL_VERSION_GROUP_RULES = "2.5.5";
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
exports.CODEQL_VERSION_CONFIG_FILES = "2.10.1";
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
/**
* This variable controls using the new style of tracing from the CodeQL
* CLI. In particular, with versions above this we will use both indirect
@@ -87,22 +83,6 @@ exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
* versions above that.
*/
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
/**
* Versions 2.7.3+ of the CodeQL CLI support build tracing with glibc 2.34 on Linux. Versions before
* this cannot perform build tracing when running on the Actions `ubuntu-22.04` runner image.
*/
exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = "2.7.3";
/**
* Versions 2.9.0+ of the CodeQL CLI run machine learning models from a temporary directory, which
* resolves an issue on Windows where TensorFlow models are not correctly loaded due to the path of
* some of their files being greater than MAX_PATH (260 characters).
*/
exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = "2.9.0";
/**
* Previous versions had the option already, but were missing the
* --extractor-options-verbosity that we need.
*/
exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = "2.10.3";
function getCodeQLBundleName() {
let platform;
if (process.platform === "win32") {
@@ -120,25 +100,39 @@ function getCodeQLBundleName() {
return `codeql-bundle-${platform}.tar.gz`;
}
function getCodeQLActionRepository(logger) {
if (!util.isActions()) {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
else {
return getActionsCodeQLActionRepository(logger);
}
}
exports.getCodeQLActionRepository = getCodeQLActionRepository;
function getActionsCodeQLActionRepository(logger) {
if (process.env["GITHUB_ACTION_REPOSITORY"] !== undefined) {
return process.env["GITHUB_ACTION_REPOSITORY"];
}
// The Actions Runner used with GitHub Enterprise Server 2.22 did not set the GITHUB_ACTION_REPOSITORY variable.
// This fallback logic can be removed after the end-of-support for 2.22 on 2021-09-23.
if ((0, actions_util_1.isRunningLocalAction)()) {
// This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout.
// In these cases, the GITHUB_ACTION_REPOSITORY environment variable is not set.
logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository.");
return exports.CODEQL_DEFAULT_ACTION_REPOSITORY;
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
return util.getRequiredEnvParam("GITHUB_ACTION_REPOSITORY");
logger.info("GITHUB_ACTION_REPOSITORY environment variable was not set. Falling back to legacy method of finding the GitHub Action.");
const relativeScriptPathParts = (0, actions_util_1.getRelativeScriptPath)().split(path.sep);
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
}
exports.getCodeQLActionRepository = getCodeQLActionRepository;
async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
const codeQLActionRepository = getCodeQLActionRepository(logger);
const potentialDownloadSources = [
// This GitHub instance, and this Action.
[apiDetails.url, codeQLActionRepository],
// This GitHub instance, and the canonical Action.
[apiDetails.url, exports.CODEQL_DEFAULT_ACTION_REPOSITORY],
[apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY],
// GitHub.com, and the canonical Action.
[util.GITHUB_DOTCOM_URL, exports.CODEQL_DEFAULT_ACTION_REPOSITORY],
[util.GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
];
// We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
@@ -149,14 +143,14 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
if (variant === util.GitHubVariant.GHAE) {
try {
const release = await api
.getApiClient()
.getApiClient(apiDetails)
.request("GET /enterprise/code-scanning/codeql-bundle/find/{tag}", {
tag: CODEQL_BUNDLE_VERSION,
});
const assetID = release.data.assets[codeQLBundleName];
if (assetID !== undefined) {
const download = await api
.getApiClient()
.getApiClient(apiDetails)
.request("GET /enterprise/code-scanning/codeql-bundle/download/{asset_id}", { asset_id: assetID });
const downloadURL = download.data.url;
logger.info(`Found CodeQL bundle at GitHub AE endpoint with URL ${downloadURL}.`);
@@ -174,12 +168,12 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
const [apiURL, repository] = downloadSource;
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
if (apiURL === util.GITHUB_DOTCOM_URL &&
repository === exports.CODEQL_DEFAULT_ACTION_REPOSITORY) {
repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
break;
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release = await api.getApiClient().repos.getReleaseByTag({
const release = await api.getApiClient(apiDetails).repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: CODEQL_BUNDLE_VERSION,
@@ -195,62 +189,37 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
}
}
return `https://github.com/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
}
/**
* Set up CodeQL CLI access.
*
* @param codeqlURL
* @param apiDetails
* @param tempDir
* @param variant
* @param features
* @param logger
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
* version requirement. Must be set to true outside tests.
* @returns a { CodeQL, toolsVersion } object.
*/
async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, bypassToolcache, logger, checkVersion) {
async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, checkVersion) {
try {
const forceLatestReason =
// We use the special value of 'latest' to prioritize the version in the
// defaults over any pinned cached version.
codeqlURL === "latest"
? '"tools: latest" was requested'
: // If the user hasn't requested a particular CodeQL version, then bypass
// the toolcache when the appropriate feature is enabled. This
// allows us to quickly rollback a broken bundle that has made its way
// into the toolcache.
codeqlURL === undefined && bypassToolcache
? "a specific version of CodeQL was not requested and the bypass toolcache feature is enabled"
: undefined;
const forceLatest = forceLatestReason !== undefined;
const forceLatest = codeqlURL === "latest";
if (forceLatest) {
logger.debug(`Forcing the latest version of the CodeQL tools since ${forceLatestReason}.`);
codeqlURL = undefined;
}
let codeqlFolder;
let codeqlURLVersion;
if (codeqlURL && !codeqlURL.startsWith("http")) {
codeqlFolder = await toolcache.extractTar(codeqlURL);
codeqlFolder = await toolcache.extractTar(codeqlURL, tempDir, logger);
codeqlURLVersion = "local";
}
else {
codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
// If we find the specified version, we always use that.
codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer);
codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer, toolCacheDir, logger);
// If we don't find the requested version, in some cases we may allow a
// different version to save download time if the version hasn't been
// specified explicitly (in which case we always honor it).
if (!codeqlFolder && !codeqlURL && !forceLatest) {
const codeqlVersions = toolcache.findAllVersions("CodeQL");
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]);
const codeqlVersions = toolcache.findAllVersions("CodeQL", toolCacheDir, logger);
if (codeqlVersions.length === 1) {
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], toolCacheDir, logger);
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
codeqlFolder = tmpCodeqlFolder;
codeqlURLVersion = codeqlVersions[0];
}
}
}
@@ -263,9 +232,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, bypassToolca
}
const parsedCodeQLURL = new URL(codeqlURL);
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
const headers = {
accept: "application/octet-stream",
};
const headers = { accept: "application/octet-stream" };
// We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom.
@@ -279,12 +246,10 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, bypassToolca
logger.debug("Downloading CodeQL bundle without token.");
}
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
const dest = path.join(tempDir, (0, uuid_1.v4)());
const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, undefined, finalHeaders);
const codeqlPath = await toolcache.downloadTool(codeqlURL, tempDir, headers);
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer);
const codeqlExtracted = await toolcache.extractTar(codeqlPath, tempDir, logger);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer, toolCacheDir, logger);
}
}
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
@@ -363,7 +328,6 @@ function setCodeQL(partialCodeql) {
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
resolveLanguages: resolveFunction(partialCodeql, "resolveLanguages"),
betterResolveLanguages: resolveFunction(partialCodeql, "betterResolveLanguages"),
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
packDownload: resolveFunction(partialCodeql, "packDownload"),
databaseCleanup: resolveFunction(partialCodeql, "databaseCleanup"),
@@ -394,30 +358,20 @@ exports.getCachedCodeQL = getCachedCodeQL;
* a non-existent placeholder codeql command, so tests that use this function
* should also stub the toolrunner.ToolRunner constructor.
*/
async function getCodeQLForTesting(cmd = "codeql-for-testing") {
return getCodeQLForCmd(cmd, false);
async function getCodeQLForTesting() {
return getCodeQLForCmd("codeql-for-testing", false);
}
exports.getCodeQLForTesting = getCodeQLForTesting;
/**
* Return a CodeQL object for CodeQL CLI access.
*
* @param cmd Path to CodeQL CLI
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
* version requirement. Must be set to true outside tests.
* @returns A new CodeQL object
*/
async function getCodeQLForCmd(cmd, checkVersion) {
let cachedVersion = undefined;
const codeql = {
getPath() {
return cmd;
},
async getVersion() {
let result = util.getCachedCodeQlVersion();
if (result === undefined) {
result = (await runTool(cmd, ["version", "--format=terse"])).trim();
util.cacheCodeQlVersion(result);
}
return result;
if (cachedVersion === undefined)
cachedVersion = runTool(cmd, ["version", "--format=terse"]);
return await cachedVersion;
},
async printVersion() {
await runTool(cmd, ["version", "--format=json"]);
@@ -451,7 +405,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
// action/runner has been implemented in `codeql database trace-command`
// _and_ is present in the latest supported CLI release.)
const envFile = path.resolve(databasePath, "working", "env.tmp");
try {
await runTool(cmd, [
"database",
"trace-command",
@@ -461,22 +414,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
tracerEnvJs,
envFile,
]);
}
catch (e) {
if (e instanceof CommandInvocationError &&
e.output.includes("undefined symbol: __libc_dlopen_mode, version GLIBC_PRIVATE") &&
process.platform === "linux" &&
!(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_TRACING_GLIBC_2_34))) {
throw new util.UserError("The CodeQL CLI is incompatible with the version of glibc on your system. " +
`Please upgrade to CodeQL CLI version ${exports.CODEQL_VERSION_TRACING_GLIBC_2_34} or ` +
"later. If you cannot upgrade to a newer version of the CodeQL CLI, you can " +
`alternatively run your workflow on another runner image such as "ubuntu-20.04" ` +
"that has glibc 2.33 or earlier installed.");
}
else {
throw e;
}
}
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
},
async databaseInit(databasePath, language, sourceRoot) {
@@ -489,33 +426,22 @@ async function getCodeQLForCmd(cmd, checkVersion) {
...getExtraOptionsFromEnv(["database", "init"]),
]);
},
async databaseInitCluster(config, sourceRoot, processName, featureEnablement) {
const extraArgs = config.languages.map((language) => `--language=${language}`);
if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l)).length > 0) {
async databaseInitCluster(databasePath, languages, sourceRoot, processName, processLevel) {
const extraArgs = languages.map((language) => `--language=${language}`);
if (languages.filter(languages_1.isTracedLanguage).length > 0) {
extraArgs.push("--begin-tracing");
extraArgs.push(...(await (0, trap_caching_1.getTrapCachingExtractorConfigArgs)(config)));
if (processName !== undefined) {
extraArgs.push(`--trace-process-name=${processName}`);
if (
// There's a bug in Lua tracing for Go on Windows in versions earlier than
// `CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED`, so don't use Lua tracing
// when tracing Go on Windows on these CodeQL versions.
(await util.codeQlVersionAbove(this, CODEQL_VERSION_LUA_TRACER_CONFIG)) &&
config.languages.includes(languages_1.Language.go) &&
(0, languages_1.isTracedLanguage)(languages_1.Language.go) &&
process.platform === "win32" &&
!(await util.codeQlVersionAbove(this, CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED))) {
extraArgs.push("--no-internal-use-lua-tracing");
}
else {
extraArgs.push(`--trace-process-level=${processLevel || 3}`);
}
const configLocation = await generateCodeScanningConfig(codeql, config, featureEnablement);
if (configLocation) {
extraArgs.push(`--codescanning-config=${configLocation}`);
}
await runTool(cmd, [
"database",
"init",
"--db-cluster",
config.dbLocation,
databasePath,
`--source-root=${sourceRoot}`,
...extraArgs,
...getExtraOptionsFromEnv(["database", "init"]),
@@ -523,9 +449,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
},
async runAutobuild(language) {
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
// The autobuilder for Swift is located in the experimental/ directory.
const possibleExperimentalDir = language === languages_1.Language.swift ? "experimental" : "";
const autobuildCmd = path.join(path.dirname(cmd), possibleExperimentalDir, language, "tools", cmdName);
const autobuildCmd = path.join(path.dirname(cmd), language, "tools", cmdName);
// Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
// This is because of an issue with Azure pipelines timing out connections after 4 minutes
// and Maven not properly handling closed connections
@@ -537,23 +461,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"-Dhttp.keepAlive=false",
"-Dmaven.wagon.http.pool=false",
].join(" ");
// On macOS, System Integrity Protection (SIP) typically interferes with
// CodeQL build tracing of protected binaries.
// The usual workaround is to prefix `$CODEQL_RUNNER` to build commands:
// `$CODEQL_RUNNER` (not to be confused with the deprecated CodeQL Runner tool)
// points to a simple wrapper binary included with the CLI, and the extra layer of
// process indirection helps the tracer bypass SIP.
// The above SIP workaround is *not* needed here.
// At the `autobuild` step in the Actions workflow, we assume the `init` step
// has successfully run, and will have exported `DYLD_INSERT_LIBRARIES`
// into the environment of subsequent steps, to activate the tracer.
// When `DYLD_INSERT_LIBRARIES` is set in the environment for a step,
// the Actions runtime introduces its own workaround for SIP
// (https://github.com/actions/runner/pull/416).
await runTool(autobuildCmd);
},
async extractScannedLanguage(config, language) {
const databasePath = util.getCodeQLDatabasePath(config, language);
async extractScannedLanguage(databasePath, language) {
// Get extractor location
let extractorPath = "";
await new toolrunner.ToolRunner(cmd, [
@@ -580,7 +490,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, [
"database",
"trace-command",
...(await (0, trap_caching_1.getTrapCachingExtractorConfigArgsForLang)(config, language)),
...getExtraOptionsFromEnv(["database", "trace-command"]),
databasePath,
"--",
@@ -593,19 +502,15 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"finalize",
"--finalize-dataset",
threadsFlag,
memoryFlag,
...getExtraOptionsFromEnv(["database", "finalize"]),
databasePath,
];
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_RAM_FINALIZE))
args.push(memoryFlag);
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, args, error_matcher_1.errorMatchers);
},
async resolveLanguages() {
const codeqlArgs = [
"resolve",
"languages",
"--format=json",
...getExtraOptionsFromEnv(["resolve", "languages"]),
];
const codeqlArgs = ["resolve", "languages", "--format=json"];
const output = await runTool(cmd, codeqlArgs);
try {
return JSON.parse(output);
@@ -614,22 +519,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
throw new Error(`Unexpected output from codeql resolve languages: ${e}`);
}
},
async betterResolveLanguages() {
const codeqlArgs = [
"resolve",
"languages",
"--format=betterjson",
"--extractor-options-verbosity=4",
...getExtraOptionsFromEnv(["resolve", "languages"]),
];
const output = await runTool(cmd, codeqlArgs);
try {
return JSON.parse(output);
}
catch (e) {
throw new Error(`Unexpected output from codeql resolve languages with --format=betterjson: ${e}`);
}
},
async resolveQueries(queries, extraSearchPath) {
const codeqlArgs = [
"resolve",
@@ -663,40 +552,35 @@ async function getCodeQLForCmd(cmd, checkVersion) {
if (extraSearchPath !== undefined) {
codeqlArgs.push("--additional-packs", extraSearchPath);
}
if (querySuitePath) {
codeqlArgs.push(querySuitePath);
}
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
await runTool(cmd, codeqlArgs);
},
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId, featureEnablement) {
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId) {
const codeqlArgs = [
"database",
"interpret-results",
threadsFlag,
"--format=sarif-latest",
verbosityFlag,
"-v",
`--output=${sarifFile}`,
addSnippetsFlag,
"--print-diagnostics-summary",
"--print-metrics-summary",
"--sarif-group-rules-by-pack",
...getExtraOptionsFromEnv(["database", "interpret-results"]),
];
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_DIAGNOSTICS))
codeqlArgs.push("--print-diagnostics-summary");
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_METRICS))
codeqlArgs.push("--print-metrics-summary");
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_GROUP_RULES))
codeqlArgs.push("--sarif-group-rules-by-pack");
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_CUSTOM_QUERY_HELP))
codeqlArgs.push("--sarif-add-query-help");
if (automationDetailsId !== undefined) {
if (automationDetailsId !== undefined &&
(await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP))) {
codeqlArgs.push("--sarif-category", automationDetailsId);
}
if (await featureEnablement.getValue(feature_flags_1.Feature.FileBaselineInformationEnabled, this)) {
codeqlArgs.push("--sarif-add-baseline-file-info");
}
codeqlArgs.push(databasePath);
if (querySuitePaths) {
codeqlArgs.push(...querySuitePaths);
}
codeqlArgs.push(databasePath, ...querySuitePaths);
// capture stdout, which contains analysis summaries
const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
return returnState.stdout;
return await runTool(cmd, codeqlArgs);
},
async databasePrintBaseline(databasePath) {
const codeqlArgs = [
@@ -716,22 +600,14 @@ async function getCodeQLForCmd(cmd, checkVersion) {
* If no version is specified, then the latest version is
* downloaded. The check to determine what the latest version is is done
* each time this package is requested.
*
* Optionally, a `qlconfigFile` is included. If used, then this file
* is used to determine which registry each pack is downloaded from.
*/
async packDownload(packs, qlconfigFile) {
const qlconfigArg = qlconfigFile
? [`--qlconfig-file=${qlconfigFile}`]
: [];
async packDownload(packs) {
const codeqlArgs = [
"pack",
"download",
...qlconfigArg,
"--format=json",
"--resolve-query-specs",
...getExtraOptionsFromEnv(["pack", "download"]),
...packs,
...packs.map(packWithVersionToString),
];
const output = await runTool(cmd, codeqlArgs);
try {
@@ -757,36 +633,28 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"cleanup",
databasePath,
`--mode=${cleanupLevel}`,
...getExtraOptionsFromEnv(["database", "cleanup"]),
];
await runTool(cmd, codeqlArgs);
},
async databaseBundle(databasePath, outputFilePath, databaseName) {
async databaseBundle(databasePath, outputFilePath) {
const args = [
"database",
"bundle",
databasePath,
`--output=${outputFilePath}`,
`--name=${databaseName}`,
...getExtraOptionsFromEnv(["database", "bundle"]),
];
await new toolrunner.ToolRunner(cmd, args).exec();
},
};
// To ensure that status reports include the CodeQL CLI version wherever
// possible, we want to call getVersion(), which populates the version value
// used by status reporting, at the earliest opportunity. But invoking
// getVersion() directly here breaks tests that only pretend to create a
// CodeQL object. So instead we rely on the assumption that all non-test
// callers would set checkVersion to true, and util.codeQlVersionAbove()
// would call getVersion(), so the CLI version would be cached as soon as the
// CodeQL object is created.
if (checkVersion &&
!(await util.codeQlVersionAbove(codeql, CODEQL_MINIMUM_VERSION))) {
throw new Error(`Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${await codeql.getVersion()}`);
}
return codeql;
}
function packWithVersionToString(pack) {
return pack.version ? `${pack.packName}@${pack.version}` : pack.packName;
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*/
@@ -847,95 +715,17 @@ async function runTool(cmd, args = []) {
const exitCode = await new toolrunner.ToolRunner(cmd, args, {
listeners: {
stdout: (data) => {
output += data.toString("utf8");
output += data.toString();
},
stderr: (data) => {
let readStartIndex = 0;
// If the error is too large, then we only take the last 20,000 characters
if (data.length - maxErrorSize > 0) {
// Eg: if we have 20,000 the start index should be 2.
readStartIndex = data.length - maxErrorSize + 1;
}
error += data.toString("utf8", readStartIndex);
const toRead = Math.min(maxErrorSize - error.length, data.length);
error += data.toString("utf8", 0, toRead);
},
},
ignoreReturnCode: true,
}).exec();
if (exitCode !== 0)
throw new CommandInvocationError(cmd, args, exitCode, error, output);
throw new CommandInvocationError(cmd, args, exitCode, error);
return output;
}
/**
* If appropriate, generates a code scanning configuration that is to be used for a scan.
* If the configuration is not to be generated, returns undefined.
*
* @param codeql The CodeQL object to use.
* @param config The configuration to use.
* @returns the path to the generated user configuration file.
*/
async function generateCodeScanningConfig(codeql, config, featureEnablement) {
var _a;
if (!(await util.useCodeScanningConfigInCli(codeql, featureEnablement))) {
return;
}
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
// make a copy so we can modify it
const augmentedConfig = cloneObject(config.originalUserInput);
// Inject the queries from the input
if (config.augmentationProperties.queriesInput) {
if (config.augmentationProperties.queriesInputCombines) {
augmentedConfig.queries = (augmentedConfig.queries || []).concat(config.augmentationProperties.queriesInput);
}
else {
augmentedConfig.queries = config.augmentationProperties.queriesInput;
}
}
if (((_a = augmentedConfig.queries) === null || _a === void 0 ? void 0 : _a.length) === 0) {
delete augmentedConfig.queries;
}
// Inject the packs from the input
if (config.augmentationProperties.packsInput) {
if (config.augmentationProperties.packsInputCombines) {
// At this point, we already know that this is a single-language analysis
if (Array.isArray(augmentedConfig.packs)) {
augmentedConfig.packs = (augmentedConfig.packs || []).concat(config.augmentationProperties.packsInput);
}
else if (!augmentedConfig.packs) {
augmentedConfig.packs = config.augmentationProperties.packsInput;
}
else {
// At this point, we know there is only one language.
// If there were more than one language, an error would already have been thrown.
const language = Object.keys(augmentedConfig.packs)[0];
augmentedConfig.packs[language] = augmentedConfig.packs[language].concat(config.augmentationProperties.packsInput);
}
}
else {
augmentedConfig.packs = config.augmentationProperties.packsInput;
}
}
if (Array.isArray(augmentedConfig.packs) && !augmentedConfig.packs.length) {
delete augmentedConfig.packs;
}
if (config.augmentationProperties.injectedMlQueries) {
// We need to inject the ML queries into the original user input before
// we pass this on to the CLI, to make sure these get run.
const packString = await util.getMlPoweredJsQueriesPack(codeql);
if (augmentedConfig.packs === undefined)
augmentedConfig.packs = [];
if (Array.isArray(augmentedConfig.packs)) {
augmentedConfig.packs.push(packString);
}
else {
if (!augmentedConfig.packs.javascript)
augmentedConfig.packs["javascript"] = [];
augmentedConfig.packs["javascript"].push(packString);
}
}
fs.writeFileSync(configLocation, yaml.dump(augmentedConfig));
return configLocation;
}
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
//# sourceMappingURL=codeql.js.map
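Near the end of the lib/codeql.js diff above, generateCodeScanningConfig copies the user's original config and injects the workflow's queries and packs inputs before the result is handed to the CLI via --codescanning-config. A reduced sketch of the queries-injection rule it applies (pack injection follows the same combine-or-replace shape); the function name here is hypothetical:

```ts
interface UserConfig {
  queries?: Array<{ uses: string }>;
  // other code scanning config keys elided
}

// Combine-or-replace rule from the hunk above: when queriesInputCombines is
// set, the workflow input is appended to the config file's queries; otherwise
// it replaces them. An empty queries list is dropped entirely.
function injectQueries(
  original: UserConfig,
  queriesInput: Array<{ uses: string }> | undefined,
  queriesInputCombines: boolean
): UserConfig {
  const augmented: UserConfig = JSON.parse(JSON.stringify(original)); // deep copy
  if (queriesInput) {
    augmented.queries = queriesInputCombines
      ? [...(augmented.queries ?? []), ...queriesInput]
      : queriesInput;
  }
  if (augmented.queries?.length === 0) {
    delete augmented.queries;
  }
  return augmented;
}
```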

File diff suppressed because one or more lines are too long

lib/codeql.test.js generated

@@ -22,22 +22,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.stubToolRunnerConstructor = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const toolcache = __importStar(require("@actions/tool-cache"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const ava_1 = __importDefault(require("ava"));
const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml"));
const nock_1 = __importDefault(require("nock"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const codeql = __importStar(require("./codeql"));
const defaults = __importStar(require("./defaults.json"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
@@ -46,102 +38,54 @@ const util_1 = require("./util");
const sampleApiDetails = {
auth: "token",
url: "https://github.com",
apiURL: undefined,
registriesAuthTokens: undefined,
};
const sampleGHAEApiDetails = {
auth: "token",
url: "https://example.githubenterprise.com",
apiURL: undefined,
registriesAuthTokens: undefined,
};
let stubConfig;
ava_1.default.beforeEach(() => {
(0, util_1.initializeEnvironment)("1.2.3");
stubConfig = {
languages: [languages_1.Language.cpp],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: "",
codeQLCmd: "",
gitHubVersion: {
type: util.GitHubVariant.DOTCOM,
},
dbLocation: "",
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: {
injectedMlQueries: false,
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
});
async function mockApiAndSetupCodeQL({ apiDetails, bypassToolcache, isPinned, tmpDir, toolsInput, version, }) {
var _a;
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: "osx64";
const baseUrl = (_a = apiDetails === null || apiDetails === void 0 ? void 0 : apiDetails.url) !== null && _a !== void 0 ? _a : "https://example.com";
const relativeUrl = apiDetails
? `/github/codeql-action/releases/download/${version}/codeql-bundle-${platform}.tar.gz`
: `/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
(0, nock_1.default)(baseUrl)
.get(relativeUrl)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle${isPinned ? "-pinned" : ""}.tar.gz`));
return await codeql.setupCodeQL(toolsInput ? toolsInput.input : `${baseUrl}${relativeUrl}`, apiDetails !== null && apiDetails !== void 0 ? apiDetails : sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, !!bypassToolcache, (0, logging_1.getRunnerLogger)(true), false);
}
(0, ava_1.default)("download codeql bundle cache", async (t) => {
await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const versions = ["20200601", "20200610"];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
const codeQLConfig = await mockApiAndSetupCodeQL({ version, tmpDir });
(0, nock_1.default)("https://example.com")
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
t.deepEqual(codeQLConfig.toolsVersion, version);
}
t.is(toolcache.findAllVersions("CodeQL").length, 2);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
(0, ava_1.default)("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
version: "20200601",
isPinned: true,
tmpDir,
});
(0, nock_1.default)("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
const unpinnedCodeQLConfig = await mockApiAndSetupCodeQL({
version: "20200610",
tmpDir,
});
(0, nock_1.default)("https://example.com")
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
t.deepEqual(unpinnedCodeQLConfig.toolsVersion, "20200610");
});
});
(0, ava_1.default)("don't download codeql bundle cache with pinned different version cached", async (t) => {
await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
version: "20200601",
isPinned: true,
tmpDir,
});
(0, nock_1.default)("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
const codeQLConfig = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, (0, logging_1.getRunnerLogger)(true), false);
t.deepEqual(codeQLConfig.toolsVersion, "0.0.0-20200601");
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
});
@@ -149,19 +93,20 @@ async function mockApiAndSetupCodeQL({ apiDetails, bypassToolcache, isPinned, tm
(0, ava_1.default)("download codeql bundle cache with different version cached (not pinned)", async (t) => {
await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const cachedCodeQLConfig = await mockApiAndSetupCodeQL({
version: "20200601",
tmpDir,
});
(0, nock_1.default)("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
t.deepEqual(cachedCodeQLConfig.toolsVersion, "20200601");
const codeQLConfig = await mockApiAndSetupCodeQL({
version: defaults.bundleVersion,
tmpDir,
apiDetails: sampleApiDetails,
toolsInput: { input: undefined },
});
t.deepEqual(codeQLConfig.toolsVersion, defaults.bundleVersion.replace("codeql-bundle-", ""));
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: "osx64";
(0, nock_1.default)("https://github.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
@@ -169,56 +114,24 @@ async function mockApiAndSetupCodeQL({ apiDetails, bypassToolcache, isPinned, tm
(0, ava_1.default)('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
version: "20200601",
isPinned: true,
tmpDir,
});
(0, nock_1.default)("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
const latestCodeQLConfig = await mockApiAndSetupCodeQL({
version: defaults.bundleVersion,
apiDetails: sampleApiDetails,
toolsInput: { input: "latest" },
tmpDir,
});
t.deepEqual(latestCodeQLConfig.toolsVersion, defaults.bundleVersion.replace("codeql-bundle-", ""));
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: "osx64";
(0, nock_1.default)("https://github.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
const TOOLCACHE_BYPASS_TEST_CASES = [
[true, undefined, true],
[false, undefined, false],
[
true,
"https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz",
false,
],
];
for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCACHE_BYPASS_TEST_CASES) {
(0, ava_1.default)(`download codeql bundle ${shouldToolcacheBeBypassed ? "bypasses" : "does not bypass"} toolcache when feature ${isFeatureEnabled ? "enabled" : "disabled"} and tools: ${toolsInput} passed`, async (t) => {
await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
await mockApiAndSetupCodeQL({
version: "codeql-bundle-20200601",
apiDetails: sampleApiDetails,
isPinned: true,
tmpDir,
});
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
await mockApiAndSetupCodeQL({
version: defaults.bundleVersion,
apiDetails: sampleApiDetails,
bypassToolcache: isFeatureEnabled,
toolsInput: { input: toolsInput },
tmpDir,
});
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, shouldToolcacheBeBypassed ? 2 : 1);
});
});
}
(0, ava_1.default)("download codeql bundle from github ae endpoint", async (t) => {
await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -242,7 +155,7 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
(0, nock_1.default)("https://example.githubenterprise.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, false, (0, logging_1.getRunnerLogger)(true), false);
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, tmpDir, util.GitHubVariant.GHAE, (0, logging_1.getRunnerLogger)(true), false);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
});
@@ -293,14 +206,15 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
});
(0, ava_1.default)("getCodeQLActionRepository", (t) => {
const logger = (0, logging_1.getRunnerLogger)(true);
(0, util_1.initializeEnvironment)("1.2.3");
(0, util_1.initializeEnvironment)(util_1.Mode.runner, "1.2.3");
const repoActions = codeql.getCodeQLActionRepository(logger);
t.deepEqual(repoActions, "github/codeql-action");
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
// isRunningLocalAction() === true
delete process.env["GITHUB_ACTION_REPOSITORY"];
process.env["RUNNER_TEMP"] = path.dirname(__dirname);
const repoLocalRunner = codeql.getCodeQLActionRepository(logger);
t.deepEqual(repoLocalRunner, "github/codeql-action");
// isRunningLocalAction() === false
sinon.stub(actionsUtil, "isRunningLocalAction").returns(false);
process.env["GITHUB_ACTION_REPOSITORY"] = "xxx/yyy";
const repoEnv = codeql.getCodeQLActionRepository(logger);
t.deepEqual(repoEnv, "xxx/yyy");
@@ -309,292 +223,16 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
});
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
});
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.8.1");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
const thisStubConfig = {
...stubConfig,
tempDir,
augmentationProperties: {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
},
};
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
const args = runnerConstructorStub.firstCall.args[1];
// should NOT have used a config file
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
t.falsy(configArg, "Should NOT have injected a codescanning config");
});
});
// Test macro for ensuring different variants of injected augmented configurations
const injectedConfigMacro = ava_1.default.macro({
exec: async (t, augmentationProperties, configOverride, expectedConfig) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon
.stub(codeqlObject, "getVersion")
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
const thisStubConfig = {
...stubConfig,
...configOverride,
tempDir,
augmentationProperties,
};
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), (0, logging_1.getRunnerLogger)(true));
const args = runnerConstructorStub.firstCall.args[1];
// should have used a config file
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
t.truthy(configArg, "Should have injected a codescanning config");
const configFile = configArg.split("=")[1];
const augmentedConfig = yaml.load(fs.readFileSync(configFile, "utf8"));
t.deepEqual(augmentedConfig, expectedConfig);
await (0, del_1.default)(configFile, { force: true });
});
},
title: (providedTitle = "") => `databaseInitCluster() injected config: ${providedTitle}`,
});
(0, ava_1.default)("basic", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
}, {}, {});
(0, ava_1.default)("injected ML queries", injectedConfigMacro, {
injectedMlQueries: true,
queriesInputCombines: false,
packsInputCombines: false,
}, {}, {
packs: ["codeql/javascript-experimental-atm-queries@~0.3.0"],
});
(0, ava_1.default)("injected ML queries with existing packs", injectedConfigMacro, {
injectedMlQueries: true,
queriesInputCombines: false,
packsInputCombines: false,
}, {
originalUserInput: {
packs: { javascript: ["codeql/something-else"] },
},
}, {
packs: {
javascript: [
"codeql/something-else",
"codeql/javascript-experimental-atm-queries@~0.3.0",
],
},
});
(0, ava_1.default)("injected ML queries with existing packs of different language", injectedConfigMacro, {
injectedMlQueries: true,
queriesInputCombines: false,
packsInputCombines: false,
}, {
originalUserInput: {
packs: { cpp: ["codeql/something-else"] },
},
}, {
packs: {
cpp: ["codeql/something-else"],
javascript: ["codeql/javascript-experimental-atm-queries@~0.3.0"],
},
});
(0, ava_1.default)("injected packs from input", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
packsInput: ["xxx", "yyy"],
}, {}, {
packs: ["xxx", "yyy"],
});
(0, ava_1.default)("injected packs from input with existing packs combines", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: true,
packsInput: ["xxx", "yyy"],
}, {
originalUserInput: {
packs: {
cpp: ["codeql/something-else"],
},
},
}, {
packs: {
cpp: ["codeql/something-else", "xxx", "yyy"],
},
});
(0, ava_1.default)("injected packs from input with existing packs overrides", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
packsInput: ["xxx", "yyy"],
}, {
originalUserInput: {
packs: {
cpp: ["codeql/something-else"],
},
},
}, {
packs: ["xxx", "yyy"],
});
(0, ava_1.default)("injected packs from input with existing packs overrides and ML model inject", injectedConfigMacro, {
injectedMlQueries: true,
queriesInputCombines: false,
packsInputCombines: false,
packsInput: ["xxx", "yyy"],
}, {
originalUserInput: {
packs: {
cpp: ["codeql/something-else"],
},
},
}, {
packs: ["xxx", "yyy", "codeql/javascript-experimental-atm-queries@~0.3.0"],
});
// similar, but with queries
(0, ava_1.default)("injected queries from input", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
}, {}, {
queries: [
{
uses: "xxx",
},
{
uses: "yyy",
},
],
});
(0, ava_1.default)("injected queries from input overrides", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
}, {
originalUserInput: {
queries: [{ uses: "zzz" }],
},
}, {
queries: [
{
uses: "xxx",
},
{
uses: "yyy",
},
],
});
(0, ava_1.default)("injected queries from input combines", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: true,
packsInputCombines: false,
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
}, {
originalUserInput: {
queries: [{ uses: "zzz" }],
},
}, {
queries: [
{
uses: "zzz",
},
{
uses: "xxx",
},
{
uses: "yyy",
},
],
});
(0, ava_1.default)("injected queries from input combines 2", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: true,
packsInputCombines: true,
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
}, {}, {
queries: [
{
uses: "xxx",
},
{
uses: "yyy",
},
],
});
(0, ava_1.default)("injected queries and packs, but empty", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: true,
packsInputCombines: true,
queriesInput: [],
packsInput: [],
}, {
originalUserInput: {
packs: [],
queries: [],
},
}, {});
(0, ava_1.default)("does not use injected config", async (t) => {
const origCODEQL_PASS_CONFIG_TO_CLI = process.env.CODEQL_PASS_CONFIG_TO_CLI;
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = "false";
try {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon
.stub(codeqlObject, "getVersion")
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
const args = runnerConstructorStub.firstCall.args[1];
// should NOT have used a config file
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
t.falsy(configArg, "Should NOT have injected a codescanning config");
}
finally {
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
}
});
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info when feature enabled", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
// The version of CodeQL is checked separately to determine feature enablement, and does not
// otherwise impact this test, so set it to 0.0.0.
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.FileBaselineInformationEnabled]));
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info should be present, but it is absent");
});
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info if feature disabled", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
// The version of CodeQL is checked upstream to determine feature enablement, so it does not
// affect this test.
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
});
function stubToolRunnerConstructor() {
const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
runnerObjectStub.exec.resolves(0);
@@ -602,5 +240,4 @@ function stubToolRunnerConstructor() {
runnerConstructorStub.returns(runnerObjectStub);
return runnerConstructorStub;
}
exports.stubToolRunnerConstructor = stubToolRunnerConstructor;
//# sourceMappingURL=codeql.test.js.map

File diff suppressed because one or more lines are too long

495
lib/config-utils.js generated
View File

@@ -19,20 +19,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.parsePacksFromConfig = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPacksRequireLanguage = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
// We need to import `performance` on Node 12
const perf_hooks_1 = require("perf_hooks");
const yaml = __importStar(require("js-yaml"));
const semver = __importStar(require("semver"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const trap_caching_1 = require("./trap-caching");
const util_1 = require("./util");
// Property names from the user-supplied config file.
const NAME_PROPERTY = "name";
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
@@ -41,17 +35,6 @@ const QUERIES_USES_PROPERTY = "uses";
const PATHS_IGNORE_PROPERTY = "paths-ignore";
const PATHS_PROPERTY = "paths";
const PACKS_PROPERTY = "packs";
/**
* The default, empty augmentation properties. This is most useful
* for tests.
*/
exports.defaultAugmentationProperties = {
queriesInputCombines: false,
packsInputCombines: false,
injectedMlQueries: false,
packsInput: undefined,
queriesInput: undefined,
};
/**
* A list of queries from https://github.com/github/codeql that
* we don't want to run. Disabling them here is a quicker alternative to
@@ -132,38 +115,14 @@ const builtinSuites = ["security-extended", "security-and-quality"];
/**
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite.
* May inject ML queries, and the return value will declare if this was done.
*/
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureEnablement, configFile) {
var _a;
let injectedMlQueries = false;
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
const found = builtinSuites.find((suite) => suite === suiteName);
if (!found) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
// pack, then add the ML-powered query pack so that we run ML-powered queries.
if (
// Only run ML-powered queries on Windows if we have a CLI that supports it.
(process.platform !== "win32" ||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS))) &&
languages.includes("javascript") &&
(found === "security-extended" || found === "security-and-quality") &&
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some(isMlPoweredJsQueriesPack)) &&
(await featureEnablement.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, codeQL))) {
if (!packs.javascript) {
packs.javascript = [];
}
packs.javascript.push(await (0, util_1.getMlPoweredJsQueriesPack)(codeQL));
injectedMlQueries = true;
}
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
return injectedMlQueries;
}
function isMlPoweredJsQueriesPack(pack) {
return parsePacksSpecification(pack).name === util_1.ML_POWERED_JS_QUERIES_PACK_NAME;
}
/**
* Retrieve the set of queries at localQueryPath and add them to resultMap.
@@ -220,13 +179,8 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
* parsing the 'uses' actions in the workflow file. So it can handle
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*
* This may inject ML queries into the packs to use, and the return value will
* declare if this was done.
*
* @returns whether or not we injected ML queries into the packs
*/
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureEnablement, logger, configFile) {
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, workspacePath, apiDetails, logger, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
@@ -234,15 +188,15 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
// Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) {
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), workspacePath, configFile);
return false;
return;
}
// Check for one of the builtin suites
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureEnablement, configFile);
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
return;
}
// Otherwise, must be a reference to another repo
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
return false;
}
// Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately
@@ -304,10 +258,6 @@ function getQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array");
}
exports.getQueriesInvalid = getQueriesInvalid;
function getQueriesMissingUses(configFile) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array, with each entry having a 'uses' property");
}
exports.getQueriesMissingUses = getQueriesMissingUses;
function getQueryUsesInvalid(configFile, queryUses) {
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `must be a built-in suite (${builtinSuites.join(" or ")}), a relative path, or be of the form "owner/repo[/path]@ref"${queryUses !== undefined ? `\n Found: ${queryUses}` : ""}`);
}
@@ -321,8 +271,9 @@ function getPathsInvalid(configFile) {
}
exports.getPathsInvalid = getPathsInvalid;
function getPacksRequireLanguage(lang, configFile) {
return getConfigFilePropertyError(configFile, PACKS_PROPERTY, `has "${lang}", but it is not a valid language.`);
return getConfigFilePropertyError(configFile, PACKS_PROPERTY, `has "${lang}", but it is not one of the languages to analyze`);
}
exports.getPacksRequireLanguage = getPacksRequireLanguage;
function getPacksInvalidSplit(configFile) {
return getConfigFilePropertyError(configFile, PACKS_PROPERTY, "must split packages by language");
}
@@ -385,12 +336,11 @@ function getUnknownLanguagesError(languages) {
}
exports.getUnknownLanguagesError = getUnknownLanguagesError;
/**
* Gets the set of languages in the current repository that are
* scannable by CodeQL.
* Gets the set of languages in the current repository
*/
async function getLanguagesInRepo(repository, logger) {
async function getLanguagesInRepo(repository, apiDetails, logger) {
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
const response = await api.getApiClient().repos.listLanguages({
const response = await api.getApiClient(apiDetails).repos.listLanguages({
owner: repository.owner,
repo: repository.repo,
});
@@ -408,7 +358,6 @@ async function getLanguagesInRepo(repository, logger) {
}
return [...languages];
}
exports.getLanguagesInRepo = getLanguagesInRepo;
/**
* Get the languages to analyse.
*
@@ -419,17 +368,19 @@ exports.getLanguagesInRepo = getLanguagesInRepo;
* If no languages could be detected from either the workflow or the repository
* then throw an error.
*/
async function getLanguages(codeQL, languagesInput, repository, logger) {
// Obtain languages without filtering them.
const { rawLanguages, autodetected } = await getRawLanguages(languagesInput, repository, logger);
let languages = rawLanguages.map(languages_1.resolveAlias);
if (autodetected) {
async function getLanguages(codeQL, languagesInput, repository, apiDetails, logger) {
// Obtain from action input 'languages' if set
let languages = (languagesInput || "")
.split(",")
.map((x) => x.trim())
.filter((x) => x.length > 0);
logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo(repository, apiDetails, logger);
const availableLanguages = await codeQL.resolveLanguages();
languages = languages.filter((value) => value in availableLanguages);
logger.info(`Automatically detected languages: ${languages.join(", ")}`);
}
else {
logger.info(`Languages from configuration: ${languages.join(", ")}`);
logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
}
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
@@ -440,61 +391,26 @@ async function getLanguages(codeQL, languagesInput, repository, logger) {
const parsedLanguages = [];
const unknownLanguages = [];
for (const language of languages) {
// We know this is not an alias since we resolved it above.
const parsedLanguage = (0, languages_1.parseLanguage)(language);
if (parsedLanguage === undefined) {
unknownLanguages.push(language);
}
else if (!parsedLanguages.includes(parsedLanguage)) {
else if (parsedLanguages.indexOf(parsedLanguage) === -1) {
parsedLanguages.push(parsedLanguage);
}
}
// Any unknown languages here would have come directly from the input
// since we filter unknown languages coming from the GitHub API.
if (unknownLanguages.length > 0) {
throw new Error(getUnknownLanguagesError(unknownLanguages));
}
return parsedLanguages;
}
exports.getLanguages = getLanguages;
/**
* Gets the set of languages in the current repository without checking to
* see if these languages are actually supported by CodeQL.
*
* @param languagesInput The languages from the workflow input
* @param repository the owner/name of the repository
* @param logger a logger
* @returns A tuple containing a list of languages in this repository that might be
* analyzable and whether or not this list was determined automatically.
*/
async function getRawLanguages(languagesInput, repository, logger) {
// Obtain from action input 'languages' if set
let rawLanguages = (languagesInput || "")
.split(",")
.map((x) => x.trim().toLowerCase())
.filter((x) => x.length > 0);
let autodetected;
if (rawLanguages.length) {
autodetected = false;
}
else {
autodetected = true;
// Obtain all languages in the repo that can be analysed
rawLanguages = (await getLanguagesInRepo(repository, logger));
}
return { rawLanguages, autodetected };
}
exports.getRawLanguages = getRawLanguages;
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger) {
let injectedMlQueries = false;
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, workspacePath, apiDetails, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureEnablement, logger);
injectedMlQueries = injectedMlQueries || didInject;
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, workspacePath, apiDetails, logger);
}
return injectedMlQueries;
}
// Returns true if either no queries were provided in the workflow,
// or if the queries in the workflow were provided in "additive" mode,
@@ -502,15 +418,16 @@ async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, r
// should instead be added in addition
function shouldAddConfigFileQueries(queriesInput) {
if (queriesInput) {
return queriesInput.trimStart().slice(0, 1) === "+";
return queriesInput.trimStart().substr(0, 1) === "+";
}
return true;
}
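// Illustrative sketch (not part of this diff): how the "+" prefix on the
// workflow `queries` input decides whether config-file queries are kept,
// based on shouldAddConfigFileQueries above.
//
//   shouldAddConfigFileQueries(undefined)            // => true  (no workflow input, keep config queries)
//   shouldAddConfigFileQueries("+security-extended") // => true  (additive mode, keep config queries)
//   shouldAddConfigFileQueries("security-extended")  // => false (workflow input overrides config queries)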
/**
* Get the default config for when the user has not supplied one.
*/
async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
const languages = await getLanguages(codeQL, languagesInput, repository, logger);
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
var _a;
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
const queries = {};
for (const language of languages) {
queries[language] = {
@@ -519,17 +436,10 @@ async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput,
};
}
await addDefaultQueries(codeQL, languages, queries);
const augmentationProperties = calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
const packs = augmentationProperties.packsInput
? {
[languages[0]]: augmentationProperties.packsInput,
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
}
: {};
if (rawQueriesInput) {
augmentationProperties.injectedMlQueries =
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger);
}
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
return {
languages,
queries,
@@ -538,32 +448,18 @@ async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput,
packs,
originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
augmentationProperties,
trapCaches,
trapCacheDownloadTime,
};
}
exports.getDefaultConfig = getDefaultConfig;
async function downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger) {
let trapCaches = {};
let trapCacheDownloadTime = 0;
if (trapCachingEnabled) {
const start = perf_hooks_1.performance.now();
trapCaches = await (0, trap_caching_1.downloadTrapCaches)(codeQL, languages, logger);
trapCacheDownloadTime = perf_hooks_1.performance.now() - start;
}
return { trapCaches, trapCacheDownloadTime };
}
/**
* Load the config from the given file.
*/
async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
var _a;
let parsedYAML;
if (isLocal(configFile)) {
@@ -584,7 +480,7 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
throw new Error(getNameInvalid(configFile));
}
}
const languages = await getLanguages(codeQL, languagesInput, repository, logger);
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
const queries = {};
for (const language of languages) {
queries[language] = {
@@ -604,27 +500,25 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
if (!disableDefaultQueries) {
await addDefaultQueries(codeQL, languages, queries);
}
const augmentationProperties = calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, rawPacksInput, augmentationProperties.packsInputCombines, languages, configFile, logger);
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
if (rawQueriesInput) {
augmentationProperties.injectedMlQueries =
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger);
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
}
if (shouldAddConfigFileQueries(rawQueriesInput) &&
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
const queriesArr = parsedYAML[QUERIES_PROPERTY];
if (!Array.isArray(queriesArr)) {
throw new Error(getQueriesInvalid(configFile));
}
for (const query of queriesArr) {
if (typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueriesMissingUses(configFile));
if (!(QUERIES_USES_PROPERTY in query) ||
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureEnablement, logger, configFile);
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, logger, configFile);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
@@ -649,7 +543,7 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
}
}
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
return {
languages,
queries,
@@ -658,62 +552,13 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
packs,
originalUserInput: parsedYAML,
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
augmentationProperties,
trapCaches,
trapCacheDownloadTime,
};
}
/**
* Calculates how the codeql config file needs to be augmented before passing
* it to the CLI. The reason this is necessary is the codeql-action can be called
* with extra inputs from the workflow. These inputs are not part of the config
* and the CLI does not know about these inputs so we need to inject them into
* the config file sent to the CLI.
*
* @param rawPacksInput The packs input from the action configuration.
* @param rawQueriesInput The queries input from the action configuration.
* @param languages The languages that the config file is for. If the packs input
* is non-empty, then there must be exactly one language. Otherwise, an
* error is thrown.
*
* @returns The properties that need to be augmented in the config file.
*
* @throws An error if the packs input is non-empty and the languages input does
* not have exactly one language.
*/
// exported for testing.
function calculateAugmentation(rawPacksInput, rawQueriesInput, languages) {
const packsInputCombines = shouldCombine(rawPacksInput);
const packsInput = parsePacksFromInput(rawPacksInput, languages, packsInputCombines);
const queriesInputCombines = shouldCombine(rawQueriesInput);
const queriesInput = parseQueriesFromInput(rawQueriesInput, queriesInputCombines);
return {
injectedMlQueries: false,
packsInputCombines,
packsInput: packsInput === null || packsInput === void 0 ? void 0 : packsInput[languages[0]],
queriesInput,
queriesInputCombines,
};
}
exports.calculateAugmentation = calculateAugmentation;
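// Illustrative usage sketch (not part of this diff), assuming this module is
// required as ./config-utils. A "+"-prefixed packs or queries input combines
// with the config file and is reflected in the returned augmentation properties:
//
//   const configUtils = require("./config-utils");
//   const aug = configUtils.calculateAugmentation(
//       "+codeql/javascript-queries", "security-extended", ["javascript"]);
//   // => { injectedMlQueries: false,
//   //      packsInputCombines: true,  packsInput: ["codeql/javascript-queries"],
//   //      queriesInputCombines: false, queriesInput: [{ uses: "security-extended" }] }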
function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {
if (!rawQueriesInput) {
return undefined;
}
const trimmedInput = queriesInputCombines
? rawQueriesInput.trim().slice(1).trim()
: rawQueriesInput === null || rawQueriesInput === void 0 ? void 0 : rawQueriesInput.trim();
if (queriesInputCombines && trimmedInput.length === 0) {
throw new Error(getConfigFilePropertyError(undefined, "queries", "A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
}
return trimmedInput.split(",").map((query) => ({ uses: query.trim() }));
}
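// Illustrative sketch (not part of this diff): how the raw workflow `queries`
// input is normalized by parseQueriesFromInput above.
//
//   parseQueriesFromInput("a/b, ./local-suite", false)  // => [{ uses: "a/b" }, { uses: "./local-suite" }]
//   parseQueriesFromInput("+a/b", true)                 // => [{ uses: "a/b" }]  (leading "+" stripped, combine mode)
//   parseQueriesFromInput("+", true)                    // throws: "+" given but no queries specified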
/**
* Pack names must be in the form of `scope/name`, with only alpha-numeric characters,
* and `-` allowed as long as not the first or last char.
@@ -725,7 +570,7 @@ const PACK_IDENTIFIER_PATTERN = (function () {
return new RegExp(`^${component}/${component}$`);
})();
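// Illustrative sketch (not part of this diff): what PACK_IDENTIFIER_PATTERN
// accepts, per the comment above (alpha-numerics plus interior "-", in
// scope/name form):
//
//   PACK_IDENTIFIER_PATTERN.test("codeql/javascript-queries") // => true
//   PACK_IDENTIFIER_PATTERN.test("codeql/-bad-name")          // => false ("-" at the start of a segment)
//   PACK_IDENTIFIER_PATTERN.test("no-scope")                  // => false (missing the "scope/" part)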
// Exported for testing
function parsePacksFromConfig(packsByLanguage, languages, configFile, logger) {
function parsePacksFromConfig(packsByLanguage, languages, configFile) {
const packs = {};
if (Array.isArray(packsByLanguage)) {
if (languages.length === 1) {
@@ -745,23 +590,18 @@ function parsePacksFromConfig(packsByLanguage, languages, configFile, logger) {
throw new Error(getPacksInvalid(configFile));
}
if (!languages.includes(lang)) {
// This particular language is not being analyzed in this run.
if (languages_1.Language[lang]) {
logger.info(`Ignoring packs for ${lang} since this language is not being analyzed in this run.`);
continue;
throw new Error(getPacksRequireLanguage(lang, configFile));
}
else {
// This language is invalid, probably a misspelling
throw new Error(getPacksRequireLanguage(lang, configFile));
packs[lang] = [];
for (const packStr of packsArr) {
packs[lang].push(toPackWithVersion(packStr, configFile));
}
}
packs[lang] = packsArr.map((packStr) => validatePackSpecification(packStr, configFile));
}
return packs;
}
exports.parsePacksFromConfig = parsePacksFromConfig;
function parsePacksFromInput(rawPacksInput, languages, packsInputCombines) {
if (!(rawPacksInput === null || rawPacksInput === void 0 ? void 0 : rawPacksInput.trim())) {
function parsePacksFromInput(packsInput, languages) {
if (!(packsInput === null || packsInput === void 0 ? void 0 : packsInput.trim())) {
return undefined;
}
if (languages.length > 1) {
@@ -770,128 +610,56 @@ function parsePacksFromInput(rawPacksInput, languages, packsInputCombines) {
else if (languages.length === 0) {
throw new Error("No languages specified. Cannot process the packs input.");
}
rawPacksInput = rawPacksInput.trim();
if (packsInputCombines) {
rawPacksInput = rawPacksInput.trim().substring(1).trim();
if (!rawPacksInput) {
throw new Error(getConfigFilePropertyError(undefined, "packs", "A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
packsInput = packsInput.trim();
if (packsInput.startsWith("+")) {
packsInput = packsInput.substring(1).trim();
if (!packsInput) {
throw new Error("A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs.");
}
}
return {
[languages[0]]: rawPacksInput.split(",").reduce((packs, pack) => {
packs.push(validatePackSpecification(pack, ""));
[languages[0]]: packsInput.split(",").reduce((packs, pack) => {
packs.push(toPackWithVersion(pack, ""));
return packs;
}, []),
};
}
/**
* Validates that this package specification is syntactically correct.
* It may not point to any real package, but after this function returns
* without throwing, we are guaranteed that the package specification
* is roughly correct.
*
* The CLI itself will do a more thorough validation of the package
* specification.
*
* A package specification looks like this:
*
* `scope/name@version:path`
*
* Version and path are optional.
*
* @param packStr the package specification to verify.
* @param configFile Config file to use for error reporting
*/
function parsePacksSpecification(packStr, configFile) {
function toPackWithVersion(packStr, configFile) {
if (typeof packStr !== "string") {
throw new Error(getPacksStrInvalid(packStr, configFile));
}
packStr = packStr.trim();
const atIndex = packStr.indexOf("@");
const colonIndex = packStr.indexOf(":", atIndex);
const packStart = 0;
const versionStart = atIndex + 1 || undefined;
const pathStart = colonIndex + 1 || undefined;
const packEnd = Math.min(atIndex > 0 ? atIndex : Infinity, colonIndex > 0 ? colonIndex : Infinity, packStr.length);
const versionEnd = versionStart
? Math.min(colonIndex > 0 ? colonIndex : Infinity, packStr.length)
: undefined;
const pathEnd = pathStart ? packStr.length : undefined;
const packName = packStr.slice(packStart, packEnd).trim();
const version = versionStart
? packStr.slice(versionStart, versionEnd).trim()
: undefined;
const packPath = pathStart
? packStr.slice(pathStart, pathEnd).trim()
: undefined;
if (!PACK_IDENTIFIER_PATTERN.test(packName)) {
const nameWithVersion = packStr.trim().split("@");
let version;
if (nameWithVersion.length > 2 ||
!PACK_IDENTIFIER_PATTERN.test(nameWithVersion[0])) {
throw new Error(getPacksStrInvalid(packStr, configFile));
}
if (version) {
try {
new semver.Range(version);
}
catch (e) {
// The range string is invalid. OK to ignore the caught error
else if (nameWithVersion.length === 2) {
version = semver.clean(nameWithVersion[1]) || undefined;
if (!version) {
throw new Error(getPacksStrInvalid(packStr, configFile));
}
}
if (packPath &&
(path.isAbsolute(packPath) ||
// Permit using "/" instead of "\" on Windows
// Use `x.split(y).join(z)` as a polyfill for `x.replaceAll(y, z)` since
// if we used a regex we'd need to escape the path separator on Windows
// which seems more awkward.
path.normalize(packPath).split(path.sep).join("/") !==
packPath.split(path.sep).join("/"))) {
throw new Error(getPacksStrInvalid(packStr, configFile));
}
if (!packPath && pathStart) {
// 0 length path
throw new Error(getPacksStrInvalid(packStr, configFile));
}
return {
name: packName,
packName: nameWithVersion[0].trim(),
version,
path: packPath,
};
}
exports.parsePacksSpecification = parsePacksSpecification;
function prettyPrintPack(pack) {
return `${pack.name}${pack.version ? `@${pack.version}` : ""}${pack.path ? `:${pack.path}` : ""}`;
}
exports.prettyPrintPack = prettyPrintPack;
function validatePackSpecification(pack, configFile) {
return prettyPrintPack(parsePacksSpecification(pack, configFile));
}
exports.validatePackSpecification = validatePackSpecification;
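// Illustrative round-trip sketch (not part of this diff), using the
// `scope/name@version:path` form described above:
//
//   parsePacksSpecification("codeql/javascript-queries@~0.3.0:queries/ml", "")
//   // => { name: "codeql/javascript-queries", version: "~0.3.0", path: "queries/ml" }
//
//   prettyPrintPack({ name: "codeql/javascript-queries", version: "~0.3.0", path: "queries/ml" })
//   // => "codeql/javascript-queries@~0.3.0:queries/ml"
//
//   validatePackSpecification(" codeql/javascript-queries @ ~0.3.0 ", "")
//   // => "codeql/javascript-queries@~0.3.0"   (whitespace trimmed, otherwise unchanged)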
// exported for testing
function parsePacks(rawPacksFromConfig, rawPacksFromInput, packsInputCombines, languages, configFile, logger) {
const packsFomConfig = parsePacksFromConfig(rawPacksFromConfig, languages, configFile, logger);
const packsFromInput = parsePacksFromInput(rawPacksFromInput, languages, packsInputCombines);
function parsePacks(rawPacksFromConfig, rawPacksInput, languages, configFile) {
const packsFromInput = parsePacksFromInput(rawPacksInput, languages);
const packsFomConfig = parsePacksFromConfig(rawPacksFromConfig, languages, configFile);
if (!packsFromInput) {
return packsFomConfig;
}
if (!packsInputCombines) {
if (!packsFromInput) {
throw new Error(getPacksInvalid(configFile));
}
if (!shouldCombinePacks(rawPacksInput)) {
return packsFromInput;
}
return combinePacks(packsFromInput, packsFomConfig);
}
exports.parsePacks = parsePacks;
/**
* The convention in this action is that an input value that is prefixed with a '+' will
* be combined with the corresponding value in the config file.
*
* Without a '+', an input value will override the corresponding value in the config file.
*
* @param inputValue The input value to process.
* @returns true if the input value should replace the corresponding value in the config file, false if it should be appended.
*/
function shouldCombine(inputValue) {
return !!(inputValue === null || inputValue === void 0 ? void 0 : inputValue.trim().startsWith("+"));
function shouldCombinePacks(packsInput) {
return !!(packsInput === null || packsInput === void 0 ? void 0 : packsInput.trim().startsWith("+"));
}
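// Illustrative sketch (not part of this diff): the "+" convention checked by
// shouldCombine / shouldCombinePacks above.
//
//   shouldCombine("+codeql/javascript-queries") // => true  (append to the config file value)
//   shouldCombine("codeql/javascript-queries")  // => false (override the config file value)
//   shouldCombine(undefined)                    // => false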
function combinePacks(packs1, packs2) {
const packs = {};
@@ -914,16 +682,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
var _a, _b, _c;
let config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug("No configuration file was provided");
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
}
else {
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
@@ -936,29 +704,11 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
}
}
// When using the codescanning config in the CLI, pack downloads
// happen in the CLI during the `database init` command, so no need
// to download them here.
await (0, util_1.logCodeScanningConfigInCli)(codeQL, featureEnablement, logger);
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
const registries = parseRegistries(registriesInput);
await downloadPacks(codeQL, config.languages, config.packs, registries, apiDetails, config.tempDir, logger);
}
// Save the config so we can easily access it again in the future
await saveConfig(config, logger);
return config;
}
exports.initConfig = initConfig;
function parseRegistries(registriesInput) {
try {
return registriesInput
? yaml.load(registriesInput)
: undefined;
}
catch (e) {
throw new Error("Invalid registries input. Must be a YAML string.");
}
}
function isLocal(configPath) {
// If the path starts with ./, look locally
if (configPath.indexOf("./") === 0) {
@@ -986,7 +736,7 @@ async function getRemoteConfig(configFile, apiDetails) {
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
}
const response = await api
.getApiClientWithExternalAuth(apiDetails)
.getApiClient(apiDetails, { allowExternal: true })
.repos.getContent({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
@@ -1038,95 +788,4 @@ async function getConfig(tempDir, logger) {
return JSON.parse(configString);
}
exports.getConfig = getConfig;
async function downloadPacks(codeQL, languages, packs, registries, apiDetails, tmpDir, logger) {
let qlconfigFile;
let registriesAuthTokens;
if (registries) {
if (!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD))) {
throw new Error(`'registries' input is not supported on CodeQL versions less than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}.`);
}
// generate a qlconfig.yml file to hold the registry configs.
const qlconfig = createRegistriesBlock(registries);
qlconfigFile = path.join(tmpDir, "qlconfig.yml");
fs.writeFileSync(qlconfigFile, yaml.dump(qlconfig), "utf8");
registriesAuthTokens = registries
.map((registry) => `${registry.url}=${registry.token}`)
.join(",");
}
await wrapEnvironment({
GITHUB_TOKEN: apiDetails.auth,
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
}, async () => {
let numPacksDownloaded = 0;
logger.startGroup("Downloading packs");
for (const language of languages) {
const packsWithVersion = packs[language];
if (packsWithVersion === null || packsWithVersion === void 0 ? void 0 : packsWithVersion.length) {
logger.info(`Downloading custom packs for ${language}`);
const results = await codeQL.packDownload(packsWithVersion, qlconfigFile);
numPacksDownloaded += results.packs.length;
logger.info(`Downloaded: ${results.packs
.map((r) => `${r.name}@${r.version || "latest"}`)
.join(", ")}`);
}
}
if (numPacksDownloaded > 0) {
logger.info(`Downloaded ${numPacksDownloaded} ${numPacksDownloaded === 1 ? "pack" : "packs"}`);
}
else {
logger.info("No packs to download");
}
logger.endGroup();
});
}
exports.downloadPacks = downloadPacks;
function createRegistriesBlock(registries) {
if (!Array.isArray(registries) ||
registries.some((r) => !r.url || !r.packages)) {
throw new Error("Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
}
// be sure to remove the `token` field from the registry before writing it to disk.
const safeRegistries = registries.map((registry) => ({
// ensure the url ends with a slash to avoid a bug in the CLI 2.10.4
url: !(registry === null || registry === void 0 ? void 0 : registry.url.endsWith("/")) ? `${registry.url}/` : registry.url,
packages: registry.packages,
}));
const qlconfig = {
registries: safeRegistries,
};
return qlconfig;
}
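// Illustrative sketch (not part of this diff): the shape createRegistriesBlock
// expects and produces. The url and token values below are hypothetical.
//
//   createRegistriesBlock([
//     { url: "https://containers.example.com/v2", packages: ["codeql/*"], token: "<secret>" },
//   ])
//   // => { registries: [ { url: "https://containers.example.com/v2/", packages: ["codeql/*"] } ] }
//   //    (token dropped before writing qlconfig.yml; trailing "/" added to the url)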
/**
* Create a temporary environment based on the existing environment and overridden
* by the given environment variables that are passed in as arguments.
*
* Use this new environment in the context of the given operation. After completing
* the operation, restore the original environment.
*
* This function does not support un-setting environment variables.
*
* @param env
* @param operation
*/
async function wrapEnvironment(env, operation) {
// Remember the original env
const oldEnv = { ...process.env };
// Set the new env
for (const [key, value] of Object.entries(env)) {
// Ignore undefined keys
if (value !== undefined) {
process.env[key] = value;
}
}
try {
// Run the operation
await operation();
}
finally {
// Restore the old env
for (const [key, value] of Object.entries(oldEnv)) {
process.env[key] = value;
}
}
}
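// Illustrative usage sketch (not part of this diff): wrapEnvironment overrides
// variables only for the duration of the callback. The token value is hypothetical.
//
//   await wrapEnvironment(
//     { GITHUB_TOKEN: "ghs_example", CODEQL_REGISTRIES_AUTH: undefined },
//     async () => {
//       // process.env.GITHUB_TOKEN === "ghs_example" here;
//       // CODEQL_REGISTRIES_AUTH is left untouched (undefined values are ignored).
//     },
//   );
//   // afterwards the previous environment is restored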
//# sourceMappingURL=config-utils.js.map

Some files were not shown because too many files have changed in this diff.