Compare commits

..

6 Commits

Author               SHA1        Message                                                       Date
Edoardo Pirovano     4c1021c504  Merge pull request #1006 from github/update-v1.1.6-28eead24  2022-03-30 15:28:29 +01:00
                                 (Merge v2 into v1)
Edoardo Pirovano     9da34a6ec6  Avoid failure if @types/node is already 12.12                 2022-03-30 14:00:43 +01:00
Edoardo Pirovano     f83be76fd8  Revert "Upgrade Node type definitions"                        2022-03-30 13:49:37 +01:00
                                 (This reverts commit b949e494e4.)
Edoardo Pirovano     b45efc9e42  Revert "Use Node.js v16 in all Actions"                       2022-03-30 13:49:29 +01:00
                                 (This reverts commit 3d23aade46.)
github-actions[bot]  75743c96fc  Update checked-in dependencies                                2022-03-30 12:20:06 +00:00
github-actions[bot]  03a275bc11  Update version and changelog for v1.1.6                       2022-03-30 11:13:40 +00:00
2643 changed files with 19491 additions and 345414 deletions

View File

@@ -1,60 +0,0 @@
name: Check Code-Scanning Config
description: |
Checks the code scanning configuration file generated by the
action to ensure it contains the expected contents
inputs:
languages:
required: false
description: The languages field passed to the init action.
packs:
required: false
description: The packs field passed to the init action.
queries:
required: false
description: The queries field passed to the init action.
config-file-test:
required: false
description: |
The location of the config file to use. If empty,
then no config file is used.
expected-config-file-contents:
required: true
description: |
A JSON string containing the exact contents of the config file.
tools:
required: true
description: |
The url of codeql to use.
runs:
using: composite
steps:
- uses: ./../action/init
with:
languages: ${{ inputs.languages }}
config-file: ${{ inputs.config-file-test }}
queries: ${{ inputs.queries }}
packs: ${{ inputs.packs }}
tools: ${{ inputs.tools }}
db-location: ${{ runner.temp }}/codescanning-config-cli-test
- name: Install dependencies
shell: bash
run: npm install --location=global ts-node js-yaml
- name: Check config
working-directory: ${{ github.action_path }}
shell: bash
run: ts-node ./index.ts "${{ runner.temp }}/user-config.yaml" '${{ inputs.expected-config-file-contents }}'
- name: Clean up
shell: bash
if: always()
run: |
rm -rf ${{ runner.temp }}/codescanning-config-cli-test
rm -rf ${{ runner.temp }}/user-config.yaml
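For reference, a minimal sketch of how a PR-check job might call this composite action. The step placement, the ./../action/.github/check-codescanning-config path, and the expected-config JSON are illustrative assumptions (mirroring how the other checker actions in this diff are invoked), not values taken from the repository.

# Hypothetical usage sketch; assumes an earlier prepare-test step exposed a tools-url output.
- name: Check code scanning config
  uses: ./../action/.github/check-codescanning-config  # assumed path
  with:
    languages: javascript
    queries: security-extended
    tools: ${{ steps.prepare-test.outputs.tools-url }}
    config-file-test: .github/codeql/codeql-config.yml  # placeholder config file
    expected-config-file-contents: |
      {"languages": ["javascript"], "queries": [{"uses": "security-extended"}]}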

View File

@@ -1,39 +0,0 @@
import * as core from '@actions/core'
import * as yaml from 'js-yaml'
import * as fs from 'fs'
import * as assert from 'assert'
const actualConfig = loadActualConfig()
const rawExpectedConfig = process.argv[3].trim()
if (!rawExpectedConfig) {
core.info('No expected configuration provided')
} else {
core.startGroup('Expected generated user config')
core.info(yaml.dump(JSON.parse(rawExpectedConfig)))
core.endGroup()
}
const expectedConfig = rawExpectedConfig ? JSON.parse(rawExpectedConfig) : undefined;
assert.deepStrictEqual(
actualConfig,
expectedConfig,
'Expected configuration does not match actual configuration'
);
function loadActualConfig() {
if (!fs.existsSync(process.argv[2])) {
core.info('No configuration file found')
return undefined
} else {
const rawActualConfig = fs.readFileSync(process.argv[2], 'utf8')
core.startGroup('Actual generated user config')
core.info(rawActualConfig)
core.endGroup()
return yaml.load(rawActualConfig)
}
}

View File

@@ -1,20 +0,0 @@
name: Check SARIF
description: Checks a SARIF file to see if certain queries were run and others were not run.
inputs:
sarif-file:
required: true
description: The SARIF file to check
queries-run:
required: true
description: |
Comma separated list of query ids that should be included in this SARIF file.
queries-not-run:
required: true
description: |
Comma separated list of query ids that should NOT be included in this SARIF file.
runs:
using: node12
main: index.js
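As a usage sketch only (the step placement is assumed; the query ids and action path mirror how this checker is invoked elsewhere in this diff), the two query-list inputs take comma-separated query ids:

# Illustrative step: assert that two ML-powered queries ran and that the made-up ids foo,bar did not.
- name: Check SARIF
  uses: ./../action/.github/check-sarif
  with:
    sarif-file: ${{ runner.temp }}/results/javascript.sarif
    queries-run: js/ml-powered/xss,js/ml-powered/sql-injection
    queries-not-run: foo,bar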

View File

@@ -1,43 +0,0 @@
'use strict'
const core = require('@actions/core')
const fs = require('fs')
const sarif = JSON.parse(fs.readFileSync(core.getInput('sarif-file'), 'utf8'))
const rules = sarif.runs[0].tool.extensions.flatMap(ext => ext.rules || [])
const ruleIds = rules.map(rule => rule.id)
// Check that all the expected queries ran
const expectedQueriesRun = getQueryIdsInput('queries-run')
const queriesThatShouldHaveRunButDidNot = expectedQueriesRun.filter(queryId => !ruleIds.includes(queryId))
if (queriesThatShouldHaveRunButDidNot.length > 0) {
core.setFailed(`The following queries were expected to run but did not: ${queriesThatShouldHaveRunButDidNot.join(', ')}`)
}
// Check that all the unexpected queries did not run
const expectedQueriesNotRun = getQueryIdsInput('queries-not-run')
const queriesThatShouldNotHaveRunButDid = expectedQueriesNotRun.filter(queryId => ruleIds.includes(queryId))
if (queriesThatShouldNotHaveRunButDid.length > 0) {
core.setFailed(`The following queries were NOT expected to have run but did: ${queriesThatShouldNotHaveRunButDid.join(', ')}`)
}
core.startGroup('All queries run')
rules.forEach(rule => {
core.info(`${rule.id}: ${(rule.properties && rule.properties.name) || rule.name}`)
})
core.endGroup()
core.startGroup('Full SARIF')
core.info(JSON.stringify(sarif, null, 2))
core.endGroup()
function getQueryIdsInput(name) {
return core.getInput(name)
.split(',')
.map(q => q.trim())
.filter(q => q.length > 0)
}

View File

@@ -1,54 +0,0 @@
name: Query Filter Test
description: Runs a test of query filters using the check SARIF action
inputs:
sarif-file:
required: true
description: The SARIF file to check
queries-run:
required: true
description: |
Comma separated list of query ids that should be included in this SARIF file.
queries-not-run:
required: true
description: |
Comma separated list of query ids that should NOT be included in this SARIF file.
config-file:
required: true
description: |
The location of the codeql configuration file to use.
tools:
required: true
description: |
The url of codeql to use.
runs:
using: composite
steps:
- uses: ./../action/init
with:
languages: javascript
config-file: ${{ inputs.config-file }}
tools: ${{ inputs.tools }}
db-location: ${{ runner.temp }}/query-filter-test
env:
TEST_MODE: "true"
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
upload: false
env:
TEST_MODE: "true"
- name: Check SARIF
uses: ./../action/.github/check-sarif
with:
sarif-file: ${{ inputs.sarif-file }}
queries-run: ${{ inputs.queries-run}}
queries-not-run: ${{ inputs.queries-not-run}}
- name: Cleanup after test
shell: bash
run: rm -rf "$RUNNER_TEMP/results" "$RUNNER_TEMP/query-filter-test"
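A hedged sketch of how a workflow job could drive this query-filter test. The action path is assumed to follow the same ./../action/.github/ pattern as check-sarif above, the config file name is a placeholder, and the query ids are ones that appear elsewhere in this diff.

# Illustrative step: run the filter test against a config expected to exclude one example query.
- name: Query filter test
  uses: ./../action/.github/query-filter-test  # assumed path
  with:
    sarif-file: ${{ runner.temp }}/results/javascript.sarif
    queries-run: javascript/example/two-block
    queries-not-run: javascript/example/other-query-block
    config-file: ./.github/codeql/query-filter-config.yml  # placeholder
    tools: ${{ steps.prepare-test.outputs.tools-url }}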

View File

@@ -19,19 +19,15 @@ V1_MODE = 'v1-release'
  # Value of the mode flag for a v2 release
  V2_MODE = 'v2-release'
- SOURCE_BRANCH_FOR_MODE = { V1_MODE: 'releases/v2', V2_MODE: 'main' }
- TARGET_BRANCH_FOR_MODE = { V1_MODE: 'releases/v1', V2_MODE: 'releases/v2' }
  # Name of the remote
  ORIGIN = 'origin'
  # Runs git with the given args and returns the stdout.
- # Raises an error if git does not exit successfully (unless passed
- # allow_non_zero_exit_code=True).
- def run_git(*args, allow_non_zero_exit_code=False):
+ # Raises an error if git does not exit successfully.
+ def run_git(*args):
  cmd = ['git', *args]
  p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- if not allow_non_zero_exit_code and p.returncode != 0:
+ if (p.returncode != 0):
  raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
  return p.stdout.decode('ascii')
@@ -40,9 +36,7 @@ def branch_exists_on_remote(branch_name):
  return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
  # Opens a PR from the given branch to the target branch
- def open_pr(
- repo, all_commits, source_branch_short_sha, new_branch_name, source_branch, target_branch,
- conductor, is_v2_release, labels, conflicted_files):
+ def open_pr(repo, all_commits, source_branch_short_sha, new_branch_name, source_branch, target_branch, conductor, is_v2_release, labels):
  # Sort the commits into the pull requests that introduced them,
  # and any commits that don't have a pull request
  pull_requests = []
@@ -67,7 +61,7 @@ def open_pr(
  body.append('Merging ' + source_branch_short_sha + ' into ' + target_branch)
  body.append('')
- body.append(f'Conductor for this PR is @{conductor}.')
+ body.append('Conductor for this PR is @' + conductor)
  # List all PRs merged
  if len(pull_requests) > 0:
@@ -75,40 +69,25 @@ def open_pr(
  body.append('Contains the following pull requests:')
  for pr in pull_requests:
  merger = get_merger_of_pr(repo, pr)
- body.append(f'- #{pr.number} (@{merger})')
+ body.append('- #' + str(pr.number) + ' - ' + pr.title +' (@' + merger + ')')
  # List all commits not part of a PR
  if len(commits_without_pull_requests) > 0:
  body.append('')
  body.append('Contains the following commits not from a pull request:')
  for commit in commits_without_pull_requests:
- author_description = f' (@{commit.author.login})' if commit.author is not None else ''
- body.append(f'- {commit.sha} - {get_truncated_commit_message(commit)}{author_description}')
+ author_description = ' (@' + commit.author.login + ')' if commit.author is not None else ''
+ body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + author_description)
  body.append('')
- body.append('Please do the following:')
- if len(conflicted_files) > 0:
- body.append(' - [ ] Ensure `package.json` file contains the correct version.')
- body.append(' - [ ] Add commits to this branch to resolve the merge conflicts ' +
- 'in the following files:')
- body.extend([f' - [ ] `{file}`' for file in conflicted_files])
- body.append(' - [ ] Ensure another maintainer has reviewed the additional commits you added to this ' +
- 'branch to resolve the merge conflicts.')
- body.append(' - [ ] Ensure the CHANGELOG displays the correct version and date.')
- body.append(' - [ ] Ensure the CHANGELOG includes all relevant, user-facing changes since the last release.')
- body.append(' - [ ] Check that there are not any unexpected commits being merged into the ' + target_branch + ' branch.')
- body.append(' - [ ] Ensure the docs team is aware of any documentation changes that need to be released.')
- if not is_v2_release:
- body.append(' - [ ] Remove and re-add the "Update dependencies" label to the PR to trigger just this workflow.')
- body.append(' - [ ] Wait for the "Update dependencies" workflow to push a commit updating the dependencies.')
- body.append(' - [ ] Mark the PR as ready for review to trigger the full set of PR checks.')
- body.append(' - [ ] Approve and merge this PR.')
+ body.append('Please review the following:')
+ body.append(' - [ ] The CHANGELOG displays the correct version and date.')
+ body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
+ body.append(' - [ ] There are no unexpected commits being merged into the ' + target_branch + ' branch.')
+ body.append(' - [ ] The docs team is aware of any documentation changes that need to be released.')
  if is_v2_release:
- body.append(' - [ ] Merge the mergeback PR that will automatically be created once this PR is merged.')
- body.append(' - [ ] Merge the v1 release PR that will automatically be created once this PR is merged.')
+ body.append(' - [ ] The mergeback PR is merged back into ' + source_branch + ' after this PR is merged.')
+ body.append(' - [ ] The v1 release PR is merged after this PR is merged.')
  title = 'Merge ' + source_branch + ' into ' + target_branch
@@ -212,10 +191,8 @@ def main():
  type=str,
  required=True,
  choices=[V2_MODE, V1_MODE],
- help=f"Which release to perform. '{V2_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V2_MODE]} as the source " +
- f"branch and {TARGET_BRANCH_FOR_MODE[V2_MODE]} as the target branch. " +
- f"'{V1_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V1_MODE]} as the source branch and " +
- f"{TARGET_BRANCH_FOR_MODE[V1_MODE]} as the target branch."
+ help=f"Which release to perform. '{V2_MODE}' uses main as the source branch and v2 as the target branch. " +
+ f"'{V1_MODE}' uses v2 as the source branch and v1 as the target branch."
  )
  parser.add_argument(
  '--conductor',
@@ -226,8 +203,14 @@ def main():
  args = parser.parse_args()
- source_branch = SOURCE_BRANCH_FOR_MODE[args.mode]
- target_branch = TARGET_BRANCH_FOR_MODE[args.mode]
+ if args.mode == V2_MODE:
+ source_branch = 'main'
+ target_branch = 'v2'
+ elif args.mode == V1_MODE:
+ source_branch = 'v2'
+ target_branch = 'v1'
+ else:
+ raise ValueError(f"Unexpected value for release mode: '{args.mode}'")
  repo = Github(args.github_token).get_repo(args.repository_nwo)
  version = get_current_version()
@@ -263,15 +246,10 @@ def main():
  # Create the new branch and push it to the remote
  print('Creating branch ' + new_branch_name)
- # The process of creating the v1 release can run into merge conflicts. We commit the unresolved
- # conflicts so a maintainer can easily resolve them (vs erroring and requiring maintainers to
- # reconstruct the release manually)
- conflicted_files = []
  if args.mode == V1_MODE:
- # If we're performing a backport, start from the target branch
- print(f'Creating {new_branch_name} from the {ORIGIN}/{target_branch} branch')
- run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{target_branch}')
+ # If we're performing a backport, start from the v1 branch
+ print(f'Creating {new_branch_name} from the {ORIGIN}/v1 branch')
+ run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/v1')
  # Revert the commit that we made as part of the last release that updated the version number and
  # changelog to refer to 1.x.x variants. This avoids merge conflicts in the changelog and
@@ -296,24 +274,19 @@ def main():
  print(' Nothing to revert.')
  print(f'Merging {ORIGIN}/{source_branch} into the release prep branch')
- # Commit any conflicts (see the comment for `conflicted_files`)
- run_git('merge', f'{ORIGIN}/{source_branch}', allow_non_zero_exit_code=True)
- conflicted_files = run_git('diff', '--name-only', '--diff-filter', 'U').splitlines()
- if len(conflicted_files) > 0:
- run_git('add', '.')
- run_git('commit', '--no-edit')
+ run_git('merge', f'{ORIGIN}/{source_branch}', '--no-edit')
  # Migrate the package version number from a v2 version number to a v1 version number
  print(f'Setting version number to {version}')
- subprocess.check_output(['npm', 'version', version, '--no-git-tag-version'])
+ subprocess.run(['npm', 'version', version, '--no-git-tag-version'])
  run_git('add', 'package.json', 'package-lock.json')
  # Migrate the changelog notes from v2 version numbers to v1 version numbers
  print('Migrating changelog notes from v2 to v1')
- subprocess.check_output(['sed', '-i', 's/^## 2\./## 1./g', 'CHANGELOG.md'])
+ subprocess.run(['sed', '-i', 's/^## 2\./## 1./g', 'CHANGELOG.md'])
  # Remove changelog notes from v2 that don't apply to v1
- subprocess.check_output(['sed', '-i', '/^- \[v2+ only\]/d', 'CHANGELOG.md'])
+ subprocess.run(['sed', '-i', '/^- \[v2+ only\]/d', 'CHANGELOG.md'])
  # Amend the commit generated by `npm version` to update the CHANGELOG
  run_git('add', 'CHANGELOG.md')
@@ -344,7 +317,6 @@ def main():
  conductor=args.conductor,
  is_v2_release=args.mode == V2_MODE,
  labels=['Update dependencies'] if args.mode == V1_MODE else [],
- conflicted_files=conflicted_files
  )
  if __name__ == '__main__':

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -70,7 +69,7 @@ jobs:
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
@@ -82,8 +81,6 @@ jobs:
  languages: cpp,csharp,java,javascript,python
  config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
  github.sha }}
- env:
- TEST_MODE: true
  - name: Build code
  shell: bash
  run: ./build.sh

View File

@@ -1,74 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - autobuild-action
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
autobuild-action:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
name: autobuild-action
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: csharp
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- uses: ./../action/autobuild
env:
# Explicitly disable the CLR tracer.
COR_ENABLE_PROFILING: ''
COR_PROFILER: ''
COR_PROFILER_PATH_64: ''
CORECLR_ENABLE_PROFILING: ''
CORECLR_PROFILER: ''
CORECLR_PROFILER_PATH_64: ''
- uses: ./../action/analyze
env:
TEST_MODE: true
- name: Check database
shell: bash
run: |
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d csharp ]]; then
echo "Did not find a C# database"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -3,7 +3,7 @@
  # pip install ruamel.yaml && python3 sync.py
  # to regenerate this file.
- name: 'PR Check - Go: Reconciled tracing with legacy workflow'
+ name: PR Check - Debug artifact upload
  env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -21,7 +20,7 @@ on:
  - ready_for_review
  workflow_dispatch: {}
  jobs:
- go-reconciled-tracing-legacy-workflow:
+ debug-artifacts:
  strategy:
  matrix:
  include:
@@ -49,38 +48,48 @@ jobs:
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- name: 'Go: Reconciled tracing with legacy workflow'
+ name: Debug artifact upload
  timeout-minutes: 45
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
  with:
  version: ${{ matrix.version }}
- - uses: actions/setup-go@v3
- with:
- go-version: ^1.13.1
  - uses: ./../action/init
  with:
- languages: go
  tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
+ debug: true
+ debug-artifact-name: my-debug-artifacts
+ debug-database-name: my-db
+ - name: Build code
+ shell: bash
+ run: ./build.sh
  - uses: ./../action/analyze
- env:
- TEST_MODE: true
+ id: analysis
+ - uses: actions/download-artifact@v2
+ with:
+ name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
  - shell: bash
  run: |
- cd "$RUNNER_TEMP/codeql_databases"
- if [[ ! -d go ]]; then
- echo "Did not find a Go database"
- exit 1
- fi
+ LANGUAGES="cpp csharp go java javascript python"
+ for language in $LANGUAGES; do
+ echo "Checking $language"
+ if [[ ! -f "$language.sarif" ]] ; then
+ echo "Missing a SARIF file for $language"
+ exit 1
+ fi
+ if [[ ! -f "my-db-$language.zip" ]] ; then
+ echo "Missing a database bundle for $language"
+ exit 1
+ fi
+ if [[ ! -d "$language/log" ]] ; then
+ echo "Missing logs for $language"
+ exit 1
+ fi
+ done
  env:
- # Enable reconciled Go tracing beta functionality
- CODEQL_ACTION_RECONCILE_GO_EXTRACTION: 'true'
- DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
  INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -32,7 +31,7 @@ jobs:
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
@@ -43,8 +42,6 @@ jobs:
  languages: java
  ram: 230
  threads: 1
- env:
- TEST_MODE: true
  - name: Assert Results
  shell: bash
  run: |

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -70,13 +69,13 @@ jobs:
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
  with:
  version: ${{ matrix.version }}
- - uses: actions/setup-go@v3
+ - uses: actions/setup-go@v2
  with:
  go-version: ^1.13.1
  - uses: ./../action/init
@@ -84,8 +83,6 @@ jobs:
  languages: go
  config-file: ./.github/codeql/custom-queries.yml
  tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
  - name: Build code
  shell: bash
  run: ./build.sh
@@ -93,5 +90,4 @@ jobs:
  env:
  TEST_MODE: true
  env:
- DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
  INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -54,21 +53,19 @@ jobs:
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
  with:
  version: ${{ matrix.version }}
- - uses: actions/setup-go@v3
+ - uses: actions/setup-go@v2
  with:
  go-version: ^1.13.1
  - uses: ./../action/init
  with:
  languages: go
  tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
  - uses: ./../action/autobuild
  - uses: ./../action/analyze
  env:
@@ -81,6 +78,5 @@ jobs:
  exit 1
  fi
  env:
- CODEQL_EXTRACTOR_GO_BUILD_TRACING: on
- DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
+ CODEQL_EXTRACTOR_GO_BUILD_TRACING: 'true'
  INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -70,21 +69,19 @@ jobs:
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
  with:
  version: ${{ matrix.version }}
- - uses: actions/setup-go@v3
+ - uses: actions/setup-go@v2
  with:
  go-version: ^1.13.1
  - uses: ./../action/init
  with:
  languages: go
  tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
  - name: Build code
  shell: bash
  run: go build main.go
@@ -92,5 +89,5 @@ jobs:
  env:
  TEST_MODE: true
  env:
- CODEQL_EXTRACTOR_GO_BUILD_TRACING: on
+ CODEQL_EXTRACTOR_GO_BUILD_TRACING: 'true'
  INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -1,91 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Go: Reconciled tracing with autobuilder'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
go-reconciled-tracing-autobuilder:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: 'Go: Reconciled tracing with autobuilder'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- uses: ./../action/autobuild
- uses: ./../action/analyze
env:
TEST_MODE: true
- shell: bash
run: |
if [[ "${CODEQL_ACTION_DID_AUTOBUILD_GOLANG}" != true ]]; then
echo "Expected the Go autobuilder to be run, but the" \
"CODEQL_ACTION_DID_AUTOBUILD_GOLANG environment variable was not true."
exit 1
fi
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d go ]]; then
echo "Did not find a Go database"
exit 1
fi
env:
CODEQL_ACTION_RECONCILE_GO_EXTRACTION: 'true'
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -1,112 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Go: Reconciled tracing with custom build steps'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
go-reconciled-tracing-custom-build-steps:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: macos-latest
version: stable-20210308
- os: windows-2019
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: macos-latest
version: stable-20210319
- os: windows-2019
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: macos-latest
version: stable-20210809
- os: windows-2019
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
name: 'Go: Reconciled tracing with custom build steps'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- name: Build code
shell: bash
run: go build main.go
- uses: ./../action/analyze
env:
TEST_MODE: true
- shell: bash
run: |
# Once we start running Bash 4.2 in all environments, we can replace the
# `! -z` flag with the more elegant `-v` which confirms that the variable
# is actually unset and not potentially set to a blank value.
if [[ ! -z "${CODEQL_ACTION_DID_AUTOBUILD_GOLANG}" ]]; then
echo "Expected the Go autobuilder not to be run, but the" \
"CODEQL_ACTION_DID_AUTOBUILD_GOLANG environment variable was set."
exit 1
fi
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d go ]]; then
echo "Did not find a Go database"
exit 1
fi
env:
# Enable reconciled Go tracing beta functionality
CODEQL_ACTION_RECONCILE_GO_EXTRACTION: 'true'
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -1,83 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Packaging: Download using registries'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
init-with-registries:
strategy:
matrix:
include:
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
name: 'Packaging: Download using registries'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Init with registries
uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
config-file: ./.github/codeql/codeql-config-registries.yml
languages: javascript
registries: |
- url: "https://ghcr.io/v2/"
packages: "*/*"
token: "${{ secrets.GITHUB_TOKEN }}"
env:
TEST_MODE: true
- name: Verify packages installed
shell: bash
run: |
PRIVATE_PACK="$HOME/.codeql/packages/dsp-testing/private-pack"
CODEQL_PACK1="$HOME/.codeql/packages/dsp-testing/codeql-pack1"
if [[ -d $PRIVATE_PACK ]]
then
echo "$PRIVATE_PACK was installed."
else
echo "::error $PRIVATE_PACK pack was not installed."
exit 1
fi
if [[ -d $CODEQL_PACK1 ]]
then
echo "$CODEQL_PACK1 was installed."
else
echo "::error $CODEQL_PACK1 pack was not installed."
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -36,7 +35,7 @@ jobs:
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
@@ -52,8 +51,6 @@ jobs:
  languages: javascript
  source-root: ../new-source-root
  tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
  - uses: ./../action/analyze
  with:
  skip-queries: true

View File

@@ -1,140 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - ML-powered queries
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
ml-powered-queries:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220120
- os: macos-latest
version: stable-20220120
- os: windows-latest
version: stable-20220120
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: ML-powered queries
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: javascript
queries: security-extended
source-root: ./../action/tests/ml-powered-queries-repo
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
env:
TEST_MODE: true
- name: Upload SARIF
uses: actions/upload-artifact@v3
with:
name: ml-powered-queries-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
retention-days: 7
- name: Check sarif
uses: ./../action/.github/check-sarif
if: matrix.os != 'windows-latest' || matrix.version == 'latest' || matrix.version
== 'nightly-latest'
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: js/ml-powered/nosql-injection,js/ml-powered/path-injection,js/ml-powered/sql-injection,js/ml-powered/xss
queries-not-run: foo,bar
- name: Check results
# Running ML-powered queries on Windows requires CodeQL CLI 2.9.0+. We don't run these checks
# against Windows and `cached` while CodeQL CLI 2.9.0 makes its way into `cached` to avoid the
# test starting to fail when the cached CodeQL Bundle gets updated. Once the CodeQL Bundle
# containing CodeQL CLI 2.9.0 has been fully released, we can drop this line and start running
# these checks on Windows and `cached`.
if: matrix.os != 'windows-latest' || matrix.version != 'cached'
env:
# Running on Windows requires CodeQL CLI 2.9.0+, which has so far only made it to 'latest'.
SHOULD_RUN_ML_POWERED_QUERIES: ${{ matrix.os != 'windows-latest' || matrix.version
== 'latest' || matrix.version == 'nightly-latest' }}
shell: bash
run: |
echo "Expecting ML-powered queries to be run: ${SHOULD_RUN_ML_POWERED_QUERIES}"
cd "$RUNNER_TEMP/results"
# We should run at least the ML-powered queries in `expected_rules`.
expected_rules="js/ml-powered/nosql-injection js/ml-powered/path-injection js/ml-powered/sql-injection js/ml-powered/xss"
for rule in ${expected_rules}; do
found_rule=$(jq --arg rule "${rule}" '[.runs[0].tool.extensions[].rules | select(. != null) |
flatten | .[].id] | any(. == $rule)' javascript.sarif)
echo "Did find rule '${rule}': ${found_rule}"
if [[ "${found_rule}" != "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
echo "Expected SARIF output to contain rule '${rule}', but found no such rule."
exit 1
elif [[ "${found_rule}" == "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
echo "Found rule '${rule}' in the SARIF output which shouldn't have been part of the analysis."
exit 1
fi
done
# We should have at least one alert from an ML-powered query.
num_alerts=$(jq '[.runs[0].results[] |
select(.properties.score != null and (.rule.id | startswith("js/ml-powered/")))] | length' \
javascript.sarif)
echo "Found ${num_alerts} alerts from ML-powered queries.";
if [[ "${num_alerts}" -eq 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
echo "Expected to find at least one alert from an ML-powered query but found ${num_alerts}."
exit 1
elif [[ "${num_alerts}" -ne 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
echo "Expected not to find any alerts from an ML-powered query but found ${num_alerts}."
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -54,7 +53,7 @@ jobs:
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
@@ -64,8 +63,6 @@ jobs:
  with:
  db-location: ${{ runner.temp }}/customDbLocation
  tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
  - name: Build code
  shell: bash
  run: ./build.sh

View File

@@ -1,102 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: 'PR Check - Packaging: Config and input passed to the CLI'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
packaging-codescanning-config-inputs-js:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
name: 'Packaging: Config and input passed to the CLI'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
packs: +dsp-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
env:
TEST_MODE: true
- name: Check results
uses: ./../action/.github/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results
shell: bash
run: |
cd "$RUNNER_TEMP/results"
# We should have 4 hits from these rules
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
echo "Found matching rules '$RULES'"
if [ "$RULES" != "$EXPECTED_RULES" ]; then
echo "Did not match expected rules '$EXPECTED_RULES'."
exit 1
fi
env:
CODEQL_PASS_CONFIG_TO_CLI: true
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -26,33 +25,15 @@ jobs:
  matrix:
  include:
  - os: ubuntu-latest
- version: latest
+ version: nightly-20210831
  - os: macos-latest
- version: latest
+ version: nightly-20210831
- - os: windows-2019
- version: latest
- - os: windows-2022
- version: latest
- - os: ubuntu-latest
- version: cached
- - os: macos-latest
- version: cached
- - os: windows-2019
- version: cached
- - os: ubuntu-latest
- version: nightly-latest
- - os: macos-latest
- version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
- version: nightly-latest
  name: 'Packaging: Config and input'
  timeout-minutes: 45
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
@@ -61,11 +42,9 @@ jobs:
  - uses: ./../action/init
  with:
  config-file: .github/codeql/codeql-config-packaging3.yml
- packs: +dsp-testing/codeql-pack1@1.0.0
+ packs: +dsp-testing/codeql-pack1@0.1.0
  languages: javascript
  tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
  - name: Build code
  shell: bash
  run: ./build.sh
@@ -74,23 +53,15 @@ jobs:
  output: ${{ runner.temp }}/results
  env:
  TEST_MODE: true
- - name: Check results
- uses: ./../action/.github/check-sarif
- with:
- sarif-file: ${{ runner.temp }}/results/javascript.sarif
- queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
- queries-not-run: foo,bar
  - name: Assert Results
  shell: bash
  run: |
  cd "$RUNNER_TEMP/results"
- # We should have 4 hits from these rules
+ # We should have 3 hits from these rules
- EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
+ EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
  # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
- RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
+ RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
  echo "Found matching rules '$RULES'"
  if [ "$RULES" != "$EXPECTED_RULES" ]; then
  echo "Did not match expected rules '$EXPECTED_RULES'."

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -26,33 +25,15 @@ jobs:
  matrix:
  include:
  - os: ubuntu-latest
- version: latest
+ version: nightly-20210831
  - os: macos-latest
- version: latest
+ version: nightly-20210831
- - os: windows-2019
- version: latest
- - os: windows-2022
- version: latest
- - os: ubuntu-latest
- version: cached
- - os: macos-latest
- version: cached
- - os: windows-2019
- version: cached
- - os: ubuntu-latest
- version: nightly-latest
- - os: macos-latest
- version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
- version: nightly-latest
  name: 'Packaging: Config file'
  timeout-minutes: 45
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
@@ -63,8 +44,6 @@ jobs:
  config-file: .github/codeql/codeql-config-packaging.yml
  languages: javascript
  tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
  - name: Build code
  shell: bash
  run: ./build.sh
@@ -73,23 +52,15 @@ jobs:
  output: ${{ runner.temp }}/results
  env:
  TEST_MODE: true
- - name: Check results
- uses: ./../action/.github/check-sarif
- with:
- sarif-file: ${{ runner.temp }}/results/javascript.sarif
- queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
- queries-not-run: foo,bar
  - name: Assert Results
  shell: bash
  run: |
  cd "$RUNNER_TEMP/results"
- # We should have 4 hits from these rules
+ # We should have 3 hits from these rules
- EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
+ EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
  # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
- RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
+ RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
  echo "Found matching rules '$RULES'"
  if [ "$RULES" != "$EXPECTED_RULES" ]; then
  echo "Did not match expected rules '$EXPECTED_RULES'."

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -26,33 +25,15 @@ jobs:
  matrix:
  include:
  - os: ubuntu-latest
- version: latest
+ version: nightly-20210831
  - os: macos-latest
- version: latest
+ version: nightly-20210831
- - os: windows-2019
- version: latest
- - os: windows-2022
- version: latest
- - os: ubuntu-latest
- version: cached
- - os: macos-latest
- version: cached
- - os: windows-2019
- version: cached
- - os: ubuntu-latest
- version: nightly-latest
- - os: macos-latest
- version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
- version: nightly-latest
  name: 'Packaging: Action input'
  timeout-minutes: 45
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
@@ -62,10 +43,8 @@ jobs:
  with:
  config-file: .github/codeql/codeql-config-packaging2.yml
  languages: javascript
- packs: dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2, dsp-testing/codeql-pack3:other-query.ql
+ packs: dsp-testing/codeql-pack1@0.1.0, dsp-testing/codeql-pack2
  tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
  - name: Build code
  shell: bash
  run: ./build.sh
@@ -74,23 +53,15 @@ jobs:
  output: ${{ runner.temp }}/results
  env:
  TEST_MODE: true
- - name: Check results
- uses: ./../action/.github/check-sarif
- with:
- sarif-file: ${{ runner.temp }}/results/javascript.sarif
- queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
- queries-not-run: foo,bar
  - name: Assert Results
  shell: bash
  run: |
  cd "$RUNNER_TEMP/results"
- # We should have 4 hits from these rules
+ # We should have 3 hits from these rules
- EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
+ EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
  # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
- RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
+ RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
  echo "Found matching rules '$RULES'"
  if [ "$RULES" != "$EXPECTED_RULES" ]; then
  echo "Did not match expected rules '$EXPECTED_RULES'."

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -70,7 +69,7 @@ jobs:
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
@@ -82,8 +81,6 @@ jobs:
  languages: cpp,csharp,java,javascript,python
  config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
  github.sha }}
- env:
- TEST_MODE: true
  - name: Build code
  shell: bash
  run: ./build.sh

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -42,7 +41,7 @@ jobs:
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test

View File

@@ -11,8 +11,7 @@ on:
  push:
  branches:
  - main
- - releases/v1
- - releases/v2
+ - v1
  pull_request:
  types:
  - opened
@@ -26,23 +25,15 @@ jobs:
  matrix:
  include:
  - os: ubuntu-latest
- version: latest
+ version: nightly-20210831
  - os: macos-latest
- version: latest
+ version: nightly-20210831
- - os: ubuntu-latest
- version: cached
- - os: macos-latest
- version: cached
- - os: ubuntu-latest
- version: nightly-latest
- - os: macos-latest
- version: nightly-latest
  name: Split workflow
  timeout-minutes: 45
  runs-on: ${{ matrix.os }}
  steps:
  - name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
  - name: Prepare test
  id: prepare-test
  uses: ./.github/prepare-test
@@ -51,11 +42,9 @@ jobs:
  - uses: ./../action/init
  with:
  config-file: .github/codeql/codeql-config-packaging3.yml
- packs: +dsp-testing/codeql-pack1@1.0.0
+ packs: +dsp-testing/codeql-pack1@0.1.0
  languages: javascript
  tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
  - name: Build code
  shell: bash
  run: ./build.sh
@@ -65,7 +54,6 @@ jobs:
  output: ${{ runner.temp }}/results
  env:
  TEST_MODE: true
  - name: Assert No Results
  shell: bash
  run: |
@@ -83,11 +71,11 @@ jobs:
  shell: bash
  run: |
  cd "$RUNNER_TEMP/results"
- # We should have 4 hits from these rules
+ # We should have 3 hits from these rules
- EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
+ EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
  # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
- RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
+ RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
  echo "Found matching rules '$RULES'"
  if [ "$RULES" != "$EXPECTED_RULES" ]; then
  echo "Did not match expected rules '$EXPECTED_RULES'."

View File

@@ -1,69 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Autobuild working directory
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
test-autobuild-working-dir:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
name: Autobuild working directory
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Test setup
shell: bash
run: |
# Make sure that Gradle build succeeds in autobuild-dir ...
cp -a ../action/tests/java-repo autobuild-dir
# ... and fails if attempted in the current directory
echo > build.gradle
- uses: ./../action/init
with:
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- uses: ./../action/autobuild
with:
working-directory: autobuild-dir
- uses: ./../action/analyze
env:
TEST_MODE: true
- name: Check database
shell: bash
run: |
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d java ]]; then
echo "Did not find a Java database"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- - releases/v1
- - releases/v2
+ - v1
pull_request:
types:
- opened
@@ -32,7 +31,7 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
@@ -47,8 +46,6 @@ jobs:
- uses: ./../action/init
with:
tools: ./codeql-bundle.tar.gz
- env:
- TEST_MODE: true
- name: Build code
shell: bash
run: ./build.sh

.github/workflows/__test-proxy.yml (generated, vendored): 7 changed lines

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- - releases/v1
- - releases/v2
+ - v1
pull_request:
types:
- opened
@@ -32,7 +31,7 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
@@ -42,8 +41,6 @@ jobs:
with:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
- uses: ./../action/analyze
env:
TEST_MODE: true

.github/workflows/__test-ruby.yml (generated, vendored): 7 changed lines

@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- - releases/v1
- - releases/v2
+ - v1
pull_request:
types:
- opened
@@ -42,7 +41,7 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
@@ -52,8 +51,6 @@ jobs:
with:
languages: ruby
tools: ${{ steps.prepare-test.outputs.tools-url }}
- env:
- TEST_MODE: true
- uses: ./../action/analyze
id: analysis
env:

.github/workflows/__unset-environment.yml (generated, vendored, new file): 94 lines

@@ -0,0 +1,94 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Test unsetting environment variables
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
unset-environment:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
name: Test unsetting environment variables
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
- shell: bash
run: |
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for CPP, or created it in the wrong location."
exit 1
fi
CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for C Sharp, or created it in the wrong location."
exit 1
fi
GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Go, or created it in the wrong location."
exit 1
fi
JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Java, or created it in the wrong location."
exit 1
fi
JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Javascript, or created it in the wrong location."
exit 1
fi
PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Python, or created it in the wrong location."
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
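The "Build code" step in the workflow above runs the build under `env -i` so that only `PATH` and `HOME` survive into the build environment. A minimal sketch of that technique outside the workflow, assuming a local `./build.sh` like the one in the test repositories:

# env -i clears the inherited environment and forwards only the variables listed after it.
env -i PATH="$PATH" HOME="$HOME" ./build.sh
# To inspect exactly what the build would inherit, swap the build command for printenv:
env -i PATH="$PATH" HOME="$HOME" printenv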


@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- - releases/v1
- - releases/v2
+ - v1
pull_request:
types:
- opened
@@ -70,7 +69,7 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
@@ -82,8 +81,6 @@ jobs:
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- env:
- TEST_MODE: true
- name: Build code
shell: bash
run: ./build.sh


@@ -11,8 +11,7 @@ on:
push:
branches:
- main
- - releases/v1
- - releases/v2
+ - v1
pull_request:
types:
- opened
@@ -70,13 +69,13 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v2
with:
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
path: x/y/z/some-path
@@ -87,8 +86,6 @@ jobs:
languages: csharp,javascript
source-path: x/y/z/some-path/tests/multi-language-repo
debug: true
- env:
- TEST_MODE: true
- name: Build code (non-windows)
shell: bash
if: ${{ runner.os != 'Windows' }}


@@ -15,7 +15,7 @@ jobs:
steps:
- name: Checkout CodeQL Action
- uses: actions/checkout@v3
+ uses: actions/checkout@v2
- name: Check Expected Release Files
run: |
bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"


@@ -2,9 +2,9 @@ name: "CodeQL action"
on:
push:
- branches: [main, releases/v1, releases/v2]
+ branches: [main, v1, v2]
pull_request:
- branches: [main, releases/v1, releases/v2]
+ branches: [main, v1, v2]
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
@@ -20,7 +20,7 @@ jobs:
security-events: write
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v2
- name: Init with default CodeQL bundle from the VM image
id: init-default
uses: ./init
@@ -75,7 +75,7 @@ jobs:
security-events: write
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v2
- uses: ./init
id: init
with:


@@ -1,220 +0,0 @@
# Tests that the generated code scanning config file contains the expected contents
name: Code-Scanning config CLI tests
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODEQL_PASS_CONFIG_TO_CLI: true
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
code-scanning-config-tests:
continue-on-error: true
strategy:
fail-fast: true
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
# Code-Scanning config not created because environment variable is not set
name: Code Scanning Configuration tests
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Empty file
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: "{}"
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Packs from input
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"packs": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
}
languages: javascript
packs: dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Packs from input with +
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"packs": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
}
languages: javascript
packs: + dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries from input
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }]
}
languages: javascript
queries: ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries from input with +
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }]
}
languages: javascript
queries: + ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries and packs from input with +
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }],
"packs": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
}
languages: javascript
queries: + ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
packs: + dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries and packs from config
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/foo2/show_ifs.ql" }],
"packs": {
"javascript": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
}
}
languages: javascript
config-file-test: .github/codeql/queries-and-packs-config.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries and packs from config overriden by input
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }],
"packs": ["codeql/javascript-queries"]
}
languages: javascript
queries: ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
packs: codeql/javascript-queries
config-file-test: .github/codeql/queries-and-packs-config.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Queries and packs from config merging with input
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"queries": [
{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/foo2/show_ifs.ql" },
{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }
],
"packs": {
"javascript": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2", "codeql/javascript-queries" ]
}
}
languages: javascript
queries: + ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
packs: + codeql/javascript-queries
config-file-test: .github/codeql/queries-and-packs-config.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Multi-language packs from config
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"packs": {
"javascript": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ],
"ruby": ["codeql/ruby-queries"]
},
"queries": [
{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/foo2/show_ifs.ql" }
]
}
languages: javascript,ruby
config-file-test: .github/codeql/multi-language-packs-config.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Other config properties
if: success() || failure()
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: |
{
"name": "Config using all properties",
"packs": ["codeql/javascript-queries" ],
"disable-default-queries": true,
"paths-ignore": ["xxx"],
"paths": ["yyy"]
}
languages: javascript
packs: + codeql/javascript-queries
config-file-test: .github/codeql/other-config-properties.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Config not generated when env var is not set
if: success() || failure()
env:
CODEQL_PASS_CONFIG_TO_CLI: false
uses: ./../action/.github/check-codescanning-config
with:
expected-config-file-contents: ""
languages: javascript
packs: + codeql/javascript-queries
config-file-test: .github/codeql/other-config-properties.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}


@@ -1,90 +0,0 @@
# Checks logs, SARIF, and database bundle debug artifacts exist
# when the analyze step fails.
name: PR Check - Debug artifacts after failure
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
upload-artifacts:
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
name: Upload debug artifacts after failure in analyze
continue-on-error: true
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Dump GitHub event
run: cat "${GITHUB_EVENT_PATH}"
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: latest
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: my-debug-artifacts
debug-database-name: my-db
env:
TEST_MODE: true
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
id: analysis
with:
expect-error: true
ram: 1
env:
TEST_MODE: true
download-and-check-artifacts:
name: Download and check debug artifacts after failure in analyze
needs: upload-artifacts
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v3
- name: Check expected artifacts exist
shell: bash
run: |
OPERATING_SYSTEMS="ubuntu-latest macos-latest"
LANGUAGES="cpp csharp go java javascript python"
for os in $OPERATING_SYSTEMS; do
pushd "./my-debug-artifacts-$os"
echo "Artifacts from run on $os:"
for language in $LANGUAGES; do
echo "- Checking $language"
if [[ ! -f "my-db-$language-partial.zip" ]] ; then
echo "Missing a partial database bundle for $language"
exit 1
fi
if [[ ! -d "log" ]] ; then
echo "Missing database initialization logs"
exit 1
fi
if [[ ! -d "$language/log" ]] ; then
echo "Missing logs for $language"
exit 1
fi
done
popd
done
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -1,87 +0,0 @@
# Checks logs, SARIF, and database bundle debug artifacts exist.
name: PR Check - Debug artifact upload
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
upload-artifacts:
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
version: [stable-20210308, stable-20210319, stable-20210809, cached, latest, nightly-latest]
name: Upload debug artifacts
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: my-debug-artifacts
debug-database-name: my-db
env:
TEST_MODE: true
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
download-and-check-artifacts:
name: Download and check debug artifacts
needs: upload-artifacts
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v3
- name: Check expected artifacts exist
shell: bash
run: |
OPERATING_SYSTEMS="ubuntu-latest macos-latest"
VERSIONS="stable-20210308 stable-20210319 stable-20210809 cached latest nightly-latest"
LANGUAGES="cpp csharp go java javascript python"
for os in $OPERATING_SYSTEMS; do
for version in $VERSIONS; do
pushd "./my-debug-artifacts-$os-$version"
echo "Artifacts from version $version on $os:"
for language in $LANGUAGES; do
echo "- Checking $language"
if [[ ! -f "$language.sarif" ]] ; then
echo "Missing a SARIF file for $language"
exit 1
fi
if [[ ! -f "my-db-$language.zip" ]] ; then
echo "Missing a database bundle for $language"
exit 1
fi
if [[ ! -d "$language/log" ]] ; then
echo "Missing logs for $language"
exit 1
fi
done
popd
done
done
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -1,49 +0,0 @@
name: Check queries that ran
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
expected-queries:
name: Expected Queries Tests
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: latest
- uses: ./../action/init
with:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
upload: false
env:
TEST_MODE: true
- name: Check Sarif
uses: ./../action/.github/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: js/incomplete-hostname-regexp,js/path-injection
queries-not-run: foo,bar


@@ -1,8 +1,7 @@
- # This workflow runs after a release of the action. For v2 releases, it merges any changes from the
- # release back into the main branch. Typically, this is just a single commit that updates the
- # changelog. For v2 and v1 releases, it then (a) tags the merge commit on the release branch that
- # represents the new release with an `vx.y.z` tag and (b) updates the `vx` tag to refer to this
- # commit.
+ # This workflow runs after a release of the action.
+ # It merges any changes from the release back into the
+ # main branch. Typically, this is just a single commit
+ # that updates the changelog.
name: Tag release and merge back
on:
@@ -15,8 +14,8 @@ on:
push:
branches:
- - releases/v1
- - releases/v2
+ - v1
+ - v2
jobs:
merge-back:
@@ -33,10 +32,10 @@ jobs:
- name: Dump GitHub context
env:
GITHUB_CONTEXT: '${{ toJson(github) }}'
- run: echo "${GITHUB_CONTEXT}"
+ run: echo "$GITHUB_CONTEXT"
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v2
- - uses: actions/setup-node@v3
+ - uses: actions/setup-node@v2
- name: Update git config
run: |
@@ -47,25 +46,25 @@ jobs:
id: getVersion
run: |
VERSION="v$(jq '.version' -r 'package.json')"
- echo "::set-output name=version::${VERSION}"
- short_sha="${GITHUB_SHA:0:8}"
- NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${short_sha}"
- echo "::set-output name=newBranch::${NEW_BRANCH}"
+ SHORT_SHA="${GITHUB_SHA:0:8}"
+ echo "::set-output name=version::$VERSION"
+ NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${SHORT_SHA}"
+ echo "::set-output name=newBranch::$NEW_BRANCH"
- name: Dump branches
env:
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
run: |
- echo "BASE_BRANCH ${BASE_BRANCH}"
- echo "HEAD_BRANCH ${HEAD_BRANCH}"
- echo "NEW_BRANCH ${NEW_BRANCH}"
+ echo "BASE_BRANCH $BASE_BRANCH"
+ echo "HEAD_BRANCH $HEAD_BRANCH"
+ echo "NEW_BRANCH $NEW_BRANCH"
- name: Create mergeback branch
env:
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
run: |
- git checkout -b "${NEW_BRANCH}"
+ git checkout -b "$NEW_BRANCH"
- name: Check for tag
id: check
@@ -73,13 +72,13 @@ jobs:
VERSION: "${{ steps.getVersion.outputs.version }}"
run: |
set +e # don't fail on an errored command
- git ls-remote --tags origin | grep "${VERSION}"
- exists="$?"
- if [ "${exists}" -eq 0 ]; then
- echo "Tag ${VERSION} exists. Not going to re-release."
+ git ls-remote --tags origin | grep "$VERSION"
+ EXISTS="$?"
+ if [ "$EXISTS" -eq 0 ]; then
+ echo "Tag $TAG exists. Not going to re-release."
echo "::set-output name=exists::true"
else
- echo "Tag ${VERSION} does not exist yet."
+ echo "Tag $TAG does not exist yet."
fi
# we didn't tag the release during the update-release-branch workflow because the
@@ -90,41 +89,20 @@ jobs:
env:
VERSION: ${{ steps.getVersion.outputs.version }}
run: |
- # Unshallow the repo in order to allow pushes
- git fetch --unshallow
- # Create the `vx.y.z` tag
- git tag --annotate "${VERSION}" --message "${VERSION}"
- # Update the `vx` tag
- major_version_tag=$(cut -d '.' -f1 <<< "${VERSION}")
- # Use `--force` to overwrite the major version tag
- git tag --annotate "${major_version_tag}" --message "${major_version_tag}" --force
- # Push the tags, using:
- # - `--atomic` to make sure we either update both tags or neither (an intermediate state,
- # e.g. where we update the v2.x.y tag on the remote but not the v2 tag, could result in
- # unwanted Dependabot updates, e.g. from v2 to v2.x.y)
- # - `--force` since we're overwriting the `vx` tag
- git push origin --atomic --force refs/tags/"${VERSION}" refs/tags/"${major_version_tag}"
+ git tag -a "$VERSION" -m "$VERSION"
+ git fetch --unshallow # unshallow the repo in order to allow pushes
+ git push origin --follow-tags "$VERSION"
- name: Create mergeback branch
- if: steps.check.outputs.exists != 'true' && contains(github.ref, 'releases/v2')
+ if: steps.check.outputs.exists != 'true' && contains(github.ref, 'v2')
env:
VERSION: "${{ steps.getVersion.outputs.version }}"
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
run: |
set -exu
- pr_title="Mergeback ${VERSION} ${HEAD_BRANCH} into ${BASE_BRANCH}"
- pr_body=$(cat << EOF
- This PR bumps the version number and updates the changelog after the ${VERSION} release.
- Please do the following:
- - [ ] Remove and re-add the "Update dependencies" label to the PR to trigger just this workflow.
- - [ ] Wait for the "Update dependencies" workflow to push a commit updating the dependencies.
- - [ ] Mark the PR as ready for review to trigger the full set of PR checks.
- - [ ] Approve and merge the PR.
- EOF
- )
+ PR_TITLE="Mergeback $VERSION $HEAD_BRANCH into $BASE_BRANCH"
+ PR_BODY="Updates version and changelog."
# Update the version number ready for the next release
npm version patch --no-git-tag-version
@@ -132,17 +110,16 @@ jobs:
# Update the changelog
perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
git add .
- git commit -m "Update changelog and version after ${VERSION}"
- git push origin "${NEW_BRANCH}"
+ git commit -m "Update changelog and version after $VERSION"
+ git push origin "$NEW_BRANCH"
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft
# so that a maintainer can take the PR out of draft, thereby triggering the PR checks.
gh pr create \
- --head "${NEW_BRANCH}" \
- --base "${BASE_BRANCH}" \
- --title "${pr_title}" \
+ --head "$NEW_BRANCH" \
+ --base "$BASE_BRANCH" \
+ --title "$PR_TITLE" \
--label "Update dependencies" \
- --body "${pr_body}" \
- --assignee "${GITHUB_ACTOR}" \
+ --body "$PR_BODY" \
--draft
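One side of the tagging hunk above creates both a `vx.y.z` tag and its `vx` major tag and pushes them together. A rough standalone sketch of that sequence, with a placeholder version and not tied to this repository's release process:

VERSION="v1.2.3"                                  # placeholder release version
MAJOR="$(cut -d '.' -f1 <<< "$VERSION")"          # -> v1
git tag --annotate "$VERSION" --message "$VERSION"
git tag --annotate "$MAJOR" --message "$MAJOR" --force    # move the major tag forward
# --atomic: update both refs or neither; --force because the major tag already exists.
git push origin --atomic --force "refs/tags/$VERSION" "refs/tags/$MAJOR"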


@@ -1,8 +1,8 @@
- name: PR Checks
+ name: PR Checks (Basic Checks and Runner)
on:
push:
- branches: [main, releases/v1, releases/v2]
+ branches: [main, v1]
pull_request:
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
@@ -10,8 +10,17 @@ on:
workflow_dispatch:
jobs:
+ lint-js:
+ name: Lint
+ runs-on: ubuntu-latest
+ timeout-minutes: 45
+ steps:
+ - uses: actions/checkout@v2
+ - name: Run Lint
+ run: npm run-script lint
check-js:
- name: Check JS
runs-on: ubuntu-latest
timeout-minutes: 45
@@ -21,11 +30,7 @@ jobs:
node-types-version: [12.12, current]
steps:
- - name: Checkout
- uses: actions/checkout@v3
- - name: Lint
- run: npm run-script lint
+ - uses: actions/checkout@v2
- name: Update version of @types/node
if: matrix.node-types-version != 'current'
@@ -58,47 +63,25 @@ jobs:
timeout-minutes: 45
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v2
- name: Check node modules up to date
run: .github/workflows/script/check-node-modules.sh
- check-file-contents:
- name: Check file contents
+ verify-pr-checks:
+ name: Verify PR checks up to date
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- - name: Checkout
- uses: actions/checkout@v3
- # Checks for any conflict markers created by git. This check is primarily intended to validate that
- # any merge conflicts in the v2 -> v1 backport PR are fixed before the PR is merged.
- - name: Check for merge conflicts
- run: |
- # Use `|| true` since grep returns exit code 1 if there are no matches, and we don't want
- # this to fail the workflow.
- FILES_WITH_CONFLICTS=$(grep --extended-regexp --ignore-case --line-number --recursive \
- '^(<<<<<<<|>>>>>>>)' . || true)
- if [[ "${FILES_WITH_CONFLICTS}" ]]; then
- echo "Fail: Found merge conflict markers in the following files:"
- echo ""
- echo "${FILES_WITH_CONFLICTS}"
- exit 1
- else
- echo "Success: Found no merge conflict markers."
- fi
+ - uses: actions/checkout@v2
- name: Set up Python
- uses: actions/setup-python@v3
+ uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install ruamel.yaml
- # Ensure the generated PR check workflows are up to date.
- name: Verify PR checks up to date
run: .github/workflows/script/verify-pr-checks.sh
@@ -107,15 +90,393 @@ jobs:
needs: [check-js, check-node-modules]
strategy:
matrix:
- os: [ubuntu-latest, macos-latest, windows-latest]
+ os: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
timeout-minutes: 45
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v2
- - name: npm test
- run: |
- # Run any commands referenced in package.json using Bash, otherwise
- # we won't be able to find them on Windows.
- npm config set script-shell bash
- npm test
+ - name: npm run-script test
+ run: npm run-script test
+ runner-analyze-javascript-ubuntu:
+ name: Runner ubuntu JS analyze
+ needs: [check-js, check-node-modules]
+ timeout-minutes: 45
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Build runner
+ run: |
+ cd runner
+ npm install
+ npm run build-runner
- name: Run init
run: |
# Pass --config-file here, but not for other jobs in this workflow.
# This means we're testing the config file parsing in the runner
# but not slowing down all jobs unnecessarily as it doesn't add much
# testing the parsing on different operating systems and languages.
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-windows:
name: Runner windows JS analyze
needs: [check-js, check-node-modules]
timeout-minutes: 45
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-macos:
name: Runner macos JS analyze
needs: [check-js, check-node-modules]
timeout-minutes: 45
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-ubuntu:
name: Runner ubuntu C# analyze
needs: [check-js, check-node-modules]
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
. ./codeql-runner/codeql-env.sh
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-windows:
name: Runner windows C# analyze
needs: [check-js, check-node-modules]
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
# `windows-latest`.
timeout-minutes: 45
runs-on: windows-2019
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
$Env:CODEQL_EXTRACTOR_CSHARP_ROOT = "" # Unset an environment variable to make sure the tracer resists this
& $Env:CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
- name: Upload tracer logs
uses: actions/upload-artifact@v2
with:
name: tracer-logs
path: ./codeql-runner/compound-build-tracer.log
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-macos:
name: Runner macos C# analyze
timeout-minutes: 45
needs: [check-js, check-node-modules]
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-ubuntu:
name: Runner ubuntu autobuild C# analyze
timeout-minutes: 45
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
../action/runner/dist/codeql-runner-linux autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-windows:
timeout-minutes: 45
name: Runner windows autobuild C# analyze
needs: [check-js, check-node-modules]
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
# `windows-latest`.
runs-on: windows-2019
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
../action/runner/dist/codeql-runner-win.exe autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-macos:
name: Runner macos autobuild C# analyze
needs: [check-js, check-node-modules]
runs-on: macos-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
../action/runner/dist/codeql-runner-macos autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-upload-sarif:
name: Runner upload sarif
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
timeout-minutes: 45
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Upload with runner
run: |
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
runner-extractor-ram-threads-options:
name: Runner ubuntu extractor RAM and threads options
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-linux init --ram=230 --threads=1 --repository $GITHUB_REPOSITORY --languages java --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Assert Results
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
if [ "${CODEQL_RAM}" != "230" ]; then
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_THREADS}" != "1" ]; then
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
exit 1
fi


@@ -2,22 +2,11 @@ name: Test Python Package Installation on Linux and Mac
on:
push:
- branches: [main, releases/v1, releases/v2]
+ branches: [main, v1]
pull_request:
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
- paths:
- # Changes to this workflow.
- - '.github/workflows/python-deps.yml'
- # Changes to the Python package installation scripts and their tests.
- - 'python-setup/**'
- # Changes to the default CodeQL bundle version.
- - '**/defaults.json'
- schedule:
- # Weekly on Monday.
- - cron: '0 0 * * 1'
- workflow_dispatch:
jobs:
test-setup-python-scripts:
@@ -26,20 +15,9 @@ jobs:
strategy:
fail-fast: false
matrix:
- os: [ubuntu-latest, ubuntu-22.04, macos-latest]
+ os: [ubuntu-latest, macos-latest]
python_deps_type: [pipenv, poetry, requirements, setup_py]
python_version: [2, 3]
- exclude:
- # Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
- - python_version: 2
- python_deps_type: poetry
- # Python2 and pipenv are not supported since pipenv v2021.11.5
- - python_version: 2
- python_deps_type: pipenv
- # Python2 is not available on ubuntu-22.04 by default -- see https://github.com/github/codeql-action/pull/1257
- - python_version: 2
- os: ubuntu-22.04
env:
PYTHON_DEPS_TYPE: ${{ matrix.python_deps_type }}
@@ -47,7 +25,7 @@ jobs:
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v2
- name: Initialize CodeQL
uses: ./init
@@ -66,7 +44,6 @@ jobs:
case ${{ matrix.os }} in
ubuntu-latest*) basePath="/opt";;
- ubuntu-22.04*) basePath="/opt";;
macos-latest*) basePath="/Users/runner";;
esac
echo ${basePath}
@@ -90,11 +67,11 @@ jobs:
strategy:
fail-fast: false
matrix:
- os: [ubuntu-latest, ubuntu-22.04, macos-latest]
+ os: [ubuntu-latest, macos-latest]
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v2
- name: Initialize CodeQL
uses: ./init
@@ -113,7 +90,6 @@ jobs:
case ${{ matrix.os }} in
ubuntu-latest*) basePath="/opt";;
- ubuntu-22.04*) basePath="/opt";;
macos-latest*) basePath="/Users/runner";;
esac
echo ${basePath}
@@ -139,13 +115,6 @@ jobs:
matrix:
python_deps_type: [pipenv, poetry, requirements, setup_py]
python_version: [2, 3]
- exclude:
- # Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
- - python_version: 2
- python_deps_type: poetry
- # Python2 and pipenv are not supported since pipenv v2021.11.5
- - python_version: 2
- python_deps_type: pipenv
env:
PYTHON_DEPS_TYPE: ${{ matrix.python_deps_type }}
@@ -153,9 +122,9 @@ jobs:
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v3
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python_version }}
@@ -165,8 +134,6 @@ jobs:
tools: latest
languages: python
setup-python-dependencies: false
- env:
- TEST_MODE: true
- name: Test Auto Package Installation
run: |


@@ -1,56 +0,0 @@
name: Query filters tests
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
query-filters:
name: Query Filters Tests
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: latest
- name: Check SARIF for default queries with Single include, Single exclude
uses: ./../action/.github/query-filter-test
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: js/zipslip
queries-not-run: js/path-injection
config-file: ./.github/codeql/codeql-config-query-filters1.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Check SARIF for query packs with Single include, Single exclude
uses: ./../action/.github/query-filter-test
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: js/zipslip,javascript/example/empty-or-one-block
queries-not-run: js/path-injection
config-file: ./.github/codeql/codeql-config-query-filters2.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Check SARIF for query packs and local queries with Single include, Single exclude
uses: ./../action/.github/query-filter-test
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run: js/zipslip,javascript/example/empty-or-one-block,inrepo-javascript-querypack/show-ifs
queries-not-run: js/path-injection,complex-python-querypack/show-ifs,complex-python-querypack/foo/bar/show-ifs
config-file: ./.github/codeql/codeql-config-query-filters3.yml
tools: ${{ steps.prepare-test.outputs.tools-url }}

.github/workflows/release-runner.yml (vendored, new file): 55 lines

@@ -0,0 +1,55 @@
name: Release runner
on:
workflow_dispatch:
inputs:
bundle-tag:
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
required: false
jobs:
release-runner:
timeout-minutes: 45
runs-on: ubuntu-latest
env:
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
strategy:
matrix:
extension: ["linux", "macos", "win.exe"]
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- uses: actions/upload-artifact@v2
with:
name: codeql-runner-${{matrix.extension}}
path: runner/dist/codeql-runner-${{matrix.extension}}
- name: Resolve Upload URL for the release
if: ${{ github.event.inputs.bundle-tag != null }}
id: save_url
run: |
UPLOAD_URL=$(curl -sS \
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
-H "Accept: application/json" \
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
echo ${UPLOAD_URL}
echo "::set-output name=upload_url::${UPLOAD_URL}"
- name: Upload Platform Package
if: ${{ github.event.inputs.bundle-tag != null }}
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.save_url.outputs.upload_url }}
asset_path: runner/dist/codeql-runner-${{matrix.extension}}
asset_name: codeql-runner-${{matrix.extension}}
asset_content_type: application/octet-stream


@@ -1,412 +0,0 @@
name: CodeQL Runner Checks
on:
push:
branches: [main, releases/v1, releases/v2]
pull_request:
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]
workflow_dispatch:
jobs:
runner-analyze-javascript-ubuntu:
name: Runner ubuntu JS analyze
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
# Pass --config-file here, but not for other jobs in this workflow.
# This means we're testing the config file parsing in the runner
# but not slowing down all jobs unnecessarily as it doesn't add much
# testing the parsing on different operating systems and languages.
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- name: Run analyze
run: |
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-windows:
name: Runner windows JS analyze
timeout-minutes: 45
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- name: Run analyze
run: |
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-macos:
name: Runner macos JS analyze
timeout-minutes: 45
runs-on: macos-latest
steps:
- uses: actions/checkout@v3
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- name: Run analyze
run: |
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-ubuntu:
name: Runner ubuntu C# analyze
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- name: Build code
run: |
. ./codeql-runner/codeql-env.sh
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-windows:
name: Runner windows C# analyze
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
# `windows-latest`.
timeout-minutes: 45
runs-on: windows-2019
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- name: Build code
shell: powershell
run: |
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
& $Env:CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
- name: Upload tracer logs
uses: actions/upload-artifact@v3
with:
name: tracer-logs
path: ./codeql-runner/compound-build-tracer.log
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-macos:
name: Runner macos C# analyze
timeout-minutes: 45
runs-on: macos-latest
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- name: Build code
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-ubuntu:
name: Runner ubuntu autobuild C# analyze
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- name: Build code
run: |
../action/runner/dist/codeql-runner-linux autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-windows:
timeout-minutes: 45
name: Runner windows autobuild C# analyze
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
# `windows-latest`.
runs-on: windows-2019
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- name: Build code
shell: powershell
run: |
../action/runner/dist/codeql-runner-win.exe autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-macos:
name: Runner macos autobuild C# analyze
runs-on: macos-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v3
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- name: Build code
shell: bash
run: |
. codeql-runner/codeql-env.sh
CODEQL_RUNNER="$(cat codeql-runner/codeql-env.json | jq -r '.CODEQL_RUNNER')"
echo "$CODEQL_RUNNER"
$CODEQL_RUNNER ../action/runner/dist/codeql-runner-macos autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-upload-sarif:
name: Runner upload sarif
runs-on: ubuntu-latest
timeout-minutes: 45
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
steps:
- uses: actions/checkout@v3
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Upload with runner
run: |
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
runner-extractor-ram-threads-options:
name: Runner ubuntu extractor RAM and threads options
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v3
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-linux init --ram=230 --threads=1 --repository $GITHUB_REPOSITORY --languages java --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
- name: Assert Results
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
if [ "${CODEQL_RAM}" != "230" ]; then
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_THREADS}" != "1" ]; then
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
exit 1
fi
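Each of these runner jobs follows the same pattern: `init` writes `codeql-env.sh` and `codeql-env.json` into `./codeql-runner`, the build step sources that file (or reads it with `jq`) so the compiler runs under the CodeQL tracer, and `analyze` finalizes and uploads the databases. A minimal sketch of such a build step, assuming the same paths and variable names used in the jobs above (illustrative only, not part of the workflow):

```bash
# Source the environment written by `codeql-runner-linux init`.
. ./codeql-runner/codeql-env.sh        # exports CODEQL_RUNNER, CODEQL_RAM, CODEQL_THREADS, ...

# The same values are also available as JSON if sourcing is inconvenient.
CODEQL_RUNNER="$(jq -r '.CODEQL_RUNNER' codeql-runner/codeql-env.json)"

# Run the real build under the tracer so the extractor sees every compiler invocation.
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
```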

View File

@@ -14,8 +14,8 @@ npm run-script build
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
->&2 echo "Failed: JavaScript files are not up to date. Run 'rm -rf lib && npm run-script build' to update"
+>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
git status
exit 1
fi
echo "Success: JavaScript files are up to date"

View File

@@ -1,37 +0,0 @@
#!/usr/bin/env bash
# Update the required checks based on the current branch.
# Typically, this will be main.
if ! gh auth status 2>/dev/null; then
gh auth status
echo "Failed: Not authorized. This script requires admin access to github/codeql-action through the gh CLI."
exit 1
fi
if [ "$#" -eq 1 ]; then
# If we were passed an argument, use that as the SHA
GITHUB_SHA="$1"
elif [ "$#" -gt 1 ]; then
echo "Usage: $0 [SHA]"
echo "Update the required checks based on the SHA, or main."
exit 1
elif [ -z "$GITHUB_SHA" ]; then
# If we don't have a SHA, use main
GITHUB_SHA="$(git rev-parse main)"
fi
echo "Getting checks for $GITHUB_SHA"
# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") or contains("update") | not)] | unique | sort')"
echo "$CHECKS" | jq
echo "{\"contexts\": ${CHECKS}}" > checks.json
for BRANCH in main releases/v2 releases/v1; do
echo "Updating $BRANCH"
gh api --silent -X "PATCH" "repos/github/codeql-action/branches/$BRANCH/protection/required_status_checks" --input checks.json
done
rm checks.json
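For reference, a hedged sketch of how this script was typically invoked locally, using the path given for it in CONTRIBUTING.md; the SHA argument is optional and defaults to the tip of `main`, and the `gh` CLI session must have admin access to the repository:

```bash
# Make sure main is current, since the script defaults to `git rev-parse main`.
git checkout main && git pull

# Update the required checks from the latest commit on main...
./.github/workflows/script/update-required-checks.sh

# ...or from an explicit commit SHA.
./.github/workflows/script/update-required-checks.sh "$(git rev-parse origin/main)"
```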

.github/workflows/split.yml vendored Normal file
View File

@@ -0,0 +1,74 @@
#
# Split the CodeQL Bundle into platform bundles
#
# Instructions:
# 1. Upload the new codeql-bundle (codeql-bundle.tar.gz) as an asset of the
# release (codeql-bundle-20200826)
# 2. Take note of the CLI Release used by the bundle (e.g., v2.2.5)
# 3. Manually launch this workflow file (via the Actions UI) specifying
# - The CLI Release (e.g., v2.2.5)
# - The release tag (e.g., codeql-bundle-20200826)
# 4. If everything succeeds you should see 3 new assets.
#
name: Split Bundle
on:
workflow_dispatch:
inputs:
cli-release:
description: 'CodeQL CLI Release (e.g., "v2.2.5")'
required: true
bundle-tag:
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
required: true
jobs:
build:
runs-on: ubuntu-latest
timeout-minutes: 45
env:
CLI_RELEASE: "${{ github.event.inputs.cli-release }}"
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
strategy:
fail-fast: false
matrix:
platform: ["linux64", "osx64", "win64"]
steps:
- name: Resolve Upload URL for the release
id: save_url
run: |
UPLOAD_URL=$(curl -sS \
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
-H "Accept: application/json" \
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
echo ${UPLOAD_URL}
echo "::set-output name=upload_url::${UPLOAD_URL}"
- name: Download CodeQL CLI and Bundle
run: |
wget --no-verbose "https://github.com/${GITHUB_REPOSITORY}/releases/download/${RELEASE_TAG}/codeql-bundle.tar.gz"
wget --no-verbose "https://github.com/github/codeql-cli-binaries/releases/download/${CLI_RELEASE}/codeql-${{matrix.platform}}.zip"
- name: Create Platform Package
# Replace the codeql-binaries with the platform specific ones
run: |
gunzip codeql-bundle.tar.gz
tar -f codeql-bundle.tar --delete codeql
unzip -q codeql-${{matrix.platform}}.zip
tar -f codeql-bundle.tar --append codeql
gzip codeql-bundle.tar
mv codeql-bundle.tar.gz codeql-bundle-${{matrix.platform}}.tar.gz
du -sh codeql-bundle-${{matrix.platform}}.tar.gz
- name: Upload Platform Package
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.save_url.outputs.upload_url }}
asset_path: ./codeql-bundle-${{matrix.platform}}.tar.gz
asset_name: codeql-bundle-${{matrix.platform}}.tar.gz
asset_content_type: application/tar+gzip
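The instructions above describe launching this workflow from the Actions UI; the same dispatch can be done from a terminal with the `gh` CLI, assuming it is installed and authenticated against a repository that contains this workflow file (the example input values are taken from the comments above):

```bash
# Equivalent of step 3: trigger the Split Bundle workflow with its two required inputs.
gh workflow run split.yml \
  --repo github/codeql-action \
  -f cli-release="v2.2.5" \
  -f bundle-tag="codeql-bundle-20200826"
```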

View File

@@ -1,95 +0,0 @@
# See `unset-environment-old-cli.yml` for reasoning behind the separate tests.
name: PR Check - Test unsetting environment variables for CLI version >= 2.5.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
unset-environment:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
name: Test unsetting environment variables
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
- shell: bash
run: |
CPP_DB="${{ fromJson(steps.analysis.outputs.db-locations).cpp }}"
if [[ ! -d "$CPP_DB" ]] || [[ ! "$CPP_DB" == "${RUNNER_TEMP}/customDbLocation/cpp" ]]; then
echo "::error::Did not create a database for CPP, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/cpp' but actual was '${CPP_DB}'"
exit 1
fi
CSHARP_DB="${{ fromJson(steps.analysis.outputs.db-locations).csharp }}"
if [[ ! -d "$CSHARP_DB" ]] || [[ ! "$CSHARP_DB" == "${RUNNER_TEMP}/customDbLocation/csharp" ]]; then
echo "::error::Did not create a database for C Sharp, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/csharp' but actual was '${CSHARP_DB}'"
exit 1
fi
GO_DB="${{ fromJson(steps.analysis.outputs.db-locations).go }}"
if [[ ! -d "$GO_DB" ]] || [[ ! "$GO_DB" == "${RUNNER_TEMP}/customDbLocation/go" ]]; then
echo "::error::Did not create a database for Go, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/go' but actual was '${GO_DB}'"
exit 1
fi
JAVA_DB="${{ fromJson(steps.analysis.outputs.db-locations).java }}"
if [[ ! -d "$JAVA_DB" ]] || [[ ! "$JAVA_DB" == "${RUNNER_TEMP}/customDbLocation/java" ]]; then
echo "::error::Did not create a database for Java, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/java' but actual was '${JAVA_DB}'"
exit 1
fi
JAVASCRIPT_DB="${{ fromJson(steps.analysis.outputs.db-locations).javascript }}"
if [[ ! -d "$JAVASCRIPT_DB" ]] || [[ ! "$JAVASCRIPT_DB" == "${RUNNER_TEMP}/customDbLocation/javascript" ]]; then
echo "::error::Did not create a database for Javascript, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/javascript' but actual was '${JAVASCRIPT_DB}'"
exit 1
fi
PYTHON_DB="${{ fromJson(steps.analysis.outputs.db-locations).python }}"
if [[ ! -d "$PYTHON_DB" ]] || [[ ! "$PYTHON_DB" == "${RUNNER_TEMP}/customDbLocation/python" ]]; then
echo "::error::Did not create a database for Python, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/python' but actual was '${PYTHON_DB}'"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -1,89 +0,0 @@
# There was a bug, fixed in CLI v2.5.1, that didn't propagate environment
# variables that the Java tracer needed. Here we test all languages
# except Java for these CLI versions. In `unset-environment-new-cli.yml`
# we test all languages for recent CLI versions.
name: PR Check - Test unsetting environment variables for CLI version < 2.5.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
unset-environment:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
name: Test unsetting environment variables
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: csharp,cpp,go,javascript,python
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
- shell: bash
run: |
CPP_DB="${{ fromJson(steps.analysis.outputs.db-locations).cpp }}"
if [[ ! -d "$CPP_DB" ]] || [[ ! "$CPP_DB" == "${RUNNER_TEMP}/customDbLocation/cpp" ]]; then
echo "::error::Did not create a database for CPP, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/cpp' but actual was '${CPP_DB}'"
exit 1
fi
CSHARP_DB="${{ fromJson(steps.analysis.outputs.db-locations).csharp }}"
if [[ ! -d "$CSHARP_DB" ]] || [[ ! "$CSHARP_DB" == "${RUNNER_TEMP}/customDbLocation/csharp" ]]; then
echo "::error::Did not create a database for C Sharp, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/csharp' but actual was '${CSHARP_DB}'"
exit 1
fi
GO_DB="${{ fromJson(steps.analysis.outputs.db-locations).go }}"
if [[ ! -d "$GO_DB" ]] || [[ ! "$GO_DB" == "${RUNNER_TEMP}/customDbLocation/go" ]]; then
echo "::error::Did not create a database for Go, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/go' but actual was '${GO_DB}'"
exit 1
fi
JAVASCRIPT_DB="${{ fromJson(steps.analysis.outputs.db-locations).javascript }}"
if [[ ! -d "$JAVASCRIPT_DB" ]] || [[ ! "$JAVASCRIPT_DB" == "${RUNNER_TEMP}/customDbLocation/javascript" ]]; then
echo "::error::Did not create a database for Javascript, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/javascript' but actual was '${JAVASCRIPT_DB}'"
exit 1
fi
PYTHON_DB="${{ fromJson(steps.analysis.outputs.db-locations).python }}"
if [[ ! -d "$PYTHON_DB" ]] || [[ ! "$PYTHON_DB" == "${RUNNER_TEMP}/customDbLocation/python" ]]; then
echo "::error::Did not create a database for Python, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/python' but actual was '${PYTHON_DB}'"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true

View File

@@ -11,7 +11,7 @@ jobs:
if: contains(github.event.pull_request.labels.*.name, 'Update dependencies') && (github.event.pull_request.head.repo.full_name == 'github/codeql-action')
steps:
- name: Checkout repository
-uses: actions/checkout@v3
+uses: actions/checkout@v2
- name: Remove PR label
env:

View File

@@ -7,7 +7,7 @@ on:
# When the v2 release is complete, this workflow will open a PR to update the v1 release branch.
push:
branches:
- - releases/v2
+ - v2
jobs:
update:
@@ -23,13 +23,13 @@ jobs:
GITHUB_CONTEXT: '${{ toJson(github) }}'
run: echo "$GITHUB_CONTEXT"
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v2
with:
# Need full history so we calculate diffs
fetch-depth: 0
- name: Set up Python
-uses: actions/setup-python@v3
+uses: actions/setup-python@v2
with:
python-version: 3.8

View File

@@ -13,13 +13,13 @@ jobs:
steps:
- name: Setup Python
-uses: actions/setup-python@v3
+uses: actions/setup-python@v2
with:
python-version: "3.7"
- name: Checkout CodeQL Action
-uses: actions/checkout@v3
+uses: actions/checkout@v2
- name: Checkout Enterprise Releases
-uses: actions/checkout@v3
+uses: actions/checkout@v2
with:
repository: github/enterprise-releases
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}

View File

@@ -1,104 +1,7 @@
# CodeQL Action Changelog
-## [UNRELEASED]
+## 1.1.6 - 30 Mar 2022
- Update default CodeQL bundle version to 2.11.0. [#1267](https://github.com/github/codeql-action/pull/1267)
## 2.1.25 - 21 Sep 2022
- We will soon be rolling out a feature of the CodeQL Action that stores some information used to make future runs faster in the GitHub Actions cache. Initially, this will only be enabled on JavaScript repositories, but we plan to add more languages to this soon. The new feature can be disabled by passing the `trap-caching: false` option to your workflow's `init` step, for example if you are already using the GitHub Actions cache for a different purpose and are near the storage limit for it.
- Add support for Python automatic dependency installation with Poetry 1.2 [#1258](https://github.com/github/codeql-action/pull/1258).
## 2.1.24 - 16 Sep 2022
No user facing changes.
## 2.1.23 - 14 Sep 2022
- Allow CodeQL packs to be downloaded from GitHub Enterprise Server instances, using the new `registries` input for the `init` action. [#1221](https://github.com/github/codeql-action/pull/1221)
- Update default CodeQL bundle version to 2.10.5. [#1240](https://github.com/github/codeql-action/pull/1240)
## 2.1.22 - 01 Sep 2022
- Downloading CodeQL packs has been moved to the `init` step. Previously, CodeQL packs were downloaded during the `analyze` step. [#1218](https://github.com/github/codeql-action/pull/1218)
- Update default CodeQL bundle version to 2.10.4. [#1224](https://github.com/github/codeql-action/pull/1224)
- The newly released [Poetry 1.2](https://python-poetry.org/blog/announcing-poetry-1.2.0) is not yet supported. In the most common case where the CodeQL Action is automatically installing Python dependencies, it will continue to install and use Poetry 1.1 on its own. However, in certain cases such as with self-hosted runners, you may need to ensure Poetry 1.1 is installed yourself.
## 2.1.21 - 25 Aug 2022
- Improve error messages when the code scanning configuration file includes an invalid `queries` block or an invalid `query-filters` block. [#1208](https://github.com/github/codeql-action/pull/1208)
- Fix a bug where Go build tracing could fail on Windows. [#1209](https://github.com/github/codeql-action/pull/1209)
## 2.1.20 - 22 Aug 2022
No user facing changes.
## 2.1.19 - 17 Aug 2022
- Add the ability to filter queries from a code scanning run by using the `query-filters` option in the code scanning configuration file. [#1098](https://github.com/github/codeql-action/pull/1098)
- In debug mode, debug artifacts are now uploaded even if a step in the Actions workflow fails. [#1159](https://github.com/github/codeql-action/pull/1159)
- Update default CodeQL bundle version to 2.10.3. [#1178](https://github.com/github/codeql-action/pull/1178)
- The combination of python2 and Pipenv is no longer supported. [#1181](https://github.com/github/codeql-action/pull/1181)
## 2.1.18 - 03 Aug 2022
- Update default CodeQL bundle version to 2.10.2. [#1156](https://github.com/github/codeql-action/pull/1156)
## 2.1.17 - 28 Jul 2022
- Update default CodeQL bundle version to 2.10.1. [#1143](https://github.com/github/codeql-action/pull/1143)
## 2.1.16 - 13 Jul 2022
- You can now quickly debug a job that uses the CodeQL Action by re-running the job from the GitHub UI and selecting the "Enable debug logging" option. [#1132](https://github.com/github/codeql-action/pull/1132)
- You can now see diagnostic messages produced by the analysis in the logs of the `analyze` Action by enabling debug mode. To enable debug mode, pass `debug: true` to the `init` Action, or [enable step debug logging](https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging). This feature is available for CodeQL CLI version 2.10.0 and later. [#1133](https://github.com/github/codeql-action/pull/1133)
## 2.1.15 - 28 Jun 2022
- CodeQL query packs listed in the `packs` configuration field will be skipped if their target language is not being analyzed in the current Actions job. Previously, this would throw an error. [#1116](https://github.com/github/codeql-action/pull/1116)
- The combination of python2 and poetry is no longer supported. See <https://github.com/actions/setup-python/issues/374> for more details. [#1124](https://github.com/github/codeql-action/pull/1124)
- Update default CodeQL bundle version to 2.10.0. [#1123](https://github.com/github/codeql-action/pull/1123)
## 2.1.14 - 22 Jun 2022
No user facing changes.
## 2.1.13 - 21 Jun 2022
- Update default CodeQL bundle version to 2.9.4. [#1100](https://github.com/github/codeql-action/pull/1100)
## 2.1.12 - 01 Jun 2022
- Update default CodeQL bundle version to 2.9.3. [#1084](https://github.com/github/codeql-action/pull/1084)
## 2.1.11 - 17 May 2022
- Update default CodeQL bundle version to 2.9.2. [#1074](https://github.com/github/codeql-action/pull/1074)
## 2.1.10 - 10 May 2022
- Update default CodeQL bundle version to 2.9.1. [#1056](https://github.com/github/codeql-action/pull/1056)
- When `wait-for-processing` is enabled, the workflow will now fail if there were any errors that occurred during processing of the analysis results.
## 2.1.9 - 27 Apr 2022
- Add `working-directory` input to the `autobuild` action. [#1024](https://github.com/github/codeql-action/pull/1024)
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#1007](https://github.com/github/codeql-action/pull/1007)
- Update default CodeQL bundle version to 2.9.0.
- Fix a bug where [status reporting fails on Windows](https://github.com/github/codeql-action/issues/1041). [#1042](https://github.com/github/codeql-action/pull/1042)
## 2.1.8 - 08 Apr 2022
- Update default CodeQL bundle version to 2.8.5. [#1014](https://github.com/github/codeql-action/pull/1014)
- Fix error where the init action would fail due to a GitHub API request that was taking too long to complete [#1025](https://github.com/github/codeql-action/pull/1025)
## 2.1.7 - 05 Apr 2022
- A bug where additional queries specified in the workflow file would sometimes not be respected has been fixed. [#1018](https://github.com/github/codeql-action/pull/1018)
## 2.1.6 - 30 Mar 2022
- [v2+ only] The CodeQL Action now runs on Node.js v16. [#1000](https://github.com/github/codeql-action/pull/1000)
- Update default CodeQL bundle version to 2.8.4. [#990](https://github.com/github/codeql-action/pull/990)
- Fix a bug where an invalid `commit_oid` was being sent to code scanning when a custom checkout path was being used. [#956](https://github.com/github/codeql-action/pull/956)

View File

@@ -1,3 +1 @@
**/* @github/codeql-action-reviewers
/python-setup/ @github/codeql-python @github/codeql-action-reviewers

View File

@@ -61,30 +61,41 @@ Here are a few things you can do that will increase the likelihood of your pull
## Releasing (write access required)
1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
-This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `releases/v2` release branch.
+This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v2` release branch.
You can start a release by triggering this workflow via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
-1. The workflow run will open a pull request titled "Merge main into releases/v2". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
+1. The workflow run will open a pull request titled "Merge main into v2". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
1. Review the checklist items in the pull request description.
Once you've checked off all but the last two of these, approve the PR and automerge it.
-1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Tag release and merge back" workflow will create a mergeback PR.
+1. When the "Merge main into v2" pull request is merged into the `v2` branch, the "Tag release and merge back" workflow will create a mergeback PR.
-This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.
+This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into v2" pull request, and bumps the patch version of the CodeQL Action.
Approve the mergeback PR and automerge it.
-1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Update release branch" workflow will create a "Merge releases/v2 into releases/v1" pull request to merge the changes since the last release into the `releases/v1` release branch.
+1. When the "Merge main into v2" pull request is merged into the `v2` branch, the "Update release branch" workflow will create a "Merge v2 into v1" pull request to merge the changes since the last release into the `v1` release branch.
-This ensures we keep both the `releases/v1` and `releases/v2` release branches up to date and fully supported.
+This ensures we keep both the `v1` and `v2` release branches up to date and fully supported.
Review the checklist items in the pull request description.
Once you've checked off all the items, approve the PR and automerge it.
-1. Once the mergeback has been merged to `main` and the "Merge releases/v2 into releases/v1" PR has been merged to `releases/v1`, the release is complete.
+1. Once the mergeback has been merged to `main` and the "Merge v2 into v1" PR has been merged to `v1`, the release is complete.
## Keeping the PR checks up to date (admin access required)
-Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. You can regenerate the checks automatically by running the [update-required-checks.sh](.github/workflows/script/update-required-checks.sh) script:
+Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. Managing these PR checks manually is time consuming and complex. Here is a semi-automated approach.
-1. By default, this script retrieves the checks from the latest SHA on `main`, so make sure that your `main` branch is up to date.
-2. Run the script. If there's a reason to, you can pass in a different SHA as a CLI argument.
-3. After running, go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules for `main`, `v1`, and `v2` have been updated.
+To regenerate the PR jobs for the action:
+1. From a terminal, run the following commands (replace `SHA` with the sha of the commit whose checks you want to use, typically this should be the latest from `main`):
```sh
SHA= ####
CHECKS="$(gh api repos/github/codeql-action/commits/${SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or . == "Update dependencies" or . == "Update Supported Enterprise Server Versions" | not)]')"
echo "{\"contexts\": ${CHECKS}}" > checks.json
gh api -X "PATCH" repos/github/codeql-action/branches/main/protection/required_status_checks --input checks.json
gh api -X "PATCH" repos/github/codeql-action/branches/v2/protection/required_status_checks --input checks.json
gh api -X "PATCH" repos/github/codeql-action/branches/v1/protection/required_status_checks --input checks.json
````
2. Go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules have been updated.
## Resources

View File

@@ -52,11 +52,11 @@ jobs:
steps:
- name: Checkout repository
-uses: actions/checkout@v3
+uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
-uses: github/codeql-action/init@v2
+uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
@@ -64,10 +64,10 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below).
- name: Autobuild
-uses: github/codeql-action/autobuild@v2
+uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
-# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following
# three lines and modify them (or add more) to build your code if your
@@ -78,14 +78,14 @@ jobs:
# make release
- name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@v2
+uses: github/codeql-action/analyze@v1
```
If you prefer to integrate this within an existing CI workflow, it should end up looking something like this:
```yaml
- name: Initialize CodeQL
-uses: github/codeql-action/init@v2
+uses: github/codeql-action/init@v1
with:
languages: go, javascript
@@ -95,7 +95,7 @@ If you prefer to integrate this within an existing CI workflow, it should end up
make release
- name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@v2
+uses: github/codeql-action/analyze@v1
```
### Configuration file
@@ -103,7 +103,7 @@ If you prefer to integrate this within an existing CI workflow, it should end up
Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
```yaml
-- uses: github/codeql-action/init@v2
+- uses: github/codeql-action/init@v1
with:
config-file: ./.github/codeql/codeql-config.yml
```
@@ -111,7 +111,7 @@ Use the `config-file` parameter of the `init` action to enable the configuration
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
```yaml
-- uses: github/codeql-action/init@v2
+- uses: github/codeql-action/init@v1
with:
config-file: owner/repo/codeql-config.yml@branch
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
@@ -122,7 +122,7 @@ For information on how to write a configuration file, see "[Using a custom confi
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
```yaml
-- uses: github/codeql-action/init@v2
+- uses: github/codeql-action/init@v1
with:
queries: <local-or-remote-query>,<another-query>
```
@@ -130,7 +130,7 @@ If you only want to customise the queries used, you can specify them in your wor
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:
```yaml
-- uses: github/codeql-action/init@v2
+- uses: github/codeql-action/init@v1
with:
queries: +<local-or-remote-query>,<another-query>
```
@@ -139,3 +139,10 @@ By default, this will override any queries specified in a config file. If you wi
Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
### Note on "missing analysis" message
The very first time code scanning is run and if it is on a pull request, you will probably get a message mentioning a "missing analysis". This is expected.
After code scanning has analyzed the code in a pull request, it needs to compare the analysis of the topic branch (the merge commit of the branch you used to create the pull request) with the analysis of the base branch (the branch into which you want to merge the pull request). This allows code scanning to compute which alerts are newly introduced by the pull request, which alerts were already present in the base branch, and whether any existing alerts are fixed by the changes in the pull request. Initially, if you use a pull request to add code scanning to a repository, the base branch has not yet been analyzed, so it's not possible to compute these details. In this case, when you click through from the results check on the pull request you will see the "Missing analysis for base commit SHA-HASH" message.
For more information and other causes of this message, see [Reasons for the "Analysis not found" message](https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/setting-up-code-scanning-for-a-repository#reasons-for-the-analysis-not-found-message)

View File

@@ -61,21 +61,16 @@ inputs:
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
-default: "true"
+default: "false"
token:
default: ${{ github.token }}
matrix:
default: ${{ toJson(matrix) }}
expect-error:
description: "[Internal] It is an error to use this input outside of integration testing of the codeql-action."
required: false
default: "false"
outputs:
db-locations:
description: A map from language to absolute path for each database created by CodeQL.
sarif-id:
description: The ID of the uploaded SARIF file.
runs:
-using: "node16"
+using: "node12"
main: "../lib/analyze-action.js"
post: "../lib/analyze-action-post.js"

View File

@@ -6,12 +6,6 @@ inputs:
default: ${{ github.token }}
matrix:
default: ${{ toJson(matrix) }}
working-directory:
description: >-
Run the autobuilder using this path (relative to $GITHUB_WORKSPACE) as
working directory. If this input is not set, the autobuilder runs with
$GITHUB_WORKSPACE as its working directory.
required: false
runs:
-using: 'node16'
+using: 'node12'
main: '../lib/autobuild-action.js'

View File

@@ -10,34 +10,9 @@ inputs:
description: The languages to be analysed
required: false
token:
description: GitHub token to use for authenticating with this instance of GitHub. To download custom packs from multiple registries, use the registries input.
default: ${{ github.token }}
required: false
registries:
description: |
Use this input only when you need to download CodeQL packages from another instance of GitHub. If you only need to download packages from this GitHub instance, use the token input instead.
A YAML string that defines the list of GitHub container registries to use for downloading packs. The string is in the following form (the | is required on the first line):
registries: |
- url: https://containers.GHEHOSTNAME1/v2/
packages:
- my-company/*
- my-company2/*
token: \$\{{ secrets.GHEHOSTNAME1_TOKEN }}
- url: https://ghcr.io/v2/
packages: */*
token: \$\{{ secrets.GHCR_TOKEN }}
The `url` property contains the URL to the container registry you want to connect to.
The `packages` property contains a single glob string or a list of glob strings, specifying which packages should be retrieved from this particular container registry. Order is important. Earlier entries will match before later entries.
-The `token` property contains a connection token for this registry.
+required: false
matrix:
default: ${{ toJson(matrix) }}
required: false
config-file:
description: Path of the config file to use
required: false
@@ -57,7 +32,7 @@ inputs:
analyses, you must specify packs in the codeql-config.yml file.
required: false
external-repository-token:
-description: A token for fetching external config files and queries if they reside in a private repository in the same GitHub instance that is running this action.
+description: A token for fetching external config files and queries if they reside in a private repository.
required: false
setup-python-dependencies:
description: Try to auto-install your python dependencies
@@ -81,10 +56,7 @@ inputs:
This input also sets the number of threads that can later be used by the "analyze" action.
required: false
debug:
-description: >-
+description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
Enable debugging mode.
This will result in more output being produced which may be useful when debugging certain issues.
Debugging mode is enabled automatically when step debug logging is turned on.
required: false
default: 'false'
debug-artifact-name:
@@ -97,14 +69,9 @@ inputs:
The name of the database uploaded to the debugging artifact.
This is only used when debug mode is enabled.
required: false
trap-caching:
description: >-
Explicitly enable or disable TRAP caching rather than respecting the feature flag for it.
required: false
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis
runs:
-using: 'node16'
+using: 'node12'
main: '../lib/init-action.js'
post: '../lib/init-action-post.js'

lib/actions-util.js generated
View File

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
+exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
@@ -66,6 +66,13 @@ function getTemporaryDirectory() {
: (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
}
exports.getTemporaryDirectory = getTemporaryDirectory;
function getToolCacheDirectory() {
const value = process.env["CODEQL_ACTION_TOOL_CACHE"];
return value !== undefined && value !== ""
? value
: (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE");
}
exports.getToolCacheDirectory = getToolCacheDirectory;
/**
* Gets the SHA of the commit that is currently checked out.
*/
@@ -105,7 +112,7 @@ exports.getCommitOid = getCommitOid;
* Returns undefined if run by other triggers or the merge base cannot be determined.
*/
const determineMergeBaseCommitOid = async function () {
-if (workflowEventName() !== "pull_request") {
+if (process.env.GITHUB_EVENT_NAME !== "pull_request") {
return undefined;
}
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
@@ -350,7 +357,7 @@ async function getWorkflowPath() {
const repo = repo_nwo[1];
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
-const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
+const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
owner,
repo,
run_id,
@@ -448,11 +455,11 @@ async function getRef() {
return ref;
}
const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
-// in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
+// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
// in actions/checkout@v1 this may not be true as it checks out the repository
// using GITHUB_REF. There is a subtle race condition where
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
-// git rev-parse GITHUB_REF == git rev-parse HEAD instead.
+// git git-parse GITHUB_REF == git rev-parse HEAD instead.
const hasChangedRef = sha !== head &&
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
if (hasChangedRef) {
@@ -501,7 +508,11 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
}
const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
-const actionRef = process.env["GITHUB_ACTION_REF"];
+// If running locally then the GITHUB_ACTION_REF cannot be trusted as it may be for the previous action
// See https://github.com/actions/runner/issues/803
const actionRef = isRunningLocalAction()
? undefined
: process.env["GITHUB_ACTION_REF"];
const statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
@@ -573,7 +584,8 @@ async function sendStatusReport(statusReport) {
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
// If in test mode we don't want to upload the results
-if ((0, util_1.isInTestMode)()) {
+const testMode = process.env["TEST_MODE"] === "true" || false;
if (testMode) {
core.debug("In test mode. Status reports are not uploaded."); core.debug("In test mode. Status reports are not uploaded.");
return true; return true;
} }
@@ -626,21 +638,9 @@ async function sendStatusReport(statusReport) {
}
}
exports.sendStatusReport = sendStatusReport;
function workflowEventName() {
// If the original event is dynamic CODESCANNING_EVENT_NAME will contain the right info (push/pull_request)
if (process.env["GITHUB_EVENT_NAME"] === "dynamic") {
const value = process.env["CODESCANNING_EVENT_NAME"];
if (value === undefined || value.length === 0) {
return process.env["GITHUB_EVENT_NAME"];
}
return value;
}
return process.env["GITHUB_EVENT_NAME"];
}
exports.workflowEventName = workflowEventName;
// Was the workflow run triggered by a `push` event, for example as opposed to a `pull_request` event.
function workflowIsTriggeredByPushEvent() {
-return workflowEventName() === "push";
+return process.env["GITHUB_EVENT_NAME"] === "push";
}
// Is dependabot the actor that triggered the current workflow run.
function isDependabotActor() {
@@ -671,51 +671,22 @@ function getWorkflowEvent() {
throw new Error(`Unable to read workflow event JSON from ${eventJsonFile}: ${e}`);
}
}
function removeRefsHeadsPrefix(ref) {
return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
}
// Is the version of the repository we are currently analyzing from the default branch,
// or alternatively from another branch or a pull request.
async function isAnalyzingDefaultBranch() {
var _a;
// Get the current ref and trim and refs/heads/ prefix
let currentRef = await getRef();
-currentRef = removeRefsHeadsPrefix(currentRef);
+currentRef = currentRef.startsWith("refs/heads/")
? currentRef.slice("refs/heads/".length)
: currentRef;
const event = getWorkflowEvent();
-let defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
+const defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
if (process.env.GITHUB_EVENT_NAME === "schedule") {
defaultBranch = removeRefsHeadsPrefix((0, util_1.getRequiredEnvParam)("GITHUB_REF"));
}
return currentRef === defaultBranch;
}
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
-async function printDebugLogs(config) {
-for (const language of config.languages) {
+function sanitizeArifactName(name) {
+return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
const logsDirectory = path.join(databaseDirectory, "log");
if (!(0, util_1.doesDirectoryExist)(logsDirectory)) {
core.info(`Directory ${logsDirectory} does not exist.`);
continue; // Skip this language database.
}
const walkLogFiles = (dir) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
if (entries.length === 0) {
core.info(`No debug logs found at directory ${logsDirectory}.`);
}
for (const entry of entries) {
if (entry.isFile()) {
const absolutePath = path.resolve(dir, entry.name);
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name} from file at path ${absolutePath}`);
process.stdout.write(fs.readFileSync(absolutePath));
core.endGroup();
}
else if (entry.isDirectory()) {
walkLogFiles(path.resolve(dir, entry.name));
}
}
};
walkLogFiles(logsDirectory);
}
}
-exports.printDebugLogs = printDebugLogs;
+exports.sanitizeArifactName = sanitizeArifactName;
//# sourceMappingURL=actions-util.js.map

File diff suppressed because one or more lines are too long

View File

@@ -495,31 +495,12 @@ on: ["push"]
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
process.env["GITHUB_REF"] = "feature";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
fs.writeFileSync(envFile, JSON.stringify({
schedule: "0 0 * * *",
}));
process.env["GITHUB_EVENT_NAME"] = "schedule";
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub
.withArgs("ref")
.resolves("refs/heads/something-else");
getAdditionalInputStub
.withArgs("sha")
.resolves("0000000000000000000000000000000000000000");
process.env["GITHUB_EVENT_NAME"] = "schedule";
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
getAdditionalInputStub.restore();
});
});
-(0, ava_1.default)("workflowEventName()", async (t) => {
-process.env["GITHUB_EVENT_NAME"] = "push";
-t.deepEqual(actionsutil.workflowEventName(), "push");
-process.env["GITHUB_EVENT_NAME"] = "dynamic";
-t.deepEqual(actionsutil.workflowEventName(), "dynamic");
-process.env["CODESCANNING_EVENT_NAME"] = "push";
-t.deepEqual(actionsutil.workflowEventName(), "push");
+(0, ava_1.default)("sanitizeArifactName", (t) => {
+t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
+t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
+t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
+t.deepEqual(actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
});
//# sourceMappingURL=actions-util.test.js.map

File diff suppressed because one or more lines are too long

lib/analysis-paths.js generated
View File

@@ -58,11 +58,14 @@ function includeAndExcludeAnalysisPaths(config) {
}
// If the temporary or tools directory is in the working directory ignore that too.
const tempRelativeToWorking = path.relative(process.cwd(), config.tempDir);
const toolsRelativeToWorking = path.relative(process.cwd(), config.toolCacheDir);
let pathsIgnore = config.pathsIgnore; let pathsIgnore = config.pathsIgnore;
if (!tempRelativeToWorking.startsWith("..") && if (!tempRelativeToWorking.startsWith("..")) {
!path.isAbsolute(tempRelativeToWorking)) {
pathsIgnore = pathsIgnore.concat(tempRelativeToWorking); pathsIgnore = pathsIgnore.concat(tempRelativeToWorking);
} }
if (!toolsRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(toolsRelativeToWorking);
}
if (pathsIgnore.length !== 0) { if (pathsIgnore.length !== 0) {
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(pathsIgnore); process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(pathsIgnore);
} }
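The relative-path check above only excludes a directory when path.relative() does not escape the working directory. A small self-contained sketch of that check (illustrative only; the directory names are invented):

    // Illustrative sketch: add a directory to the exclude list only if it sits inside the working directory.
    const path = require("path");

    function excludeIfInsideCwd(pathsIgnore, dir) {
        const rel = path.relative(process.cwd(), dir);
        // A ".."-prefixed or absolute result means the directory is outside the working directory.
        if (!rel.startsWith("..") && !path.isAbsolute(rel)) {
            return pathsIgnore.concat(rel);
        }
        return pathsIgnore;
    }

    console.log(excludeIfInsideCwd([], path.join(process.cwd(), "codeql-runner-temp"))); // [ 'codeql-runner-temp' ]
    console.log(excludeIfInsideCwd([], "/somewhere/else"));                              // []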


@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IACE,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC;QACvC,CAAC,IAAI,CAAC,UAAU,CAAC,qBAAqB,CAAC,EACvC;QACA,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AAjCD,wEAiCC"} 
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}


@@ -37,6 +37,7 @@ const util = __importStar(require("./util"));
 paths: [],
 originalUserInput: {},
 tempDir: tmpDir,
+toolCacheDir: tmpDir,
 codeQLCmd: "",
 gitHubVersion: { type: util.GitHubVariant.DOTCOM },
 dbLocation: path.resolve(tmpDir, "codeql_databases"),
@@ -44,13 +45,7 @@ const util = __importStar(require("./util"));
 debugMode: false,
 debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
 debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
-augmentationProperties: {
-    injectedMlQueries: false,
-    packsInputCombines: false,
-    queriesInputCombines: false,
-},
-trapCaches: {},
-trapCacheDownloadTime: 0,
+injectedMlQueries: false,
 };
 analysisPaths.includeAndExcludeAnalysisPaths(config);
 t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -67,6 +62,7 @@ const util = __importStar(require("./util"));
 pathsIgnore: ["path4", "path5", "path6/**"],
 originalUserInput: {},
 tempDir: tmpDir,
+toolCacheDir: tmpDir,
 codeQLCmd: "",
 gitHubVersion: { type: util.GitHubVariant.DOTCOM },
 dbLocation: path.resolve(tmpDir, "codeql_databases"),
@@ -74,13 +70,7 @@ const util = __importStar(require("./util"));
 debugMode: false,
 debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
 debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
-augmentationProperties: {
-    injectedMlQueries: false,
-    packsInputCombines: false,
-    queriesInputCombines: false,
-},
-trapCaches: {},
-trapCacheDownloadTime: 0,
+injectedMlQueries: false,
 };
 analysisPaths.includeAndExcludeAnalysisPaths(config);
 t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -89,32 +79,29 @@ const util = __importStar(require("./util"));
 });
 });
 (0, ava_1.default)("exclude temp dir", async (t) => {
-    const tempDir = path.join(process.cwd(), "codeql-runner-temp");
-    const config = {
-        languages: [],
-        queries: {},
-        pathsIgnore: [],
-        paths: [],
-        originalUserInput: {},
-        tempDir,
-        codeQLCmd: "",
-        gitHubVersion: { type: util.GitHubVariant.DOTCOM },
-        dbLocation: path.resolve(tempDir, "codeql_databases"),
-        packs: {},
-        debugMode: false,
-        debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
-        debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
-        augmentationProperties: {
-            injectedMlQueries: false,
-            packsInputCombines: false,
-            queriesInputCombines: false,
-        },
-        trapCaches: {},
-        trapCacheDownloadTime: 0,
-    };
-    analysisPaths.includeAndExcludeAnalysisPaths(config);
-    t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
-    t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
-    t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
+    return await util.withTmpDir(async (toolCacheDir) => {
+        const tempDir = path.join(process.cwd(), "codeql-runner-temp");
+        const config = {
+            languages: [],
+            queries: {},
+            pathsIgnore: [],
+            paths: [],
+            originalUserInput: {},
+            tempDir,
+            toolCacheDir,
+            codeQLCmd: "",
+            gitHubVersion: { type: util.GitHubVariant.DOTCOM },
+            dbLocation: path.resolve(tempDir, "codeql_databases"),
+            packs: {},
+            debugMode: false,
+            debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
+            debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
+            injectedMlQueries: false,
+        };
+        analysisPaths.includeAndExcludeAnalysisPaths(config);
+        t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
+        t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
+        t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
+    });
 });
 //# sourceMappingURL=analysis-paths.test.js.map


@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,sBAAsB,EAAE;gBACtB,iBAAiB,EAAE,KAAK;gBACxB,kBAAkB,EAAE,KAAK;gBACzB,oBAAoB,EAAE,KAAK;aAC5B;YACD,UAAU,EAAE,EAAE;YACd,qBAAqB,EAAE,CAAC;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,sBAAsB,EAAE;gBACtB,iBAAiB,EAAE,KAAK;gBACxB,kBAAkB,EAAE,KAAK;gBACzB,oBAAoB,EAAE,KAAK;aAC5B;YACD,UAAU,EAAE,EAAE;YACd,qBAAqB,EAAE,CAAC;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;IAC/D,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;QACrB,OAAO;QACP,SAAS,EAAE,EAAE;QACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;QACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;QACrD,KAAK,EAAE,EAAE;QACT,SAAS,EAAE,KAAK;QAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,sBAAsB,EAAE;YACtB,iBAAiB,EAAE,KAAK;YACxB,kBAAkB,EAAE,KAAK;YACzB,oBAAoB,EAAE,KAAK;SAC5B;QACD,UAAU,EAAE,EAAE;QACd,qBAAqB,EAAE,CAAC;KACzB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;IAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC"} 
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}


@@ -38,29 +38,22 @@ const util = __importStar(require("./util"));
 // but the first test would fail.
 (0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
     await util.withTmpDir(async (tmpDir) => {
-        process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
-        process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
-        process.env["GITHUB_API_URL"] = "https://api.github.com";
+        process.env["GITHUB_SERVER_URL"] = "fake-server-url";
+        process.env["GITHUB_REPOSITORY"] = "fake/repository";
         sinon
             .stub(actionsUtil, "createStatusReportBase")
             .resolves({});
         sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
-        const gitHubVersion = {
-            type: util.GitHubVariant.DOTCOM,
-        };
         sinon.stub(configUtils, "getConfig").resolves({
-            gitHubVersion,
+            gitHubVersion: { type: util.GitHubVariant.DOTCOM },
             languages: [],
             packs: [],
-            trapCaches: {},
         });
         const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
         requiredInputStub.withArgs("token").returns("fake-token");
         requiredInputStub.withArgs("upload-database").returns("false");
         const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
         optionalInputStub.withArgs("cleanup-level").returns("none");
-        optionalInputStub.withArgs("expect-error").returns("false");
-        sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
         (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
         (0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
         // When there are no action inputs for RAM and threads, the action uses
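The test stubs above feed RAM and thread settings through action inputs and the CODEQL_RAM / CODEQL_THREADS environment variables. A rough, hypothetical sketch of that precedence (action input first, then environment variable); the flag spelling in the sketch is assumed, not taken from the action's code:

    // Hypothetical sketch of input-vs-environment precedence for the RAM setting.
    function resolveRam(actionInput, env) {
        const raw = actionInput || env["CODEQL_RAM"];
        return raw ? `--ram=${Number(raw)}` : undefined; // flag name here is illustrative
    }

    console.log(resolveRam(undefined, { CODEQL_RAM: "256" })); // "--ram=256"
    console.log(resolveRam("512", { CODEQL_RAM: "256" }));     // "--ram=512" (input wins)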


@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"} 
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}


@@ -38,29 +38,22 @@ const util = __importStar(require("./util"));
 // but the first test would fail.
 (0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
     await util.withTmpDir(async (tmpDir) => {
-        process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
-        process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
-        process.env["GITHUB_API_URL"] = "https://api.github.com";
+        process.env["GITHUB_SERVER_URL"] = "fake-server-url";
+        process.env["GITHUB_REPOSITORY"] = "fake/repository";
         sinon
             .stub(actionsUtil, "createStatusReportBase")
             .resolves({});
         sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
-        const gitHubVersion = {
-            type: util.GitHubVariant.DOTCOM,
-        };
         sinon.stub(configUtils, "getConfig").resolves({
-            gitHubVersion,
+            gitHubVersion: { type: util.GitHubVariant.DOTCOM },
             languages: [],
             packs: [],
-            trapCaches: {},
         });
         const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
         requiredInputStub.withArgs("token").returns("fake-token");
         requiredInputStub.withArgs("upload-database").returns("false");
         const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
         optionalInputStub.withArgs("cleanup-level").returns("none");
-        optionalInputStub.withArgs("expect-error").returns("false");
-        sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
         (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
         (0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
         process.env["CODEQL_THREADS"] = "1";


@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"} 
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}


@@ -1,41 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.run = void 0;
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
async function run(uploadSarifDebugArtifact) {
const logger = (0, logging_1.getActionsLogger)();
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Did the 'init' action fail to start?");
}
// Upload Actions SARIF artifacts for debugging
if (config === null || config === void 0 ? void 0 : config.debugMode) {
core.info("Debug mode is on. Uploading available SARIF files as Actions debugging artifact...");
const outputDir = actionsUtil.getRequiredInput("output");
await uploadSarifDebugArtifact(config, outputDir);
}
}
exports.run = run;
//# sourceMappingURL=analyze-action-post-helper.js.map
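The removed helper above only invokes the supplied upload callback when the stored config has debugMode enabled. A hedged standalone sketch of the same gating pattern, with loadConfig standing in for the real config lookup (it is not the action's API):

    // Illustrative sketch of the debug-mode gate; `loadConfig` is a stand-in, not the action's getConfig.
    async function runPostStep(loadConfig, uploadSarifDebugArtifact) {
        const config = await loadConfig();
        if (config === undefined) {
            throw new Error("Config not found. Did the 'init' action fail to start?");
        }
        if (config.debugMode) {
            // Only upload debugging artifacts when debug mode was requested.
            await uploadSarifDebugArtifact(config, "fake-output-dir");
        }
    }

    // With debugMode off, the upload callback is never called.
    void runPostStep(async () => ({ debugMode: false }), async () => console.log("uploaded"));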


@@ -1 +0,0 @@
{"version":3,"file":"analyze-action-post-helper.js","sourceRoot":"","sources":["../src/analyze-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,uCAA6C;AAEtC,KAAK,UAAU,GAAG,CAAC,wBAAkC;IAC1D,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;KACH;IAED,+CAA+C;IAC/C,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,IAAI,CAAC,IAAI,CACP,oFAAoF,CACrF,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,wBAAwB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;KACnD;AACH,CAAC;AAlBD,kBAkBC"}


@@ -1,69 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyzeActionPostHelper = __importStar(require("./analyze-action-post-helper"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("post: analyze action with debug mode off", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
debugMode: false,
gitHubVersion,
languages: [],
packs: [],
});
const uploadSarifSpy = sinon.spy();
await analyzeActionPostHelper.run(uploadSarifSpy);
t.assert(uploadSarifSpy.notCalled);
});
});
(0, ava_1.default)("post: analyze action with debug mode on", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
debugMode: true,
gitHubVersion,
languages: [],
packs: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("output").returns("fake-output-dir");
const uploadSarifSpy = sinon.spy();
await analyzeActionPostHelper.run(uploadSarifSpy);
t.assert(uploadSarifSpy.called);
});
});
//# sourceMappingURL=analyze-action-post-helper.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"analyze-action-post-helper.test.js","sourceRoot":"","sources":["../src/analyze-action-post-helper.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,sFAAwE;AACxE,4DAA8C;AAC9C,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,0CAA0C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC3D,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QAEpC,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,KAAK;YAChB,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QAEpC,MAAM,cAAc,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;QAEnC,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QAElD,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QAEpC,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,IAAI;YACf,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QAEpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;QAEhE,MAAM,cAAc,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;QAEnC,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QAElD,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;IAClC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}


@@ -1,40 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* This file is the entry point for the `post:` hook of `analyze-action.yml`.
* It will run after the all steps in this job, in reverse order in relation to
* other `post:` hooks.
*/
const core = __importStar(require("@actions/core"));
const analyzeActionPostHelper = __importStar(require("./analyze-action-post-helper"));
const debugArtifacts = __importStar(require("./debug-artifacts"));
async function runWrapper() {
try {
await analyzeActionPostHelper.run(debugArtifacts.uploadSarifDebugArtifact);
}
catch (error) {
core.setFailed(`analyze post-action step failed: ${error}`);
console.log(error);
}
}
void runWrapper();
//# sourceMappingURL=analyze-action-post.js.map
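The wrapper above follows the usual entry-point pattern for these scripts: run the helper and, on any exception, mark the step failed and log the error. A self-contained sketch of that pattern, with doPost as a placeholder for the real work:

    // Illustrative sketch of the entry-point wrapper pattern; `doPost` is a placeholder.
    const core = require("@actions/core");

    async function doPost() {
        // real post-step work would go here
    }

    async function runWrapper() {
        try {
            await doPost();
        }
        catch (error) {
            // Mark the step as failed so the workflow surfaces the error, then log details.
            core.setFailed(`post-action step failed: ${error}`);
            console.log(error);
        }
    }

    void runWrapper();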


@@ -1 +0,0 @@
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,sFAAwE;AACxE,kEAAoD;AAEpD,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,wBAAwB,CAAC,CAAC;KAC5E;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,oCAAoC,KAAK,EAAE,CAAC,CAAC;QAC5D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

lib/analyze-action.js generated (219 changed lines)

@@ -18,33 +18,25 @@ var __importStar = (this && this.__importStar) || function (mod) {
 __setModuleDefault(result, mod);
 return result;
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.runPromise = exports.sendStatusReport = void 0;
 const fs = __importStar(require("fs"));
-const path_1 = __importDefault(require("path"));
-// We need to import `performance` on Node 12
-const perf_hooks_1 = require("perf_hooks");
+const path = __importStar(require("path"));
+const artifact = __importStar(require("@actions/artifact"));
 const core = __importStar(require("@actions/core"));
 const actionsUtil = __importStar(require("./actions-util"));
 const analyze_1 = require("./analyze");
-const api_client_1 = require("./api-client");
-const autobuild_1 = require("./autobuild");
 const codeql_1 = require("./codeql");
 const config_utils_1 = require("./config-utils");
 const database_upload_1 = require("./database-upload");
-const feature_flags_1 = require("./feature-flags");
-const languages_1 = require("./languages");
 const logging_1 = require("./logging");
 const repository_1 = require("./repository");
-const trap_caching_1 = require("./trap-caching");
 const upload_lib = __importStar(require("./upload-lib"));
 const util = __importStar(require("./util"));
+const util_1 = require("./util");
 // eslint-disable-next-line import/no-commonjs
 const pkg = require("../package.json");
-async function sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger) {
+async function sendStatusReport(startedAt, config, stats, error) {
     const status = actionsUtil.getActionsStatus(error, stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language);
     const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error === null || error === void 0 ? void 0 : error.message, error === null || error === void 0 ? void 0 : error.stack);
     const statusReport = {
@@ -55,115 +47,58 @@ async function sendStatusReport(startedAt, config, stats, error, trapCacheUpload
             }
             : {}),
         ...(stats || {}),
-        ...(dbCreationTimings || {}),
     };
-    if (config && didUploadTrapCaches) {
-        const trapCacheUploadStatusReport = {
-            ...statusReport,
-            trap_cache_upload_duration_ms: Math.round(trapCacheUploadTime || 0),
-            trap_cache_upload_size_bytes: Math.round(await (0, trap_caching_1.getTotalCacheSize)(config.trapCaches, logger)),
-        };
-        await actionsUtil.sendStatusReport(trapCacheUploadStatusReport);
-    }
-    else {
-        await actionsUtil.sendStatusReport(statusReport);
-    }
+    await actionsUtil.sendStatusReport(statusReport);
 }
 exports.sendStatusReport = sendStatusReport;
-// `expect-error` should only be set to a non-false value by the CodeQL Action PR checks.
-function hasBadExpectErrorInput() {
-    return (actionsUtil.getOptionalInput("expect-error") !== "false" &&
-        !util.isInTestMode());
-}
-/**
- * Returns whether any TRAP files exist under the `db-go` folder,
- * indicating whether Go extraction has extracted at least one file.
- */
-function doesGoExtractionOutputExist(config) {
-    const golangDbDirectory = util.getCodeQLDatabasePath(config, languages_1.Language.go);
-    const trapDirectory = path_1.default.join(golangDbDirectory, "trap", languages_1.Language.go);
-    return (fs.existsSync(trapDirectory) &&
-        fs
-            .readdirSync(trapDirectory)
-            .some((fileName) => [
-            ".trap",
-            ".trap.gz",
-            ".trap.br",
-            ".trap.tar.gz",
-            ".trap.tar.br",
-            ".trap.tar",
-        ].some((ext) => fileName.endsWith(ext))));
-}
-/**
- * When Go extraction reconciliation is enabled, either via the feature flag
- * or an environment variable, we will attempt to autobuild Go to preserve
- * compatibility for users who have set up Go using a legacy scanning style
- * CodeQL workflow, i.e. one without an autobuild step or manual build
- * steps.
- *
- * - We detect whether an autobuild step is present by checking the
- *   `util.DID_AUTOBUILD_GO_ENV_VAR_NAME` environment variable, which is set
- *   when the autobuilder is invoked.
- * - We approximate whether manual build steps are present by looking at
- *   whether any extraction output already exists for Go.
- */
-async function runAutobuildIfLegacyGoWorkflow(config, featureFlags, logger) {
-    if (!config.languages.includes(languages_1.Language.go)) {
-        return;
-    }
-    if (!(await util.isGoExtractionReconciliationEnabled(featureFlags))) {
-        logger.debug("Won't run Go autobuild since Go extraction reconciliation is not enabled.");
-        return;
-    }
-    if (process.env[util.DID_AUTOBUILD_GO_ENV_VAR_NAME] === "true") {
-        // This log line is info level while Go extraction reconciliation is in beta.
-        // We will make it debug level once Go extraction reconciliation is GA.
-        logger.info("Won't run Go autobuild since it has already been run.");
-        return;
-    }
-    // This captures whether a user has added manual build steps for Go
-    if (doesGoExtractionOutputExist(config)) {
-        // This log line is info level while Go extraction reconciliation is in beta.
-        // We will make it debug level once Go extraction reconciliation is GA.
-        logger.info("Won't run Go autobuild since at least one file of Go code has already been extracted.");
-        return;
-    }
-    await (0, autobuild_1.runAutobuild)(languages_1.Language.go, config, logger);
-}
 async function run() {
     const startedAt = new Date();
     let uploadResult = undefined;
     let runStats = undefined;
     let config = undefined;
-    let trapCacheUploadTime = undefined;
-    let dbCreationTimings = undefined;
-    let didUploadTrapCaches = false;
     util.initializeEnvironment(util.Mode.actions, pkg.version);
-    await util.checkActionVersion(pkg.version);
-    const logger = (0, logging_1.getActionsLogger)();
     try {
         if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
             return;
         }
+        const logger = (0, logging_1.getActionsLogger)();
         config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
         if (config === undefined) {
             throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
         }
-        if (hasBadExpectErrorInput()) {
-            throw new Error("`expect-error` input parameter is for internal use only. It should only be set by codeql-action or a fork.");
-        }
         await util.enrichEnvironment(util.Mode.actions, await (0, codeql_1.getCodeQL)(config.codeQLCmd));
-        const apiDetails = (0, api_client_1.getApiDetails)();
+        const apiDetails = {
+            auth: actionsUtil.getRequiredInput("token"),
+            url: util.getRequiredEnvParam("GITHUB_SERVER_URL"),
+        };
         const outputDir = actionsUtil.getRequiredInput("output");
         const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
         const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
         const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
-        const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
-        const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
-        await runAutobuildIfLegacyGoWorkflow(config, featureFlags, logger);
-        dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, featureFlags);
+        await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
         if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
-            runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger, featureFlags);
+            runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
+            if (config.debugMode) {
+                // Upload the SARIF files as an Actions artifact for debugging
+                await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir, config.debugArtifactName);
+            }
+        }
+        const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
+        if (config.debugMode) {
+            // Upload the logs as an Actions artifact for debugging
+            const toUpload = [];
+            for (const language of config.languages) {
+                toUpload.push(...listFolder(path.resolve(util.getCodeQLDatabasePath(config, language), "log")));
+            }
+            if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
+                // Multilanguage tracing: there are additional logs in the root of the cluster
+                toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
+            }
+            await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
+            if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
+                // Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
+                await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir, config.debugArtifactName);
+            }
         }
         if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
             await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
@@ -182,53 +117,95 @@ async function run() {
         }
         // Possibly upload the database bundles for remote queries
         await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
-        // Possibly upload the TRAP caches for later re-use
-        const trapCacheUploadStartTime = perf_hooks_1.performance.now();
-        const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
-        didUploadTrapCaches = await (0, trap_caching_1.uploadTrapCaches)(codeql, config, logger);
-        trapCacheUploadTime = perf_hooks_1.performance.now() - trapCacheUploadStartTime;
-        // We don't upload results in test mode, so don't wait for processing
-        if (util.isInTestMode()) {
-            core.debug("In test mode. Waiting for processing is disabled.");
-        }
-        else if (uploadResult !== undefined &&
+        if (uploadResult !== undefined &&
             actionsUtil.getRequiredInput("wait-for-processing") === "true") {
             await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
         }
-        // If we did not throw an error yet here, but we expect one, throw it.
-        if (actionsUtil.getOptionalInput("expect-error") === "true") {
-            core.setFailed(`expect-error input was set to true but no error was thrown.`);
-        }
     }
     catch (origError) {
         const error = origError instanceof Error ? origError : new Error(String(origError));
-        if (actionsUtil.getOptionalInput("expect-error") !== "true" ||
-            hasBadExpectErrorInput()) {
-            core.setFailed(error.message);
-        }
+        core.setFailed(error.message);
         console.log(error);
         if (error instanceof analyze_1.CodeQLAnalysisError) {
             const stats = { ...error.queriesStatusReport };
-            await sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
+            await sendStatusReport(startedAt, config, stats, error);
         }
         else {
-            await sendStatusReport(startedAt, config, undefined, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
+            await sendStatusReport(startedAt, config, undefined, error);
        }
         return;
     }
+    finally {
+        if (config !== undefined && config.debugMode) {
+            try {
+                // Upload the database bundles as an Actions artifact for debugging
+                const toUpload = [];
+                for (const language of config.languages) {
+                    toUpload.push(await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`));
+                }
+                await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
+            }
+            catch (error) {
+                console.log(`Failed to upload database debug bundles: ${error}`);
+            }
+        }
+        if (core.isDebug() && config !== undefined) {
+            core.info("Debug mode is on. Printing CodeQL debug logs...");
+            for (const language of config.languages) {
+                const databaseDirectory = util.getCodeQLDatabasePath(config, language);
+                const logsDirectory = path.join(databaseDirectory, "log");
+                const walkLogFiles = (dir) => {
+                    const entries = fs.readdirSync(dir, { withFileTypes: true });
+                    for (const entry of entries) {
+                        if (entry.isFile()) {
+                            core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
+                            process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
+                            core.endGroup();
+                        }
+                        else if (entry.isDirectory()) {
+                            walkLogFiles(path.resolve(dir, entry.name));
+                        }
+                    }
+                };
+                walkLogFiles(logsDirectory);
+            }
+        }
+    }
     if (runStats && uploadResult) {
         await sendStatusReport(startedAt, config, {
             ...runStats,
             ...uploadResult.statusReport,
-        }, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
+        });
     }
     else if (runStats) {
-        await sendStatusReport(startedAt, config, { ...runStats }, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
+        await sendStatusReport(startedAt, config, { ...runStats });
     }
     else {
-        await sendStatusReport(startedAt, config, undefined, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
+        await sendStatusReport(startedAt, config, undefined);
     }
 }
+async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
+    let suffix = "";
+    const matrix = actionsUtil.getRequiredInput("matrix");
+    if (matrix !== undefined && matrix !== "null") {
+        for (const entry of Object.entries(JSON.parse(matrix)).sort())
+            suffix += `-${entry[1]}`;
+    }
+    await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
+}
+function listFolder(dir) {
+    const entries = fs.readdirSync(dir, { withFileTypes: true });
+    const files = [];
+    for (const entry of entries) {
+        if (entry.isFile()) {
+            files.push(path.resolve(dir, entry.name));
+        }
+        else if (entry.isDirectory()) {
+            files.push(...listFolder(path.resolve(dir, entry.name)));
+        }
+    }
+    return files;
+}
 exports.runPromise = run();
 async function runWrapper() {
     try {
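uploadDebugArtifacts above derives an artifact-name suffix from the sorted entries of the matrix input so that parallel matrix jobs upload under distinct names, then sanitizes the result. A small sketch of just that naming step (illustrative only; the matrix value is invented for the example):

    // Illustrative sketch of the matrix-derived artifact name; not the action's own helper.
    function debugArtifactName(baseName, matrixJson) {
        let suffix = "";
        if (matrixJson !== undefined && matrixJson !== "null") {
            for (const entry of Object.entries(JSON.parse(matrixJson)).sort()) {
                suffix += `-${entry[1]}`; // append each matrix value, e.g. "-cpp-ubuntu-latest"
            }
        }
        return `${baseName}${suffix}`.replace(/[^a-zA-Z0-9_\\-]+/g, "");
    }

    console.log(debugArtifactName("my-debug-artifacts", '{"language":"cpp","os":"ubuntu-latest"}'));
    // "my-debug-artifacts-cpp-ubuntu-latest"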

File diff suppressed because one or more lines are too long

lib/analyze.js generated (254 changed lines)

@@ -18,20 +18,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
 __setModuleDefault(result, mod);
 return result;
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.validateQueryFilters = exports.runCleanup = exports.runFinalize = exports.createQuerySuiteContents = exports.convertPackToQuerySuiteEntry = exports.runQueries = exports.dbIsFinalized = exports.createdDBForScannedLanguages = exports.CodeQLAnalysisError = void 0;
+exports.runCleanup = exports.runFinalize = exports.runQueries = exports.CodeQLAnalysisError = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
-const perf_hooks_1 = require("perf_hooks");
+// We need to import `performance` on Node 12
 const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
-const del_1 = __importDefault(require("del"));
 const yaml = __importStar(require("js-yaml"));
 const analysisPaths = __importStar(require("./analysis-paths"));
 const codeql_1 = require("./codeql");
-const configUtils = __importStar(require("./config-utils"));
 const count_loc_1 = require("./count-loc");
 const languages_1 = require("./languages");
 const sharedEnv = __importStar(require("./shared-environment"));
@@ -70,23 +64,23 @@ async function setupPythonExtractor(logger) {
logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`); logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`);
process.env["LGTM_PYTHON_SETUP_VERSION"] = output; process.env["LGTM_PYTHON_SETUP_VERSION"] = output;
} }
async function createdDBForScannedLanguages(codeql, config, logger, featureFlags) { async function createdDBForScannedLanguages(config, logger) {
// Insert the LGTM_INDEX_X env vars at this point so they are set when // Insert the LGTM_INDEX_X env vars at this point so they are set when
// we extract any scanned languages. // we extract any scanned languages.
analysisPaths.includeAndExcludeAnalysisPaths(config); analysisPaths.includeAndExcludeAnalysisPaths(config);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
for (const language of config.languages) { for (const language of config.languages) {
if ((0, languages_1.isScannedLanguage)(language, await util.isGoExtractionReconciliationEnabled(featureFlags), logger) && if ((0, languages_1.isScannedLanguage)(language) &&
!dbIsFinalized(config, language, logger)) { !dbIsFinalized(config, language, logger)) {
logger.startGroup(`Extracting ${language}`); logger.startGroup(`Extracting ${language}`);
if (language === languages_1.Language.python) { if (language === languages_1.Language.python) {
await setupPythonExtractor(logger); await setupPythonExtractor(logger);
} }
await codeql.extractScannedLanguage(config, language); await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config, language), language);
logger.endGroup(); logger.endGroup();
} }
} }
} }
exports.createdDBForScannedLanguages = createdDBForScannedLanguages;
function dbIsFinalized(config, language, logger) { function dbIsFinalized(config, language, logger) {
const dbPath = util.getCodeQLDatabasePath(config, language); const dbPath = util.getCodeQLDatabasePath(config, language);
try { try {
@@ -98,13 +92,9 @@ function dbIsFinalized(config, language, logger) {
return false; return false;
} }
} }
exports.dbIsFinalized = dbIsFinalized; async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger) {
async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags) { await createdDBForScannedLanguages(config, logger);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd); const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const extractionStart = perf_hooks_1.performance.now();
await createdDBForScannedLanguages(codeql, config, logger, featureFlags);
const extractionTime = perf_hooks_1.performance.now() - extractionStart;
const trapImportStart = perf_hooks_1.performance.now();
for (const language of config.languages) { for (const language of config.languages) {
if (dbIsFinalized(config, language, logger)) { if (dbIsFinalized(config, language, logger)) {
logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`); logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`);
@@ -115,19 +105,16 @@ async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger,
logger.endGroup(); logger.endGroup();
} }
} }
const trapImportTime = perf_hooks_1.performance.now() - trapImportStart;
return {
scanned_language_extraction_duration_ms: Math.round(extractionTime),
trap_import_duration_ms: Math.round(trapImportTime),
};
} }
// Runs queries and creates sarif files in the given folder // Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, featureFlags) { async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger) {
const statusReport = {}; const statusReport = {};
let locPromise = Promise.resolve({}); let locPromise = Promise.resolve({});
const cliCanCountBaseline = await cliCanCountLoC(); const cliCanCountBaseline = await cliCanCountLoC();
const countLocDebugMode = process.env["INTERNAL_CODEQL_ACTION_DEBUG_LOC"] || config.debugMode; const debugMode = process.env["INTERNAL_CODEQL_ACTION_DEBUG_LOC"] ||
if (!cliCanCountBaseline || countLocDebugMode) { process.env["ACTIONS_RUNNER_DEBUG"] ||
process.env["ACTIONS_STEP_DEBUG"];
if (!cliCanCountBaseline || debugMode) {
// count the number of lines in the background // count the number of lines in the background
locPromise = (0, count_loc_1.countLoc)(path.resolve(), locPromise = (0, count_loc_1.countLoc)(path.resolve(),
// config.paths specifies external directories. the current // config.paths specifies external directories. the current
@@ -135,10 +122,8 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
// that here. // that here.
config.paths, config.pathsIgnore, config.languages, logger); config.paths, config.pathsIgnore, config.languages, logger);
} }
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
for (const language of config.languages) { for (const language of config.languages) {
const queries = config.queries[language]; const queries = config.queries[language];
const queryFilters = validateQueryFilters(config.originalUserInput["query-filters"]);
const packsWithVersion = config.packs[language] || []; const packsWithVersion = config.packs[language] || [];
const hasBuiltinQueries = (queries === null || queries === void 0 ? void 0 : queries.builtin.length) > 0; const hasBuiltinQueries = (queries === null || queries === void 0 ? void 0 : queries.builtin.length) > 0;
const hasCustomQueries = (queries === null || queries === void 0 ? void 0 : queries.custom.length) > 0; const hasCustomQueries = (queries === null || queries === void 0 ? void 0 : queries.custom.length) > 0;
@@ -146,73 +131,57 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) { if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`); throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
} }
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
try { try {
if (await util.useCodeScanningConfigInCli(codeql, featureFlags)) { if (hasPackWithCustomQueries &&
// If we are using the codescanning config in the CLI, !(await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_CONFIG_FILES))) {
// much of the work needed to generate the query suites logger.info("Performing analysis with custom CodeQL Packs.");
// is done in the CLI. We just need to make a single logger.startGroup(`Downloading custom packs for ${language}`);
// call to run all the queries for each language and const results = await codeql.packDownload(packsWithVersion);
// another to interpret the results. logger.info(`Downloaded packs: ${results.packs
logger.startGroup(`Running queries for ${language}`); .map((r) => `${r.name}@${r.version || "latest"}`)
.join(", ")}`);
logger.endGroup();
}
logger.startGroup(`Running queries for ${language}`);
const querySuitePaths = [];
if (queries["builtin"].length > 0) {
const startTimeBuiltIn = new Date().getTime(); const startTimeBuiltIn = new Date().getTime();
await runQueryGroup(language, "all", undefined, undefined); querySuitePaths.push(await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"]), undefined));
// TODO should not be using `builtin` here. We should be using `all` instead.
// The status report does not support `all` yet.
statusReport[`analyze_builtin_queries_${language}_duration_ms`] = statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
new Date().getTime() - startTimeBuiltIn; new Date().getTime() - startTimeBuiltIn;
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
const analysisSummary = await runInterpretResults(language, undefined, sarifFile, config.debugMode);
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
} }
else { const startTimeCustom = new Date().getTime();
logger.startGroup(`Running queries for ${language}`); let ranCustom = false;
const querySuitePaths = []; for (let i = 0; i < queries["custom"].length; ++i) {
if (queries["builtin"].length > 0) { if (queries["custom"][i].queries.length > 0) {
const startTimeBuiltIn = new Date().getTime(); querySuitePaths.push(await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries), queries["custom"][i].searchPath));
querySuitePaths.push((await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"], queryFilters), undefined)));
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
new Date().getTime() - startTimeBuiltIn;
}
const startTimeCustom = new Date().getTime();
let ranCustom = false;
for (let i = 0; i < queries["custom"].length; ++i) {
if (queries["custom"][i].queries.length > 0) {
querySuitePaths.push((await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries, queryFilters), queries["custom"][i].searchPath)));
ranCustom = true;
}
}
if (packsWithVersion.length > 0) {
querySuitePaths.push(await runQueryPacks(language, "packs", packsWithVersion, queryFilters));
ranCustom = true; ranCustom = true;
} }
if (ranCustom) {
statusReport[`analyze_custom_queries_${language}_duration_ms`] =
new Date().getTime() - startTimeCustom;
}
logger.endGroup();
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile, config.debugMode);
if (!cliCanCountBaseline) {
await injectLinesOfCode(sarifFile, language, locPromise);
}
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
} }
if (!cliCanCountBaseline || countLocDebugMode) { if (packsWithVersion.length > 0) {
querySuitePaths.push(await runQueryGroup(language, "packs", createPackSuiteContents(packsWithVersion), undefined));
ranCustom = true;
}
if (ranCustom) {
statusReport[`analyze_custom_queries_${language}_duration_ms`] =
new Date().getTime() - startTimeCustom;
}
logger.endGroup();
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile);
if (!cliCanCountBaseline)
await injectLinesOfCode(sarifFile, language, locPromise);
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
if (!cliCanCountBaseline || debugMode)
printLinesOfCodeSummary(logger, language, await locPromise); printLinesOfCodeSummary(logger, language, await locPromise);
} if (cliCanCountBaseline)
if (cliCanCountBaseline) {
logger.info(await runPrintLinesOfCode(language)); logger.info(await runPrintLinesOfCode(language));
}
} }
catch (e) { catch (e) {
logger.info(String(e)); logger.info(String(e));
@@ -224,69 +193,63 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
} }
} }
return statusReport; return statusReport;
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) { async function runInterpretResults(language, queries, sarifFile) {
const databasePath = util.getCodeQLDatabasePath(config, language); const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId); const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId);
} }
async function cliCanCountLoC() { async function cliCanCountLoC() {
return await util.codeQlVersionAbove(await (0, codeql_1.getCodeQL)(config.codeQLCmd), codeql_1.CODEQL_VERSION_COUNTS_LINES); return await util.codeQlVersionAbove(await (0, codeql_1.getCodeQL)(config.codeQLCmd), codeql_1.CODEQL_VERSION_COUNTS_LINES);
} }
async function runPrintLinesOfCode(language) { async function runPrintLinesOfCode(language) {
const databasePath = util.getCodeQLDatabasePath(config, language); const databasePath = util.getCodeQLDatabasePath(config, language);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
return await codeql.databasePrintBaseline(databasePath); return await codeql.databasePrintBaseline(databasePath);
} }
async function runQueryGroup(language, type, querySuiteContents, searchPath) { async function runQueryGroup(language, type, querySuiteContents, searchPath) {
const databasePath = util.getCodeQLDatabasePath(config, language); const databasePath = util.getCodeQLDatabasePath(config, language);
// Pass the queries to codeql using a file instead of using the command // Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows. // line to avoid command line length restrictions, particularly on windows.
const querySuitePath = querySuiteContents const querySuitePath = `${databasePath}-queries-${type}.qls`;
? `${databasePath}-queries-${type}.qls` fs.writeFileSync(querySuitePath, querySuiteContents);
: undefined; logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
if (querySuiteContents && querySuitePath) { const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
fs.writeFileSync(querySuitePath, querySuiteContents);
logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
}
await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, memoryFlag, threadsFlag); await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, memoryFlag, threadsFlag);
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`); logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
return querySuitePath; return querySuitePath;
} }
async function runQueryPacks(language, type, packs, queryFilters) {
const databasePath = util.getCodeQLDatabasePath(config, language);
for (const pack of packs) {
logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
}
// combine the list of packs into a query suite in order to run them all simultaneously.
const querySuite = packs.map(convertPackToQuerySuiteEntry).concat(queryFilters);
const querySuitePath = `${databasePath}-queries-${type}.qls`;
fs.writeFileSync(querySuitePath, yaml.dump(querySuite));
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
await codeql.databaseRunQueries(databasePath, undefined, querySuitePath, memoryFlag, threadsFlag);
return querySuitePath;
}
} }
exports.runQueries = runQueries; exports.runQueries = runQueries;
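The comment inside runQueryGroup explains why the suite goes through a file: passing every query on the command line can exceed command-line length limits, particularly on Windows, so the action writes a .qls next to the database and hands the CLI only the path. A rough TypeScript sketch of that hand-off, using a throwaway temp directory in place of a real CodeQL database:

// Sketch only: write the suite beside the (fake) database path and return the .qls path;
// the action then passes this path to `codeql database run-queries`.
import * as fs from "fs";
import * as os from "os";
import * as path from "path";

function writeQuerySuite(databasePath: string, type: string, contents: string): string {
  const querySuitePath = `${databasePath}-queries-${type}.qls`; // same naming as runQueryGroup
  fs.writeFileSync(querySuitePath, contents);
  return querySuitePath; // only this short path ends up on the command line
}

const fakeDbPath = path.join(fs.mkdtempSync(path.join(os.tmpdir(), "codeql-")), "javascript");
console.log(writeQuerySuite(fakeDbPath, "builtin", "- query: foo.ql\n- query: bar.ql\n"));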
function convertPackToQuerySuiteEntry(packStr) { function createQuerySuiteContents(queries) {
var _a, _b, _c, _d; return queries.map((q) => `- query: ${q}`).join("\n");
const pack = configUtils.parsePacksSpecification(packStr);
return {
qlpack: !pack.path ? pack.name : undefined,
from: pack.path ? pack.name : undefined,
version: pack.version,
query: ((_a = pack.path) === null || _a === void 0 ? void 0 : _a.endsWith(".ql")) ? pack.path : undefined,
queries: !((_b = pack.path) === null || _b === void 0 ? void 0 : _b.endsWith(".ql")) && !((_c = pack.path) === null || _c === void 0 ? void 0 : _c.endsWith(".qls"))
? pack.path
: undefined,
apply: ((_d = pack.path) === null || _d === void 0 ? void 0 : _d.endsWith(".qls")) ? pack.path : undefined,
};
} }
exports.convertPackToQuerySuiteEntry = convertPackToQuerySuiteEntry; function createPackSuiteContents(packsWithVersion) {
function createQuerySuiteContents(queries, queryFilters) { return packsWithVersion.map(packWithVersionToQuerySuiteEntry).join("\n");
return yaml.dump(queries.map((q) => ({ query: q })).concat(queryFilters));
} }
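The suite contents themselves are plain YAML. On the side that still assembles suites in the Action, createQuerySuiteContents emits one query entry per selected query followed by the user's query-filters entries, and a test in lib/analyze.test.js further down pins the exact output. A small sketch of that serialization with js-yaml (a dependency the action already uses):

// Sketch only: dump queries plus include/exclude filters as a query suite.
import * as yaml from "js-yaml";

const queries = ["query1.ql", "query2.ql"];
const queryFilters: object[] = [
  { exclude: { "problem.severity": "recommendation" } },
  { include: { "problem.severity": "recommendation" } },
];

console.log(yaml.dump([...queries.map((q) => ({ query: q })), ...queryFilters]));
// - query: query1.ql
// - query: query2.ql
// - exclude:
//     problem.severity: recommendation
// - include:
//     problem.severity: recommendation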
exports.createQuerySuiteContents = createQuerySuiteContents; function packWithVersionToQuerySuiteEntry(pack) {
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger, featureFlags) { let text = `- qlpack: ${pack.packName}`;
if (pack.version) {
text += `\n version: ${pack.version}`;
}
return text;
}
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Delete variables as specified by the end-tracing script
await (0, tracer_config_1.endTracingForCluster)(config);
}
else {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
}
// After switching to Node16, this entire block can be replaced with `await fs.promises.rm(outputDir, { recursive: true, force: true });`.
try { try {
await (0, del_1.default)(outputDir, { force: true }); await fs.promises.rmdir(outputDir, {
recursive: true,
maxRetries: 5,
retryDelay: 2000,
});
} }
catch (error) { catch (error) {
if ((error === null || error === void 0 ? void 0 : error.code) !== "ENOENT") { if ((error === null || error === void 0 ? void 0 : error.code) !== "ENOENT") {
@@ -294,22 +257,7 @@ async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger, f
} }
} }
await fs.promises.mkdir(outputDir, { recursive: true }); await fs.promises.mkdir(outputDir, { recursive: true });
const timings = await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags); await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
// WARNING: This does not _really_ end tracing, as the tracer will restore its
// critical environment variables and it'll still be active for all processes
// launched from this build step.
// However, it will stop tracing for all steps past the codeql-action/analyze
// step.
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Delete variables as specified by the end-tracing script
await (0, tracer_config_1.endTracingForCluster)(config, await util.isGoExtractionReconciliationEnabled(featureFlags), logger);
}
else {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
}
return timings;
} }
exports.runFinalize = runFinalize; exports.runFinalize = runFinalize;
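One side of this hunk swaps fs.promises.rmdir for the del package when clearing the SARIF output directory, and leaves a note that once the action runs on Node 16 the whole block can shrink to a single fs.promises.rm call. A sketch of that simpler form (fs.promises.rm requires Node 14.14 or later; the retry options mirror the rmdir call shown here):

// Sketch only: the Node 16-era cleanup the in-code comment points at.
// `force: true` tolerates a missing directory; retries cover transient FS errors.
import * as fs from "fs";

async function resetOutputDir(outputDir: string): Promise<void> {
  await fs.promises.rm(outputDir, {
    recursive: true,
    force: true,
    maxRetries: 5,
    retryDelay: 2000,
  });
  await fs.promises.mkdir(outputDir, { recursive: true });
}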
async function runCleanup(config, cleanupLevel, logger) { async function runCleanup(config, cleanupLevel, logger) {
@@ -350,28 +298,4 @@ function printLinesOfCodeSummary(logger, language, lineCounts) {
logger.info(`Counted a baseline of ${lineCounts[language]} lines of code for ${language}.`); logger.info(`Counted a baseline of ${lineCounts[language]} lines of code for ${language}.`);
} }
} }
// exported for testing
function validateQueryFilters(queryFilters) {
if (!queryFilters) {
return [];
}
if (!Array.isArray(queryFilters)) {
throw new Error(`Query filters must be an array of "include" or "exclude" entries. Found ${typeof queryFilters}`);
}
const errors = [];
for (const qf of queryFilters) {
const keys = Object.keys(qf);
if (keys.length !== 1) {
errors.push(`Query filter must have exactly one key: ${JSON.stringify(qf)}`);
}
if (!["exclude", "include"].includes(keys[0])) {
errors.push(`Only "include" or "exclude" filters are allowed:\n${JSON.stringify(qf)}`);
}
}
if (errors.length) {
throw new Error(`Invalid query filter.\n${errors.join("\n")}`);
}
return queryFilters;
}
exports.validateQueryFilters = validateQueryFilters;
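validateQueryFilters only checks shape: the input must be an array, and each entry must have exactly one key, either include or exclude; the filter bodies themselves are passed through untouched. A short usage sketch against the export above (the import path and the `as any` cast are assumptions for a standalone file; the error messages are quoted from the function):

// Sketch only: exercising the shape checks in validateQueryFilters.
import { validateQueryFilters } from "./analyze";

// Accepted: one key per entry, and that key is "include" or "exclude".
validateQueryFilters([
  { exclude: { "problem.severity": "recommendation" } },
  { include: { "tags contain": ["security"] } },
]);

// Rejected with "Query filter must have exactly one key": two keys in one entry.
try {
  validateQueryFilters([
    { include: { "tags contain": ["foo"] }, exclude: { "tags contain": ["bar"] } },
  ]);
} catch (e) {
  console.log(String(e));
}

// Rejected with 'Only "include" or "exclude" filters are allowed'.
try {
  validateQueryFilters([{ xxx: "foo" }] as any);
} catch (e) {
  console.log(String(e));
}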
//# sourceMappingURL=analyze.js.map //# sourceMappingURL=analyze.js.map

File diff suppressed because one or more lines are too long

188
lib/analyze.test.js generated

@@ -26,11 +26,11 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava")); const ava_1 = __importDefault(require("ava"));
const yaml = __importStar(require("js-yaml")); const yaml = __importStar(require("js-yaml"));
const semver_1 = require("semver");
const sinon = __importStar(require("sinon")); const sinon = __importStar(require("sinon"));
const analyze_1 = require("./analyze"); const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql"); const codeql_1 = require("./codeql");
const count = __importStar(require("./count-loc")); const count = __importStar(require("./count-loc"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages"); const languages_1 = require("./languages");
const logging_1 = require("./logging"); const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils"); const testing_utils_1 = require("./testing-utils");
@@ -53,8 +53,18 @@ const util = __importStar(require("./util"));
const addSnippetsFlag = ""; const addSnippetsFlag = "";
const threadsFlag = ""; const threadsFlag = "";
const packs = { const packs = {
[languages_1.Language.cpp]: ["a/b@1.0.0"], [languages_1.Language.cpp]: [
[languages_1.Language.java]: ["c/d@2.0.0"], {
packName: "a/b",
version: (0, semver_1.clean)("1.0.0"),
},
],
[languages_1.Language.java]: [
{
packName: "c/d",
version: (0, semver_1.clean)("2.0.0"),
},
],
}; };
for (const language of Object.values(languages_1.Language)) { for (const language of Object.values(languages_1.Language)) {
(0, codeql_1.setCodeQL)({ (0, codeql_1.setCodeQL)({
@@ -108,6 +118,7 @@ const util = __importStar(require("./util"));
paths: [], paths: [],
originalUserInput: {}, originalUserInput: {},
tempDir: tmpDir, tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "", codeQLCmd: "",
gitHubVersion: { gitHubVersion: {
type: util.GitHubVariant.DOTCOM, type: util.GitHubVariant.DOTCOM,
@@ -117,13 +128,7 @@ const util = __importStar(require("./util"));
debugMode: false, debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME, debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME, debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: { injectedMlQueries: false,
injectedMlQueries: false,
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
}; };
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), { fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true, recursive: true,
@@ -132,7 +137,7 @@ const util = __importStar(require("./util"));
builtin: ["foo.ql"], builtin: ["foo.ql"],
custom: [], custom: [],
}; };
const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, feature_flags_1.createFeatureFlags)([])); const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
const hasPacks = language in packs; const hasPacks = language in packs;
const statusReportKeys = Object.keys(builtinStatusReport).sort(); const statusReportKeys = Object.keys(builtinStatusReport).sort();
if (hasPacks) { if (hasPacks) {
@@ -158,7 +163,7 @@ const util = __importStar(require("./util"));
}, },
], ],
}; };
const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, feature_flags_1.createFeatureFlags)([])); const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
t.deepEqual(Object.keys(customStatusReport).length, 2); t.deepEqual(Object.keys(customStatusReport).length, 2);
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport); t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
const expectedSearchPathsUsed = hasPacks const expectedSearchPathsUsed = hasPacks
@@ -204,10 +209,32 @@ const util = __importStar(require("./util"));
query: "bar.ql", query: "bar.ql",
}, },
]; ];
const qlsPackContentCpp = [
{
qlpack: "a/b",
version: "1.0.0",
},
];
const qlsPackContentJava = [
{
qlpack: "c/d",
version: "2.0.0",
},
];
for (const lang of Object.values(languages_1.Language)) { for (const lang of Object.values(languages_1.Language)) {
t.deepEqual(readContents(`${lang}-queries-builtin.qls`), qlsContent); t.deepEqual(readContents(`${lang}-queries-builtin.qls`), qlsContent);
t.deepEqual(readContents(`${lang}-queries-custom-0.qls`), qlsContent); t.deepEqual(readContents(`${lang}-queries-custom-0.qls`), qlsContent);
t.deepEqual(readContents(`${lang}-queries-custom-1.qls`), qlsContent2); t.deepEqual(readContents(`${lang}-queries-custom-1.qls`), qlsContent2);
const packSuiteName = `${lang}-queries-packs.qls`;
if (lang === languages_1.Language.cpp) {
t.deepEqual(readContents(packSuiteName), qlsPackContentCpp);
}
else if (lang === languages_1.Language.java) {
t.deepEqual(readContents(packSuiteName), qlsPackContentJava);
}
else {
t.false(fs.existsSync(path.join(tmpDir, "codeql_databases", packSuiteName)));
}
} }
function readContents(name) { function readContents(name) {
const x = fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8"); const x = fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8");
@@ -216,141 +243,4 @@ const util = __importStar(require("./util"));
} }
} }
}); });
(0, ava_1.default)("validateQueryFilters", (t) => {
t.notThrows(() => (0, analyze_1.validateQueryFilters)([]));
t.notThrows(() => (0, analyze_1.validateQueryFilters)(undefined));
t.notThrows(() => {
return (0, analyze_1.validateQueryFilters)([
{
exclude: {
"problem.severity": "recommendation",
},
},
{
exclude: {
"tags contain": ["foo", "bar"],
},
},
{
include: {
"problem.severity": "something-to-think-about",
},
},
{
include: {
"tags contain": ["baz", "bop"],
},
},
]);
});
t.throws(() => {
return (0, analyze_1.validateQueryFilters)([
{
exclude: {
"tags contain": ["foo", "bar"],
},
include: {
"tags contain": ["baz", "bop"],
},
},
]);
}, { message: /Query filter must have exactly one key/ });
t.throws(() => {
return (0, analyze_1.validateQueryFilters)([{ xxx: "foo" }]);
}, { message: /Only "include" or "exclude" filters are allowed/ });
t.throws(() => {
return (0, analyze_1.validateQueryFilters)({ exclude: "foo" });
}, {
message: /Query filters must be an array of "include" or "exclude" entries/,
});
});
const convertPackToQuerySuiteEntryMacro = ava_1.default.macro({
exec: (t, packSpec, suiteEntry) => t.deepEqual((0, analyze_1.convertPackToQuerySuiteEntry)(packSpec), suiteEntry),
title: (_providedTitle, packSpec) => `Query Suite Entry: ${packSpec}`,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b", {
qlpack: "a/b",
from: undefined,
version: undefined,
query: undefined,
queries: undefined,
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3", {
qlpack: "a/b",
from: undefined,
version: "~1.2.3",
query: undefined,
queries: undefined,
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b:my/path", {
qlpack: undefined,
from: "a/b",
version: undefined,
query: undefined,
queries: "my/path",
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3:my/path", {
qlpack: undefined,
from: "a/b",
version: "~1.2.3",
query: undefined,
queries: "my/path",
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b:my/path/query.ql", {
qlpack: undefined,
from: "a/b",
version: undefined,
query: "my/path/query.ql",
queries: undefined,
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3:my/path/query.ql", {
qlpack: undefined,
from: "a/b",
version: "~1.2.3",
query: "my/path/query.ql",
queries: undefined,
apply: undefined,
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b:my/path/suite.qls", {
qlpack: undefined,
from: "a/b",
version: undefined,
query: undefined,
queries: undefined,
apply: "my/path/suite.qls",
});
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3:my/path/suite.qls", {
qlpack: undefined,
from: "a/b",
version: "~1.2.3",
query: undefined,
queries: undefined,
apply: "my/path/suite.qls",
});
(0, ava_1.default)("convertPackToQuerySuiteEntry Failure", (t) => {
t.throws(() => (0, analyze_1.convertPackToQuerySuiteEntry)("this-is-not-a-pack"));
});
(0, ava_1.default)("createQuerySuiteContents", (t) => {
const yamlResult = (0, analyze_1.createQuerySuiteContents)(["query1.ql", "query2.ql"], [
{
exclude: { "problem.severity": "recommendation" },
},
{
include: { "problem.severity": "recommendation" },
},
]);
const expected = `- query: query1.ql
- query: query2.ql
- exclude:
problem.severity: recommendation
- include:
problem.severity: recommendation
`;
t.deepEqual(yamlResult, expected);
});
//# sourceMappingURL=analyze.test.js.map //# sourceMappingURL=analyze.test.js.map

File diff suppressed because one or more lines are too long

10
lib/api-client.js generated

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod }; return (mod && mod.__esModule) ? mod : { "default": mod };
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.getGitHubVersionActionsOnly = exports.getActionsApiClient = exports.getApiDetails = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0; exports.getGitHubVersionActionsOnly = exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
const path = __importStar(require("path")); const path = __importStar(require("path"));
const githubUtils = __importStar(require("@actions/github/lib/utils")); const githubUtils = __importStar(require("@actions/github/lib/utils"));
const retry = __importStar(require("@octokit/plugin-retry")); const retry = __importStar(require("@octokit/plugin-retry"));
@@ -40,16 +40,14 @@ var DisallowedAPIVersionReason;
const getApiClient = function (apiDetails, { allowExternal = false } = {}) { const getApiClient = function (apiDetails, { allowExternal = false } = {}) {
const auth = (allowExternal && apiDetails.externalRepoAuth) || apiDetails.auth; const auth = (allowExternal && apiDetails.externalRepoAuth) || apiDetails.auth;
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry); const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
const apiURL = apiDetails.apiURL || deriveApiUrl(apiDetails.url);
return new retryingOctokit(githubUtils.getOctokitOptions(auth, { return new retryingOctokit(githubUtils.getOctokitOptions(auth, {
baseUrl: apiURL, baseUrl: getApiUrl(apiDetails.url),
userAgent: `CodeQL-${(0, util_1.getMode)()}/${pkg.version}`, userAgent: `CodeQL-${(0, util_1.getMode)()}/${pkg.version}`,
log: (0, console_log_level_1.default)({ level: "debug" }), log: (0, console_log_level_1.default)({ level: "debug" }),
})); }));
}; };
exports.getApiClient = getApiClient; exports.getApiClient = getApiClient;
// Once the runner is deleted, this can also be removed since the GitHub API URL is always available in an environment variable on Actions. function getApiUrl(githubUrl) {
function deriveApiUrl(githubUrl) {
const url = new URL(githubUrl); const url = new URL(githubUrl);
// If we detect this is trying to connect to github.com // If we detect this is trying to connect to github.com
// then return with a fixed canonical URL. // then return with a fixed canonical URL.
@@ -64,10 +62,8 @@ function getApiDetails() {
return { return {
auth: (0, actions_util_1.getRequiredInput)("token"), auth: (0, actions_util_1.getRequiredInput)("token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"), url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
apiURL: (0, util_1.getRequiredEnvParam)("GITHUB_API_URL"),
}; };
} }
exports.getApiDetails = getApiDetails;
// Temporary function to aid in the transition to running on and off of github actions. // Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been converted this function should be removed or made canonical // Once all code has been converted this function should be removed or made canonical
// and called only from the action entrypoints. // and called only from the action entrypoints.
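One side of this hunk reads the REST API URL directly from GITHUB_API_URL, while the other derives it from the server URL. A sketch of that derivation as the comments describe it: github.com (and the github.localhost test host) map to the fixed dotcom API URL, and anything else is treated as GitHub Enterprise Server with /api/v3 appended. The literal dotcom URL below is an assumption standing in for the action's shared constant:

// Sketch only: derive a REST API base URL from a GitHub server URL.
import * as path from "path";

function deriveApiUrl(githubUrl: string): string {
  const url = new URL(githubUrl);
  // Connecting to github.com (or the github.localhost test host): use the canonical API URL.
  if (url.hostname === "github.com" || url.hostname === "github.localhost") {
    return "https://api.github.com";
  }
  // Otherwise assume GitHub Enterprise Server, which serves its REST API under /api/v3.
  url.pathname = path.join(url.pathname, "api", "v3");
  return url.toString();
}

console.log(deriveApiUrl("https://github.com"));      // https://api.github.com
console.log(deriveApiUrl("https://ghe.example.com")); // https://ghe.example.com/api/v3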

lib/api-client.js.map generated

@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,6CAA+B;AAC/B,iCAAqE;AAErE,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAiBM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,MAAM,MAAM,GAAG,UAAU,CAAC,MAAM,IAAI,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IACjE,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,MAAM;QACf,SAAS,EAAE,UAAU,IAAA,cAAO,GAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAfW,QAAA,YAAY,gBAevB;AAEF,2IAA2I;AAC3I,SAAS,YAAY,CAAC,SAAiB;IACrC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,SAAgB,aAAa;IAC3B,OAAO;QACL,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;QAC7C,MAAM,EAAE,IAAA,0BAAmB,EAAC,gBAAgB,CAAC;KAC9C,CAAC;AACJ,CAAC;AAND,sCAMC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,OAAO,IAAA,oBAAY,EAAC,aAAa,EAAE,CAAC,CAAC;AACvC,CAAC;AAFD,kDAEC;AAED,IAAI,mBAAmB,GAA8B,SAAS,CAAC;AAE/D;;;;;;;GAOG;AACI,KAAK,UAAU,2BAA2B;IAC/C,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;QACrB,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;KAC1E;IACD,IAAI,mBAAmB,KAAK,SAAS,EAAE;QACrC,mBAAmB,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC;KACpE;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AARD,kEAQC"} 
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,6CAA+B;AAC/B,iCAAqE;AAErE,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,IAAA,cAAO,GAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,SAAS,aAAa;IACpB,OAAO;QACL,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;KAC9C,CAAC;AACJ,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,OAAO,IAAA,oBAAY,EAAC,aAAa,EAAE,CAAC,CAAC;AACvC,CAAC;AAFD,kDAEC;AAED,IAAI,mBAAmB,GAA8B,SAAS,CAAC;AAE/D;;;;;;;GAOG;AACI,KAAK,UAAU,2BAA2B;IAC/C,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;QACrB,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;KAC1E;IACD,IAAI,mBAAmB,KAAK,SAAS,EAAE;QACrC,mBAAmB,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC;KACpE;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AARD,kEAQC"}

11
lib/api-client.test.js generated

@@ -81,17 +81,6 @@ ava_1.default.beforeEach(() => {
userAgent: `CodeQL-Action/${pkg.version}`, userAgent: `CodeQL-Action/${pkg.version}`,
}); });
}); });
(0, ava_1.default)("Get the API with an API URL directly", async (t) => {
doTest(t, {
auth: "xyz",
url: "http://github.localhost",
apiURL: "http://api.github.localhost",
}, undefined, {
auth: "token xyz",
baseUrl: "http://api.github.localhost",
userAgent: `CodeQL-Action/${pkg.version}`,
});
});
function doTest(t, clientArgs, clientOptions, expected) { function doTest(t, clientArgs, clientOptions, expected) {
(0, api_client_1.getApiClient)(clientArgs, clientOptions); (0, api_client_1.getApiClient)(clientArgs, clientOptions);
const firstCallArgs = githubStub.args[0]; const firstCallArgs = githubStub.args[0];

lib/api-client.test.js.map generated

@@ -1 +1 @@
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,6CAA+B;AAE/B,6CAA4C;AAC5C,mDAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;AACnD,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,sCAAsC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,yBAAyB;QAC9B,MAAM,EAAE,6BAA6B;KACtC,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,6BAA6B;QACtC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,IAAA,yBAAY,EAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"} 
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,6CAA+B;AAE/B,6CAA4C;AAC5C,mDAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;AACnD,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,IAAA,yBAAY,EAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"}

View File

@@ -1 +1 @@
{ "maximumVersion": "3.7", "minimumVersion": "3.3" } { "maximumVersion": "3.5", "minimumVersion": "3.1" }

lib/autobuild-action.js generated

@@ -21,13 +21,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core")); const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util"); const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const autobuild_1 = require("./autobuild"); const autobuild_1 = require("./autobuild");
const configUtils = __importStar(require("./config-utils")); const config_utils = __importStar(require("./config-utils"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging"); const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util_1 = require("./util"); const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs // eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json"); const pkg = require("../package.json");
@@ -43,45 +39,29 @@ async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguag
await (0, actions_util_1.sendStatusReport)(statusReport); await (0, actions_util_1.sendStatusReport)(statusReport);
} }
async function run() { async function run() {
const startedAt = new Date();
const logger = (0, logging_1.getActionsLogger)(); const logger = (0, logging_1.getActionsLogger)();
await (0, util_1.checkActionVersion)(pkg.version); const startedAt = new Date();
let currentLanguage = undefined; let language = undefined;
let languages = undefined;
try { try {
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) { if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
return; return;
} }
const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)(); const config = await config_utils.getConfig((0, actions_util_1.getTemporaryDirectory)(), logger);
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, (0, api_client_1.getApiDetails)(), (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), logger);
const config = await configUtils.getConfig((0, actions_util_1.getTemporaryDirectory)(), logger);
if (config === undefined) { if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?"); throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
} }
languages = await (0, autobuild_1.determineAutobuildLanguages)(config, featureFlags, logger); language = (0, autobuild_1.determineAutobuildLanguage)(config, logger);
if (languages !== undefined) { if (language !== undefined) {
const workingDirectory = (0, actions_util_1.getOptionalInput)("working-directory"); await (0, autobuild_1.runAutobuild)(language, config, logger);
if (workingDirectory) {
logger.info(`Changing autobuilder working directory to ${workingDirectory}`);
process.chdir(workingDirectory);
}
for (const language of languages) {
currentLanguage = language;
await (0, autobuild_1.runAutobuild)(language, config, logger);
if (language === languages_1.Language.go) {
core.exportVariable(util_1.DID_AUTOBUILD_GO_ENV_VAR_NAME, "true");
}
}
} }
} }
catch (error) { catch (error) {
core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error instanceof Error ? error.message : String(error)}`); core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error instanceof Error ? error.message : String(error)}`);
console.log(error); console.log(error);
await sendCompletedStatusReport(startedAt, languages !== null && languages !== void 0 ? languages : [], currentLanguage, error instanceof Error ? error : new Error(String(error))); await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error instanceof Error ? error : new Error(String(error)));
return; return;
} }
await sendCompletedStatusReport(startedAt, languages !== null && languages !== void 0 ? languages : []); await sendCompletedStatusReport(startedAt, language ? [language] : []);
} }
async function runWrapper() { async function runWrapper() {
try { try {

lib/autobuild-action.js.map generated

@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,6CAA0E;AAC1E,2CAAwE;AACxE,4DAA8C;AAC9C,mDAAqD;AACrD,2CAAuC;AACvC,uCAA6C;AAC7C,6CAAkD;AAClD,iCAOgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,wCAA2B,GAAE,CAAC;QAC1D,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,EAAE,WAAI,CAAC,OAAO,CAAC,CAAC;QAE/D,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,aAAa,EACb,IAAA,0BAAa,GAAE,EACf,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,MAAM,CACP,CAAC;QAEF,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,YAAY,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,oCAA6B,EAAE,MAAM,CAAC,CAAC;iBAC5D;aACF;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,SAAS,aAAT,SAAS,cAAT,SAAS,GAAI,EAAE,EACf,eAAe,EACf,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,SAAS,aAAT,SAAS,cAAT,SAAS,GAAI,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"} 
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAMwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

90
lib/autobuild.js generated

@@ -1,96 +1,28 @@
"use strict"; "use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.runAutobuild = exports.determineAutobuildLanguages = void 0; exports.runAutobuild = exports.determineAutobuildLanguage = void 0;
const codeql_1 = require("./codeql"); const codeql_1 = require("./codeql");
const languages_1 = require("./languages"); const languages_1 = require("./languages");
const util = __importStar(require("./util")); function determineAutobuildLanguage(config, logger) {
async function determineAutobuildLanguages(config, featureFlags, logger) {
const isGoExtractionReconciliationEnabled = await util.isGoExtractionReconciliationEnabled(featureFlags);
// Attempt to find a language to autobuild // Attempt to find a language to autobuild
// We want pick the dominant language in the repo from the ones we're able to build // We want pick the dominant language in the repo from the ones we're able to build
// The languages are sorted in order specified by user or by lines of code if we got // The languages are sorted in order specified by user or by lines of code if we got
// them from the GitHub API, so try to build the first language on the list. // them from the GitHub API, so try to build the first language on the list.
const autobuildLanguages = config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l, isGoExtractionReconciliationEnabled, logger)); const autobuildLanguages = config.languages.filter(languages_1.isTracedLanguage);
if (!autobuildLanguages) { const language = autobuildLanguages[0];
if (!language) {
logger.info("None of the languages in this project require extra build steps"); logger.info("None of the languages in this project require extra build steps");
return undefined; return undefined;
} }
/** logger.debug(`Detected dominant traced language: ${language}`);
* Additionally autobuild Go in the autobuild Action to ensure backwards if (autobuildLanguages.length > 1) {
* compatibility for users performing a multi-language build within a single logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages
* job.
*
* For example, consider a user with the following workflow file:
*
* ```yml
* - uses: github/codeql-action/init@v2
* with:
* languages: go, java
* - uses: github/codeql-action/autobuild@v2
* - uses: github/codeql-action/analyze@v2
* ```
*
* - With Go extraction disabled, we will run the Java autobuilder in the
* autobuild Action, ensuring we extract both Java and Go code.
* - With Go extraction enabled, taking the previous behavior we'd run the Go
* autobuilder, since Go is first on the list of languages. We wouldn't run
* the Java autobuilder at all and so we'd only extract Go code.
*
* We therefore introduce a special case here such that we'll autobuild Go
* in addition to the primary non-Go traced language in the autobuild Action.
*
* This special case behavior should be removed as part of the next major
* version of the CodeQL Action.
*/
const autobuildLanguagesWithoutGo = autobuildLanguages.filter((l) => l !== languages_1.Language.go);
const languages = [];
// First run the autobuilder for the first non-Go traced language, if one
// exists.
if (autobuildLanguagesWithoutGo[0] !== undefined) {
languages.push(autobuildLanguagesWithoutGo[0]);
}
// If Go is requested, run the Go autobuilder last to ensure it doesn't
// interfere with the other autobuilder.
if (autobuildLanguages.length !== autobuildLanguagesWithoutGo.length) {
languages.push(languages_1.Language.go);
}
logger.debug(`Will autobuild ${languages.join(" and ")}.`);
// In general the autobuilders for other traced languages may conflict with
// each other. Therefore if a user has requested more than one non-Go traced
// language, we ask for manual build steps.
// Matrixing the build would also work, but that would change the SARIF
// categories, potentially leading to a "stale tips" situation where alerts
// that should be fixed remain on a repo since they are linked to SARIF
// categories that are no longer updated.
if (autobuildLanguagesWithoutGo.length > 1) {
logger.warning(`We will only automatically build ${languages.join(" and ")} code. If you wish to scan ${autobuildLanguagesWithoutGo
.slice(1) .slice(1)
.join(" and ")}, you must replace the autobuild step of your workflow with custom build steps. ` + .join(" and ")}, you must replace this call with custom build steps.`);
"For more information, see " +
"https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-the-codeql-workflow-for-compiled-languages#adding-build-steps-for-a-compiled-language");
} }
return languages; return language;
} }
exports.determineAutobuildLanguages = determineAutobuildLanguages; exports.determineAutobuildLanguage = determineAutobuildLanguage;
async function runAutobuild(language, config, logger) { async function runAutobuild(language, config, logger) {
logger.startGroup(`Attempting to automatically build ${language} code`); logger.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = await (0, codeql_1.getCodeQL)(config.codeQLCmd); const codeQL = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
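The hunk above shows two selection strategies side by side: determineAutobuildLanguage, which returns only the dominant traced language, and determineAutobuildLanguages, which builds the first non-Go traced language and then appends Go so a multi-language job such as "go, java" still extracts both. As a rough illustration of the difference, here is a minimal TypeScript sketch; the Language union and isTracedLanguage helper below are simplified stand-ins for the action's real modules, not the actual implementation.

```typescript
// Minimal sketch, assuming a simplified Language type and traced-language check.
type Language = "cpp" | "csharp" | "go" | "java" | "javascript" | "python";

const TRACED: Language[] = ["cpp", "csharp", "go", "java"];
const isTracedLanguage = (l: Language): boolean => TRACED.includes(l);

// Single-language variant: build only the dominant traced language.
function determineAutobuildLanguage(languages: Language[]): Language | undefined {
  return languages.filter(isTracedLanguage)[0];
}

// Multi-language variant: build the first non-Go traced language, then Go last
// if it was requested, so a "go, java" job still extracts both.
function determineAutobuildLanguages(languages: Language[]): Language[] {
  const traced = languages.filter(isTracedLanguage);
  const nonGo = traced.filter((l) => l !== "go");
  const selected: Language[] = [];
  if (nonGo[0] !== undefined) {
    selected.push(nonGo[0]);
  }
  if (traced.length !== nonGo.length) {
    selected.push("go");
  }
  return selected;
}

console.log(determineAutobuildLanguage(["go", "java"]));  // "go"
console.log(determineAutobuildLanguages(["go", "java"])); // [ "java", "go" ]
```

Running Go last matches the rationale in the comments above: it keeps the Go autobuilder from interfering with the other traced language's build.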

lib/autobuild.js.map generated

@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,qCAAqC;AAGrC,2CAAyD;AAEzD,6CAA+B;AAExB,KAAK,UAAU,2BAA2B,CAC/C,MAA0B,EAC1B,YAA0B,EAC1B,MAAc;IAEd,MAAM,mCAAmC,GACvC,MAAM,IAAI,CAAC,mCAAmC,CAAC,YAAY,CAAC,CAAC;IAC/D,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,EAAE,mCAAmC,EAAE,MAAM,CAAC,CACjE,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE;QACvB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE;QAChD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;KAChD;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE;QACpE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;KAC7B;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,4BAA4B;YAC5B,0NAA0N,CAC7N,CAAC;KACH;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAzFD,kEAyFC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"} {"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}

lib/codeql.js generated

@@ -22,24 +22,21 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod }; return (mod && mod.__esModule) ? mod : { "default": mod };
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CODEQL_VERSION_CONFIG_FILES = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = exports.CommandInvocationError = void 0; exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_CONFIG_FILES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner")); const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal")); const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
const yaml = __importStar(require("js-yaml")); const yaml = __importStar(require("js-yaml"));
const query_string_1 = __importDefault(require("query-string")); const query_string_1 = __importDefault(require("query-string"));
const semver = __importStar(require("semver")); const semver = __importStar(require("semver"));
const uuid_1 = require("uuid");
const actions_util_1 = require("./actions-util"); const actions_util_1 = require("./actions-util");
const api = __importStar(require("./api-client")); const api = __importStar(require("./api-client"));
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool! const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
const error_matcher_1 = require("./error-matcher"); const error_matcher_1 = require("./error-matcher");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages"); const languages_1 = require("./languages");
const toolcache = __importStar(require("./toolcache"));
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher"); const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
const trap_caching_1 = require("./trap-caching");
const util = __importStar(require("./util")); const util = __importStar(require("./util"));
const util_1 = require("./util"); const util_1 = require("./util");
class CommandInvocationError extends Error { class CommandInvocationError extends Error {
@@ -56,7 +53,7 @@ exports.CommandInvocationError = CommandInvocationError;
*/ */
let cachedCodeQL = undefined; let cachedCodeQL = undefined;
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion; const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
exports.CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action"; const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
/** /**
* The oldest version of CodeQL that the Action will run with. This should be * The oldest version of CodeQL that the Action will run with. This should be
* at least three minor versions behind the current version. The version flags * at least three minor versions behind the current version. The version flags
@@ -79,11 +76,8 @@ const CODEQL_VERSION_GROUP_RULES = "2.5.5";
const CODEQL_VERSION_SARIF_GROUP = "2.5.3"; const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2"; exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1"; const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
exports.CODEQL_VERSION_CONFIG_FILES = "2.8.2"; // Versions before 2.8.2 weren't tolerant to unknown properties
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5"; exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
exports.CODEQL_VERSION_CONFIG_FILES = "2.10.1";
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
/** /**
* This variable controls using the new style of tracing from the CodeQL * This variable controls using the new style of tracing from the CodeQL
* CLI. In particular, with versions above this we will use both indirect * CLI. In particular, with versions above this we will use both indirect
@@ -94,17 +88,6 @@ exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
* versions above that. * versions above that.
*/ */
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0"; exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
/**
* Versions 2.9.0+ of the CodeQL CLI run machine learning models from a temporary directory, which
* resolves an issue on Windows where TensorFlow models are not correctly loaded due to the path of
* some of their files being greater than MAX_PATH (260 characters).
*/
exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = "2.9.0";
/**
* Previous versions had the option already, but were missing the
* --extractor-options-verbosity that we need.
*/
exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = "2.10.3";
function getCodeQLBundleName() { function getCodeQLBundleName() {
let platform; let platform;
if (process.platform === "win32") { if (process.platform === "win32") {
@@ -123,7 +106,7 @@ function getCodeQLBundleName() {
} }
function getCodeQLActionRepository(logger) { function getCodeQLActionRepository(logger) {
if (!util.isActions()) { if (!util.isActions()) {
return exports.CODEQL_DEFAULT_ACTION_REPOSITORY; return CODEQL_DEFAULT_ACTION_REPOSITORY;
} }
else { else {
return getActionsCodeQLActionRepository(logger); return getActionsCodeQLActionRepository(logger);
@@ -140,7 +123,7 @@ function getActionsCodeQLActionRepository(logger) {
// This handles the case where the Action does not come from an Action repository, // This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout. // e.g. our integration tests which use the Action code from the current checkout.
logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository."); logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository.");
return exports.CODEQL_DEFAULT_ACTION_REPOSITORY; return CODEQL_DEFAULT_ACTION_REPOSITORY;
} }
logger.info("GITHUB_ACTION_REPOSITORY environment variable was not set. Falling back to legacy method of finding the GitHub Action."); logger.info("GITHUB_ACTION_REPOSITORY environment variable was not set. Falling back to legacy method of finding the GitHub Action.");
const relativeScriptPathParts = (0, actions_util_1.getRelativeScriptPath)().split(path.sep); const relativeScriptPathParts = (0, actions_util_1.getRelativeScriptPath)().split(path.sep);
@@ -152,9 +135,9 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
// This GitHub instance, and this Action. // This GitHub instance, and this Action.
[apiDetails.url, codeQLActionRepository], [apiDetails.url, codeQLActionRepository],
// This GitHub instance, and the canonical Action. // This GitHub instance, and the canonical Action.
[apiDetails.url, exports.CODEQL_DEFAULT_ACTION_REPOSITORY], [apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY],
// GitHub.com, and the canonical Action. // GitHub.com, and the canonical Action.
[util.GITHUB_DOTCOM_URL, exports.CODEQL_DEFAULT_ACTION_REPOSITORY], [util.GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
]; ];
// We now filter out any duplicates. // We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork. // Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
@@ -190,7 +173,7 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
const [apiURL, repository] = downloadSource; const [apiURL, repository] = downloadSource;
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public. // If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
if (apiURL === util.GITHUB_DOTCOM_URL && if (apiURL === util.GITHUB_DOTCOM_URL &&
repository === exports.CODEQL_DEFAULT_ACTION_REPOSITORY) { repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
break; break;
} }
const [repositoryOwner, repositoryName] = repository.split("/"); const [repositoryOwner, repositoryName] = repository.split("/");
@@ -211,7 +194,7 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`); logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
} }
} }
return `https://github.com/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`; return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
} }
/** /**
* Set up CodeQL CLI access. * Set up CodeQL CLI access.
@@ -219,51 +202,39 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
* @param codeqlURL * @param codeqlURL
* @param apiDetails * @param apiDetails
* @param tempDir * @param tempDir
* @param toolCacheDir
* @param variant * @param variant
* @param featureFlags
* @param logger * @param logger
* @param checkVersion Whether to check that CodeQL CLI meets the minimum * @param checkVersion Whether to check that CodeQL CLI meets the minimum
* version requirement. Must be set to true outside tests. * version requirement. Must be set to true outside tests.
* @returns * @returns
*/ */
async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, featureFlags, logger, checkVersion) { async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, checkVersion) {
try { try {
const forceLatestReason =
// We use the special value of 'latest' to prioritize the version in the // We use the special value of 'latest' to prioritize the version in the
// defaults over any pinned cached version. // defaults over any pinned cached version.
codeqlURL === "latest" const forceLatest = codeqlURL === "latest";
? '"tools: latest" was requested'
: // If the user hasn't requested a particular CodeQL version, then bypass
// the toolcache when the appropriate feature flag is enabled. This
// allows us to quickly rollback a broken bundle that has made its way
// into the toolcache.
codeqlURL === undefined &&
(await featureFlags.getValue(feature_flags_1.FeatureFlag.BypassToolcacheEnabled))
? "a specific version of CodeQL was not requested and the bypass toolcache feature flag is enabled"
: undefined;
const forceLatest = forceLatestReason !== undefined;
if (forceLatest) { if (forceLatest) {
logger.debug(`Forcing the latest version of the CodeQL tools since ${forceLatestReason}.`);
codeqlURL = undefined; codeqlURL = undefined;
} }
let codeqlFolder; let codeqlFolder;
let codeqlURLVersion; let codeqlURLVersion;
if (codeqlURL && !codeqlURL.startsWith("http")) { if (codeqlURL && !codeqlURL.startsWith("http")) {
codeqlFolder = await toolcache.extractTar(codeqlURL); codeqlFolder = await toolcache.extractTar(codeqlURL, tempDir, logger);
codeqlURLVersion = "local"; codeqlURLVersion = "local";
} }
else { else {
codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`); codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger); const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
// If we find the specified version, we always use that. // If we find the specified version, we always use that.
codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer); codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer, toolCacheDir, logger);
// If we don't find the requested version, in some cases we may allow a // If we don't find the requested version, in some cases we may allow a
// different version to save download time if the version hasn't been // different version to save download time if the version hasn't been
// specified explicitly (in which case we always honor it). // specified explicitly (in which case we always honor it).
if (!codeqlFolder && !codeqlURL && !forceLatest) { if (!codeqlFolder && !codeqlURL && !forceLatest) {
const codeqlVersions = toolcache.findAllVersions("CodeQL"); const codeqlVersions = toolcache.findAllVersions("CodeQL", toolCacheDir, logger);
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) { if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]); const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], toolCacheDir, logger);
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) { if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`); logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
codeqlFolder = tmpCodeqlFolder; codeqlFolder = tmpCodeqlFolder;
@@ -279,9 +250,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, featureFlags
} }
const parsedCodeQLURL = new URL(codeqlURL); const parsedCodeQLURL = new URL(codeqlURL);
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search); const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
const headers = { const headers = { accept: "application/octet-stream" };
accept: "application/octet-stream",
};
// We only want to provide an authorization header if we are downloading // We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on. // from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom. // This avoids leaking Enterprise tokens to dotcom.
@@ -295,12 +264,10 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, featureFlags
logger.debug("Downloading CodeQL bundle without token."); logger.debug("Downloading CodeQL bundle without token.");
} }
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`); logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
const dest = path.join(tempDir, (0, uuid_1.v4)()); const codeqlPath = await toolcache.downloadTool(codeqlURL, tempDir, headers);
const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, undefined, finalHeaders);
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`); logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
const codeqlExtracted = await toolcache.extractTar(codeqlPath); const codeqlExtracted = await toolcache.extractTar(codeqlPath, tempDir, logger);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer); codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer, toolCacheDir, logger);
} }
} }
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql"); let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
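Both sides of the setupCodeQL hunk above follow the same tool-cache flow: look the bundle up by a converted semver, and only download, extract, and cache it on a miss. One side calls the tool-cache functions with a single argument, the other goes through a wrapper that also takes tempDir and logger arguments. A stripped-down sketch of that pattern against the upstream @actions/tool-cache package, with a placeholder URL and version:

```typescript
import * as toolcache from "@actions/tool-cache";

// Placeholder values for illustration only.
const bundleUrl =
  "https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle-linux64.tar.gz";
const semverVersion = "0.0.0-20200601";

async function ensureCodeQLBundle(): Promise<string> {
  // Reuse a previously cached bundle if the exact version is present.
  let codeqlFolder = toolcache.find("CodeQL", semverVersion);
  if (codeqlFolder) {
    return codeqlFolder;
  }
  // Otherwise download, extract, and add it to the tool cache for next time.
  const archive = await toolcache.downloadTool(bundleUrl);
  const extracted = await toolcache.extractTar(archive);
  return await toolcache.cacheDir(extracted, "CodeQL", semverVersion);
}

ensureCodeQLBundle().then((dir) => console.log(`CodeQL bundle at ${dir}`));
```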
@@ -379,7 +346,6 @@ function setCodeQL(partialCodeql) {
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"), extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"), finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
resolveLanguages: resolveFunction(partialCodeql, "resolveLanguages"), resolveLanguages: resolveFunction(partialCodeql, "resolveLanguages"),
betterResolveLanguages: resolveFunction(partialCodeql, "betterResolveLanguages"),
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"), resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
packDownload: resolveFunction(partialCodeql, "packDownload"), packDownload: resolveFunction(partialCodeql, "packDownload"),
databaseCleanup: resolveFunction(partialCodeql, "databaseCleanup"), databaseCleanup: resolveFunction(partialCodeql, "databaseCleanup"),
@@ -410,8 +376,8 @@ exports.getCachedCodeQL = getCachedCodeQL;
* a non-existent placeholder codeql command, so tests that use this function * a non-existent placeholder codeql command, so tests that use this function
* should also stub the toolrunner.ToolRunner constructor. * should also stub the toolrunner.ToolRunner constructor.
*/ */
async function getCodeQLForTesting(cmd = "codeql-for-testing") { async function getCodeQLForTesting() {
return getCodeQLForCmd(cmd, false); return getCodeQLForCmd("codeql-for-testing", false);
} }
exports.getCodeQLForTesting = getCodeQLForTesting; exports.getCodeQLForTesting = getCodeQLForTesting;
/** /**
@@ -430,7 +396,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
async getVersion() { async getVersion() {
let result = util.getCachedCodeQlVersion(); let result = util.getCachedCodeQlVersion();
if (result === undefined) { if (result === undefined) {
result = (await runTool(cmd, ["version", "--format=terse"])).trim(); result = await runTool(cmd, ["version", "--format=terse"]);
util.cacheCodeQlVersion(result); util.cacheCodeQlVersion(result);
} }
return result; return result;
@@ -488,12 +454,10 @@ async function getCodeQLForCmd(cmd, checkVersion) {
...getExtraOptionsFromEnv(["database", "init"]), ...getExtraOptionsFromEnv(["database", "init"]),
]); ]);
}, },
async databaseInitCluster(config, sourceRoot, processName, processLevel, featureFlags, logger) { async databaseInitCluster(config, sourceRoot, processName, processLevel) {
const extraArgs = config.languages.map((language) => `--language=${language}`); const extraArgs = config.languages.map((language) => `--language=${language}`);
const isGoExtractionReconciliationEnabled = await util.isGoExtractionReconciliationEnabled(featureFlags); if (config.languages.filter(languages_1.isTracedLanguage).length > 0) {
if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l, isGoExtractionReconciliationEnabled, logger)).length > 0) {
extraArgs.push("--begin-tracing"); extraArgs.push("--begin-tracing");
extraArgs.push(...(await (0, trap_caching_1.getTrapCachingExtractorConfigArgs)(config)));
if (processName !== undefined) { if (processName !== undefined) {
extraArgs.push(`--trace-process-name=${processName}`); extraArgs.push(`--trace-process-name=${processName}`);
} }
@@ -503,20 +467,28 @@ async function getCodeQLForCmd(cmd, checkVersion) {
// because that always passes in a process name. // because that always passes in a process name.
extraArgs.push(`--trace-process-level=${processLevel || 3}`); extraArgs.push(`--trace-process-level=${processLevel || 3}`);
} }
if (
// There's a bug in Lua tracing for Go on Windows in versions earlier than
// `CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED`, so don't use Lua tracing
// when tracing Go on Windows on these CodeQL versions.
(await util.codeQlVersionAbove(this, CODEQL_VERSION_LUA_TRACER_CONFIG)) &&
config.languages.includes(languages_1.Language.go) &&
(0, languages_1.isTracedLanguage)(languages_1.Language.go, isGoExtractionReconciliationEnabled, logger) &&
process.platform === "win32" &&
!(await util.codeQlVersionAbove(this, CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED))) {
extraArgs.push("--no-internal-use-lua-tracing");
}
} }
const configLocation = await generateCodescanningConfig(codeql, config, featureFlags); if (await util.codeQlVersionAbove(codeql, exports.CODEQL_VERSION_CONFIG_FILES)) {
if (configLocation) { const configLocation = path.resolve(config.tempDir, "user-config.yaml");
const augmentedConfig = config.originalUserInput;
if (config.injectedMlQueries) {
// We need to inject the ML queries into the original user input before
// we pass this on to the CLI, to make sure these get run.
let packString = util_1.ML_POWERED_JS_QUERIES_PACK.packName;
if (util_1.ML_POWERED_JS_QUERIES_PACK.version)
packString = `${packString}@${util_1.ML_POWERED_JS_QUERIES_PACK.version}`;
if (augmentedConfig.packs === undefined)
augmentedConfig.packs = [];
if (Array.isArray(augmentedConfig.packs)) {
augmentedConfig.packs.push(packString);
}
else {
if (!augmentedConfig.packs.javascript)
augmentedConfig.packs["javascript"] = [];
augmentedConfig.packs["javascript"].push(packString);
}
}
fs.writeFileSync(configLocation, yaml.dump(augmentedConfig));
extraArgs.push(`--codescanning-config=${configLocation}`); extraArgs.push(`--codescanning-config=${configLocation}`);
} }
await runTool(cmd, [ await runTool(cmd, [
@@ -543,23 +515,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"-Dhttp.keepAlive=false", "-Dhttp.keepAlive=false",
"-Dmaven.wagon.http.pool=false", "-Dmaven.wagon.http.pool=false",
].join(" "); ].join(" ");
// On macOS, System Integrity Protection (SIP) typically interferes with
// CodeQL build tracing of protected binaries.
// The usual workaround is to prefix `$CODEQL_RUNNER` to build commands:
// `$CODEQL_RUNNER` (not to be confused with the deprecated CodeQL Runner tool)
// points to a simple wrapper binary included with the CLI, and the extra layer of
// process indirection helps the tracer bypass SIP.
// The above SIP workaround is *not* needed here.
// At the `autobuild` step in the Actions workflow, we assume the `init` step
// has successfully run, and will have exported `DYLD_INSERT_LIBRARIES`
// into the environment of subsequent steps, to activate the tracer.
// When `DYLD_INSERT_LIBRARIES` is set in the environment for a step,
// the Actions runtime introduces its own workaround for SIP
// (https://github.com/actions/runner/pull/416).
await runTool(autobuildCmd); await runTool(autobuildCmd);
}, },
async extractScannedLanguage(config, language) { async extractScannedLanguage(databasePath, language) {
const databasePath = util.getCodeQLDatabasePath(config, language);
// Get extractor location // Get extractor location
let extractorPath = ""; let extractorPath = "";
await new toolrunner.ToolRunner(cmd, [ await new toolrunner.ToolRunner(cmd, [
@@ -586,7 +544,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, [ await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, [
"database", "database",
"trace-command", "trace-command",
...(await (0, trap_caching_1.getTrapCachingExtractorConfigArgsForLang)(config, language)),
...getExtraOptionsFromEnv(["database", "trace-command"]), ...getExtraOptionsFromEnv(["database", "trace-command"]),
databasePath, databasePath,
"--", "--",
@@ -621,22 +578,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
throw new Error(`Unexpected output from codeql resolve languages: ${e}`); throw new Error(`Unexpected output from codeql resolve languages: ${e}`);
} }
}, },
async betterResolveLanguages() {
const codeqlArgs = [
"resolve",
"languages",
"--format=betterjson",
"--extractor-options-verbosity=4",
...getExtraOptionsFromEnv(["resolve", "languages"]),
];
const output = await runTool(cmd, codeqlArgs);
try {
return JSON.parse(output);
}
catch (e) {
throw new Error(`Unexpected output from codeql resolve languages with --format=betterjson: ${e}`);
}
},
async resolveQueries(queries, extraSearchPath) { async resolveQueries(queries, extraSearchPath) {
const codeqlArgs = [ const codeqlArgs = [
"resolve", "resolve",
@@ -670,18 +611,18 @@ async function getCodeQLForCmd(cmd, checkVersion) {
if (extraSearchPath !== undefined) { if (extraSearchPath !== undefined) {
codeqlArgs.push("--additional-packs", extraSearchPath); codeqlArgs.push("--additional-packs", extraSearchPath);
} }
if (querySuitePath) { if (!(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_CONFIG_FILES))) {
codeqlArgs.push(querySuitePath); codeqlArgs.push(querySuitePath);
} }
await runTool(cmd, codeqlArgs); await runTool(cmd, codeqlArgs);
}, },
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId) { async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId) {
const codeqlArgs = [ const codeqlArgs = [
"database", "database",
"interpret-results", "interpret-results",
threadsFlag, threadsFlag,
"--format=sarif-latest", "--format=sarif-latest",
verbosityFlag, "-v",
`--output=${sarifFile}`, `--output=${sarifFile}`,
addSnippetsFlag, addSnippetsFlag,
...getExtraOptionsFromEnv(["database", "interpret-results"]), ...getExtraOptionsFromEnv(["database", "interpret-results"]),
@@ -699,7 +640,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
codeqlArgs.push("--sarif-category", automationDetailsId); codeqlArgs.push("--sarif-category", automationDetailsId);
} }
codeqlArgs.push(databasePath); codeqlArgs.push(databasePath);
if (querySuitePaths) { if (!(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_CONFIG_FILES))) {
codeqlArgs.push(...querySuitePaths); codeqlArgs.push(...querySuitePaths);
} }
// capture stdout, which contains analysis summaries // capture stdout, which contains analysis summaries
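Several hunks above gate CLI arguments on the resolved CodeQL version via util.codeQlVersionAbove(this, exports.CODEQL_VERSION_CONFIG_FILES), for example only passing query suite paths to older CLIs that cannot read them from a code scanning configuration. A rough equivalent of that gating, assuming a plain semver comparison; the helper below is illustrative, not the action's codeQlVersionAbove:

```typescript
import * as semver from "semver";

// Illustration of the version-gating pattern used throughout the diff:
// only pass older-style CLI arguments when the resolved CodeQL version needs them.
const CODEQL_VERSION_CONFIG_FILES = "2.8.2"; // value taken from one side of the diff

function buildArgs(codeqlVersion: string, querySuitePaths: string[]): string[] {
  const args = ["database", "interpret-results", "--format=sarif-latest"];
  // Older CLIs need the query suites spelled out on the command line; newer
  // ones read them from the code scanning configuration instead.
  if (!semver.gte(codeqlVersion, CODEQL_VERSION_CONFIG_FILES)) {
    args.push(...querySuitePaths);
  }
  return args;
}

console.log(buildArgs("2.7.0", ["suite.qls"])); // includes suite.qls
console.log(buildArgs("2.9.0", ["suite.qls"])); // omits it
```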
@@ -723,22 +664,14 @@ async function getCodeQLForCmd(cmd, checkVersion) {
* If no version is specified, then the latest version is * If no version is specified, then the latest version is
* downloaded. The check to determine what the latest version is is done * downloaded. The check to determine what the latest version is is done
* each time this package is requested. * each time this package is requested.
*
* Optionally, a `qlconfigFile` is included. If used, then this file
* is used to determine which registry each pack is downloaded from.
*/ */
async packDownload(packs, qlconfigFile) { async packDownload(packs) {
const qlconfigArg = qlconfigFile
? [`--qlconfig-file=${qlconfigFile}`]
: [];
const codeqlArgs = [ const codeqlArgs = [
"pack", "pack",
"download", "download",
...qlconfigArg,
"--format=json", "--format=json",
"--resolve-query-specs",
...getExtraOptionsFromEnv(["pack", "download"]), ...getExtraOptionsFromEnv(["pack", "download"]),
...packs, ...packs.map(packWithVersionToString),
]; ];
const output = await runTool(cmd, codeqlArgs); const output = await runTool(cmd, codeqlArgs);
try { try {
@@ -794,6 +727,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
} }
return codeql; return codeql;
} }
function packWithVersionToString(pack) {
return pack.version ? `${pack.packName}@${pack.version}` : pack.packName;
}
/** /**
* Gets the options for `path` of `options` as an array of extra option strings. * Gets the options for `path` of `options` as an array of extra option strings.
*/ */
@@ -867,77 +803,4 @@ async function runTool(cmd, args = []) {
throw new CommandInvocationError(cmd, args, exitCode, error); throw new CommandInvocationError(cmd, args, exitCode, error);
return output; return output;
} }
/**
* If appropriate, generates a code scanning configuration that is to be used for a scan.
* If the configuration is not to be generated, returns undefined.
*
* @param codeql The CodeQL object to use.
* @param config The configuration to use.
* @returns the path to the generated user configuration file.
*/
async function generateCodescanningConfig(codeql, config, featureFlags) {
var _a;
if (!(await util.useCodeScanningConfigInCli(codeql, featureFlags))) {
return;
}
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
// make a copy so we can modify it
const augmentedConfig = cloneObject(config.originalUserInput);
// Inject the queries from the input
if (config.augmentationProperties.queriesInput) {
if (config.augmentationProperties.queriesInputCombines) {
augmentedConfig.queries = (augmentedConfig.queries || []).concat(config.augmentationProperties.queriesInput);
}
else {
augmentedConfig.queries = config.augmentationProperties.queriesInput;
}
}
if (((_a = augmentedConfig.queries) === null || _a === void 0 ? void 0 : _a.length) === 0) {
delete augmentedConfig.queries;
}
// Inject the packs from the input
if (config.augmentationProperties.packsInput) {
if (config.augmentationProperties.packsInputCombines) {
// At this point, we already know that this is a single-language analysis
if (Array.isArray(augmentedConfig.packs)) {
augmentedConfig.packs = (augmentedConfig.packs || []).concat(config.augmentationProperties.packsInput);
}
else if (!augmentedConfig.packs) {
augmentedConfig.packs = config.augmentationProperties.packsInput;
}
else {
// At this point, we know there is only one language.
// If there were more than one language, an error would already have been thrown.
const language = Object.keys(augmentedConfig.packs)[0];
augmentedConfig.packs[language] = augmentedConfig.packs[language].concat(config.augmentationProperties.packsInput);
}
}
else {
augmentedConfig.packs = config.augmentationProperties.packsInput;
}
}
if (Array.isArray(augmentedConfig.packs) && !augmentedConfig.packs.length) {
delete augmentedConfig.packs;
}
if (config.augmentationProperties.injectedMlQueries) {
// We need to inject the ML queries into the original user input before
// we pass this on to the CLI, to make sure these get run.
const packString = await util.getMlPoweredJsQueriesPack(codeql);
if (augmentedConfig.packs === undefined)
augmentedConfig.packs = [];
if (Array.isArray(augmentedConfig.packs)) {
augmentedConfig.packs.push(packString);
}
else {
if (!augmentedConfig.packs.javascript)
augmentedConfig.packs["javascript"] = [];
augmentedConfig.packs["javascript"].push(packString);
}
}
fs.writeFileSync(configLocation, yaml.dump(augmentedConfig));
return configLocation;
}
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
//# sourceMappingURL=codeql.js.map //# sourceMappingURL=codeql.js.map
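The generateCodescanningConfig helper removed in the last hunk merges the queries and packs injected by the action into a copy of the user's original configuration, writes the result to user-config.yaml in the temp directory, and returns the path so it can be passed as --codescanning-config. A simplified sketch of that shape follows; the types are trimmed for illustration and only array-form packs are handled, so this is not the action's actual implementation.

```typescript
import * as fs from "fs";
import * as path from "path";
import * as yaml from "js-yaml";

interface AugmentationProperties {
  queriesInput?: Array<{ uses: string }>;
  queriesInputCombines: boolean;
  packsInput?: string[];
  packsInputCombines: boolean;
}

// Simplified: merge the workflow-level queries/packs inputs into a copy of the
// user's config and write it out for the CLI to consume.
function generateCodescanningConfig(
  tempDir: string,
  originalUserInput: Record<string, unknown>,
  props: AugmentationProperties
): string {
  // Deep-copy so the original user input is left untouched.
  const augmented: any = JSON.parse(JSON.stringify(originalUserInput));

  if (props.queriesInput) {
    augmented.queries = props.queriesInputCombines
      ? [...(augmented.queries ?? []), ...props.queriesInput]
      : props.queriesInput;
  }
  if (props.packsInput) {
    augmented.packs = props.packsInputCombines
      ? [...(augmented.packs ?? []), ...props.packsInput]
      : props.packsInput;
  }

  const configLocation = path.resolve(tempDir, "user-config.yaml");
  fs.writeFileSync(configLocation, yaml.dump(augmented));
  return configLocation; // handed to the CLI as --codescanning-config=<path>
}

console.log(
  generateCodescanningConfig(
    ".",
    { queries: [{ uses: "security-extended" }] },
    {
      queriesInput: [{ uses: "./local-query.ql" }],
      queriesInputCombines: true,
      packsInputCombines: false,
    }
  )
);
```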

File diff suppressed because one or more lines are too long

lib/codeql.test.js generated

@@ -22,20 +22,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod }; return (mod && mod.__esModule) ? mod : { "default": mod };
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.stubToolRunnerConstructor = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner")); const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const toolcache = __importStar(require("@actions/tool-cache")); const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava")); const ava_1 = __importDefault(require("ava"));
const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml"));
const nock_1 = __importDefault(require("nock")); const nock_1 = __importDefault(require("nock"));
const sinon = __importStar(require("sinon")); const sinon = __importStar(require("sinon"));
const codeql = __importStar(require("./codeql")); const codeql = __importStar(require("./codeql"));
const defaults = __importStar(require("./defaults.json")); const defaults = __importStar(require("./defaults.json"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging"); const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils"); const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util")); const util = __importStar(require("./util"));
@@ -44,94 +38,54 @@ const util_1 = require("./util");
const sampleApiDetails = { const sampleApiDetails = {
auth: "token", auth: "token",
url: "https://github.com", url: "https://github.com",
apiURL: undefined,
registriesAuthTokens: undefined,
}; };
const sampleGHAEApiDetails = { const sampleGHAEApiDetails = {
auth: "token", auth: "token",
url: "https://example.githubenterprise.com", url: "https://example.githubenterprise.com",
apiURL: undefined,
registriesAuthTokens: undefined,
}; };
let stubConfig;
ava_1.default.beforeEach(() => { ava_1.default.beforeEach(() => {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3"); (0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
stubConfig = {
languages: [languages_1.Language.cpp],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: "",
codeQLCmd: "",
gitHubVersion: {
type: util.GitHubVariant.DOTCOM,
},
dbLocation: "",
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: {
injectedMlQueries: false,
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
}); });
async function mockApiAndSetupCodeQL({ apiDetails, featureFlags, isPinned, tmpDir, toolsInput, version, }) {
var _a;
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: "osx64";
const baseUrl = (_a = apiDetails === null || apiDetails === void 0 ? void 0 : apiDetails.url) !== null && _a !== void 0 ? _a : "https://example.com";
const relativeUrl = apiDetails
? `/github/codeql-action/releases/download/${version}/codeql-bundle-${platform}.tar.gz`
: `/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
(0, nock_1.default)(baseUrl)
.get(relativeUrl)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle${isPinned ? "-pinned" : ""}.tar.gz`));
await codeql.setupCodeQL(toolsInput ? toolsInput.input : `${baseUrl}${relativeUrl}`, apiDetails !== null && apiDetails !== void 0 ? apiDetails : sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, featureFlags !== null && featureFlags !== void 0 ? featureFlags : (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true), false);
}
(0, ava_1.default)("download codeql bundle cache", async (t) => { (0, ava_1.default)("download codeql bundle cache", async (t) => {
await util.withTmpDir(async (tmpDir) => { await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir); (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const versions = ["20200601", "20200610"]; const versions = ["20200601", "20200610"];
for (let i = 0; i < versions.length; i++) { for (let i = 0; i < versions.length; i++) {
const version = versions[i]; const version = versions[i];
await mockApiAndSetupCodeQL({ version, tmpDir }); (0, nock_1.default)("https://example.com")
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`)); t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
} }
t.is(toolcache.findAllVersions("CodeQL").length, 2); const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
}); });
}); });
(0, ava_1.default)("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => { (0, ava_1.default)("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
await util.withTmpDir(async (tmpDir) => { await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir); (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
await mockApiAndSetupCodeQL({ (0, nock_1.default)("https://example.com")
version: "20200601", .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
isPinned: true, .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
tmpDir, await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
});
t.assert(toolcache.find("CodeQL", "0.0.0-20200601")); t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
await mockApiAndSetupCodeQL({ version: "20200610", tmpDir }); (0, nock_1.default)("https://example.com")
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", "0.0.0-20200610")); t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
}); });
}); });
(0, ava_1.default)("don't download codeql bundle cache with pinned different version cached", async (t) => { (0, ava_1.default)("don't download codeql bundle cache with pinned different version cached", async (t) => {
await util.withTmpDir(async (tmpDir) => { await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir); (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
await mockApiAndSetupCodeQL({ (0, nock_1.default)("https://example.com")
version: "20200601", .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
isPinned: true, .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
tmpDir, await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
});
t.assert(toolcache.find("CodeQL", "0.0.0-20200601")); t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true), false); await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
const cachedVersions = toolcache.findAllVersions("CodeQL"); const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1); t.is(cachedVersions.length, 1);
}); });
@@ -139,14 +93,20 @@ async function mockApiAndSetupCodeQL({ apiDetails, featureFlags, isPinned, tmpDi
(0, ava_1.default)("download codeql bundle cache with different version cached (not pinned)", async (t) => { (0, ava_1.default)("download codeql bundle cache with different version cached (not pinned)", async (t) => {
await util.withTmpDir(async (tmpDir) => { await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir); (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
await mockApiAndSetupCodeQL({ version: "20200601", tmpDir }); (0, nock_1.default)("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", "0.0.0-20200601")); t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
await mockApiAndSetupCodeQL({ const platform = process.platform === "win32"
version: defaults.bundleVersion, ? "win64"
tmpDir, : process.platform === "linux"
apiDetails: sampleApiDetails, ? "linux64"
toolsInput: { input: undefined }, : "osx64";
}); (0, nock_1.default)("https://github.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
const cachedVersions = toolcache.findAllVersions("CodeQL"); const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2); t.is(cachedVersions.length, 2);
}); });
@@ -154,54 +114,24 @@ async function mockApiAndSetupCodeQL({ apiDetails, featureFlags, isPinned, tmpDi
(0, ava_1.default)('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => { (0, ava_1.default)('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
await util.withTmpDir(async (tmpDir) => { await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir); (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
await mockApiAndSetupCodeQL({ (0, nock_1.default)("https://example.com")
version: "20200601", .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
isPinned: true, .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
tmpDir, await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
});
t.assert(toolcache.find("CodeQL", "0.0.0-20200601")); t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
await mockApiAndSetupCodeQL({ const platform = process.platform === "win32"
version: defaults.bundleVersion, ? "win64"
apiDetails: sampleApiDetails, : process.platform === "linux"
toolsInput: { input: "latest" }, ? "linux64"
tmpDir, : "osx64";
}); (0, nock_1.default)("https://github.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
const cachedVersions = toolcache.findAllVersions("CodeQL"); const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2); t.is(cachedVersions.length, 2);
}); });
}); });
const TOOLCACHE_BYPASS_TEST_CASES = [
[true, undefined, true],
[false, undefined, false],
[
true,
"https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz",
false,
],
];
for (const [isFeatureFlagEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCACHE_BYPASS_TEST_CASES) {
(0, ava_1.default)(`download codeql bundle ${shouldToolcacheBeBypassed ? "bypasses" : "does not bypass"} toolcache when feature flag ${isFeatureFlagEnabled ? "enabled" : "disabled"} and tools: ${toolsInput} passed`, async (t) => {
await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
await mockApiAndSetupCodeQL({
version: "codeql-bundle-20200601",
apiDetails: sampleApiDetails,
isPinned: true,
tmpDir,
});
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
await mockApiAndSetupCodeQL({
version: defaults.bundleVersion,
apiDetails: sampleApiDetails,
featureFlags: (0, feature_flags_1.createFeatureFlags)(isFeatureFlagEnabled ? [feature_flags_1.FeatureFlag.BypassToolcacheEnabled] : []),
toolsInput: { input: toolsInput },
tmpDir,
});
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, shouldToolcacheBeBypassed ? 2 : 1);
});
});
}
(0, ava_1.default)("download codeql bundle from github ae endpoint", async (t) => { (0, ava_1.default)("download codeql bundle from github ae endpoint", async (t) => {
await util.withTmpDir(async (tmpDir) => { await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir); (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -225,7 +155,7 @@ for (const [isFeatureFlagEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOO
(0, nock_1.default)("https://example.githubenterprise.com") (0, nock_1.default)("https://example.githubenterprise.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`) .get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`)); .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true), false); await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, tmpDir, util.GitHubVariant.GHAE, (0, logging_1.getRunnerLogger)(true), false);
const cachedVersions = toolcache.findAllVersions("CodeQL"); const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1); t.is(cachedVersions.length, 1);
}); });
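The download tests above rely on nock to intercept the bundle request and serve a local fixture tarball via replyWithFile, so no real network traffic or GitHub release is needed. A stripped-down sketch of that pattern, with placeholder host, route, and fixture path:

```typescript
import * as path from "path";
import nock from "nock";

// Intercept the bundle download and serve a local fixture tarball instead.
// Host, route, and fixture name are placeholders for illustration.
nock("https://example.com")
  .get("/download/codeql-bundle-20200601/codeql-bundle.tar.gz")
  .replyWithFile(200, path.join(__dirname, "testdata/codeql-bundle.tar.gz"));

// Code under test that fetches this URL now receives the fixture, and the
// test can assert on what ends up in the tool cache afterwards.
```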
@@ -293,269 +223,16 @@ for (const [isFeatureFlagEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOO
const runnerConstructorStub = stubToolRunnerConstructor(); const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting(); const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0"); sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", ""); await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present"); t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
}); });
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => { (0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor(); const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting(); const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1"); sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", ""); await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent"); t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
}); });
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.8.1");
const thisStubConfig = {
...stubConfig,
tempDir,
augmentationProperties: {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
},
};
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const args = runnerConstructorStub.firstCall.args[1];
// should NOT have used an config file
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
t.falsy(configArg, "Should have injected a codescanning config");
});
});
// Test macro for ensuring different variants of injected augmented configurations
const injectedConfigMacro = ava_1.default.macro({
exec: async (t, augmentationProperties, configOverride, expectedConfig) => {
const origCODEQL_PASS_CONFIG_TO_CLI = process.env.CODEQL_PASS_CONFIG_TO_CLI;
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = "true";
try {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon
.stub(codeqlObject, "getVersion")
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
const thisStubConfig = {
...stubConfig,
...configOverride,
tempDir,
augmentationProperties,
};
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const args = runnerConstructorStub.firstCall.args[1];
// should have used an config file
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
t.truthy(configArg, "Should have injected a codescanning config");
const configFile = configArg.split("=")[1];
const augmentedConfig = yaml.load(fs.readFileSync(configFile, "utf8"));
t.deepEqual(augmentedConfig, expectedConfig);
await (0, del_1.default)(configFile, { force: true });
});
}
finally {
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
}
},
title: (providedTitle = "") => `databaseInitCluster() injected config: ${providedTitle}`,
});
(0, ava_1.default)("basic", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
}, {}, {});
(0, ava_1.default)("injected ML queries", injectedConfigMacro, {
injectedMlQueries: true,
queriesInputCombines: false,
packsInputCombines: false,
}, {}, {
packs: ["codeql/javascript-experimental-atm-queries@~0.3.0"],
});
(0, ava_1.default)("injected ML queries with existing packs", injectedConfigMacro, {
injectedMlQueries: true,
queriesInputCombines: false,
packsInputCombines: false,
}, {
originalUserInput: {
packs: { javascript: ["codeql/something-else"] },
},
}, {
packs: {
javascript: [
"codeql/something-else",
"codeql/javascript-experimental-atm-queries@~0.3.0",
],
},
});
(0, ava_1.default)("injected ML queries with existing packs of different language", injectedConfigMacro, {
injectedMlQueries: true,
queriesInputCombines: false,
packsInputCombines: false,
}, {
originalUserInput: {
packs: { cpp: ["codeql/something-else"] },
},
}, {
packs: {
cpp: ["codeql/something-else"],
javascript: ["codeql/javascript-experimental-atm-queries@~0.3.0"],
},
});
(0, ava_1.default)("injected packs from input", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
packsInput: ["xxx", "yyy"],
}, {}, {
packs: ["xxx", "yyy"],
});
(0, ava_1.default)("injected packs from input with existing packs combines", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: true,
packsInput: ["xxx", "yyy"],
}, {
originalUserInput: {
packs: {
cpp: ["codeql/something-else"],
},
},
}, {
packs: {
cpp: ["codeql/something-else", "xxx", "yyy"],
},
});
(0, ava_1.default)("injected packs from input with existing packs overrides", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
packsInput: ["xxx", "yyy"],
}, {
originalUserInput: {
packs: {
cpp: ["codeql/something-else"],
},
},
}, {
packs: ["xxx", "yyy"],
});
(0, ava_1.default)("injected packs from input with existing packs overrides and ML model inject", injectedConfigMacro, {
injectedMlQueries: true,
queriesInputCombines: false,
packsInputCombines: false,
packsInput: ["xxx", "yyy"],
}, {
originalUserInput: {
packs: {
cpp: ["codeql/something-else"],
},
},
}, {
packs: ["xxx", "yyy", "codeql/javascript-experimental-atm-queries@~0.3.0"],
});
// similar, but with queries
(0, ava_1.default)("injected queries from input", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
}, {}, {
queries: [
{
uses: "xxx",
},
{
uses: "yyy",
},
],
});
(0, ava_1.default)("injected queries from input overrides", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: false,
packsInputCombines: false,
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
}, {
originalUserInput: {
queries: [{ uses: "zzz" }],
},
}, {
queries: [
{
uses: "xxx",
},
{
uses: "yyy",
},
],
});
(0, ava_1.default)("injected queries from input combines", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: true,
packsInputCombines: false,
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
}, {
originalUserInput: {
queries: [{ uses: "zzz" }],
},
}, {
queries: [
{
uses: "zzz",
},
{
uses: "xxx",
},
{
uses: "yyy",
},
],
});
(0, ava_1.default)("injected queries from input combines 2", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: true,
packsInputCombines: true,
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
}, {}, {
queries: [
{
uses: "xxx",
},
{
uses: "yyy",
},
],
});
(0, ava_1.default)("injected queries and packs, but empty", injectedConfigMacro, {
injectedMlQueries: false,
queriesInputCombines: true,
packsInputCombines: true,
queriesInput: [],
packsInput: [],
}, {
originalUserInput: {
packs: [],
queries: [],
},
}, {});
(0, ava_1.default)("does not use injected config", async (t) => {
const origCODEQL_PASS_CONFIG_TO_CLI = process.env.CODEQL_PASS_CONFIG_TO_CLI;
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = "false";
try {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon
.stub(codeqlObject, "getVersion")
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const args = runnerConstructorStub.firstCall.args[1];
// should NOT have used a config file
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
t.falsy(configArg, "Should NOT have injected a codescanning config");
}
finally {
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
}
});
function stubToolRunnerConstructor() {
const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
runnerObjectStub.exec.resolves(0);
@@ -563,5 +240,4 @@ function stubToolRunnerConstructor() {
runnerConstructorStub.returns(runnerObjectStub);
return runnerConstructorStub;
}
-exports.stubToolRunnerConstructor = stubToolRunnerConstructor;
//# sourceMappingURL=codeql.test.js.map

File diff suppressed because one or more lines are too long

lib/config-utils.js generated
@@ -19,11 +19,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
+exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.parsePacksFromConfig = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPacksRequireLanguage = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
-// We need to import `performance` on Node 12
-const perf_hooks_1 = require("perf_hooks");
const yaml = __importStar(require("js-yaml"));
const semver = __importStar(require("semver"));
const api = __importStar(require("./api-client"));
@@ -31,7 +29,6 @@ const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
-const trap_caching_1 = require("./trap-caching");
const util_1 = require("./util");
// Property names from the user-supplied config file.
const NAME_PROPERTY = "name";
@@ -41,17 +38,6 @@ const QUERIES_USES_PROPERTY = "uses";
const PATHS_IGNORE_PROPERTY = "paths-ignore";
const PATHS_PROPERTY = "paths";
const PACKS_PROPERTY = "packs";
/**
* The default, empty augmentation properties. This is most useful
* for tests.
*/
exports.defaultAugmentationProperties = {
queriesInputCombines: false,
packsInputCombines: false,
injectedMlQueries: false,
packsInput: undefined,
queriesInput: undefined,
};
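As a small illustration of how these defaults are intended to be used in tests, a sketch (the overridden values are made up):
// Sketch only: spread the defaults and override just the fields a test cares about.
const augmentationProperties = {
    ...exports.defaultAugmentationProperties,
    packsInputCombines: true,
    packsInput: ["codeql/some-pack@1.0.0"], // illustrative pack name
};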
/**
* A list of queries from https://github.com/github/codeql that
* we don't want to run. Disabling them here is a quicker alternative to
@@ -144,28 +130,21 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
// pack, then add the ML-powered query pack so that we run ML-powered queries.
-if (
-// Only run ML-powered queries on Windows if we have a CLI that supports it.
-(process.platform !== "win32" ||
-(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS))) &&
-languages.includes("javascript") &&
+if (languages.includes("javascript") &&
(found === "security-extended" || found === "security-and-quality") &&
-!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some(isMlPoweredJsQueriesPack)) &&
+!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some((pack) => pack.packName === util_1.ML_POWERED_JS_QUERIES_PACK.packName)) &&
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
if (!packs.javascript) {
packs.javascript = [];
}
-packs.javascript.push(await (0, util_1.getMlPoweredJsQueriesPack)(codeQL));
+packs.javascript.push(util_1.ML_POWERED_JS_QUERIES_PACK);
injectedMlQueries = true;
}
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
return injectedMlQueries;
}
-function isMlPoweredJsQueriesPack(pack) {
-return parsePacksSpecification(pack).name === util_1.ML_POWERED_JS_QUERIES_PACK_NAME;
-}
/**
* Retrieve the set of queries at localQueryPath and add them to resultMap.
*/
@@ -305,10 +284,6 @@ function getQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array");
}
exports.getQueriesInvalid = getQueriesInvalid;
-function getQueriesMissingUses(configFile) {
-return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array, with each entry having a 'uses' property");
-}
-exports.getQueriesMissingUses = getQueriesMissingUses;
function getQueryUsesInvalid(configFile, queryUses) {
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `must be a built-in suite (${builtinSuites.join(" or ")}), a relative path, or be of the form "owner/repo[/path]@ref"${queryUses !== undefined ? `\n Found: ${queryUses}` : ""}`);
}
@@ -322,8 +297,9 @@ function getPathsInvalid(configFile) {
}
exports.getPathsInvalid = getPathsInvalid;
function getPacksRequireLanguage(lang, configFile) {
-return getConfigFilePropertyError(configFile, PACKS_PROPERTY, `has "${lang}", but it is not a valid language.`);
+return getConfigFilePropertyError(configFile, PACKS_PROPERTY, `has "${lang}", but it is not one of the languages to analyze`);
}
+exports.getPacksRequireLanguage = getPacksRequireLanguage;
function getPacksInvalidSplit(configFile) {
return getConfigFilePropertyError(configFile, PACKS_PROPERTY, "must split packages by language");
}
@@ -478,7 +454,8 @@ function shouldAddConfigFileQueries(queriesInput) {
/**
* Get the default config for when the user has not supplied one.
*/
-async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
+async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
+var _a;
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
const queries = {};
for (const language of languages) {
@@ -488,17 +465,11 @@ async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput,
};
}
await addDefaultQueries(codeQL, languages, queries);
-const augmentationProperties = calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
-const packs = augmentationProperties.packsInput
-? {
-[languages[0]]: augmentationProperties.packsInput,
-}
-: {};
-if (rawQueriesInput) {
-augmentationProperties.injectedMlQueries =
-await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
+const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
+let injectedMlQueries = false;
+if (queriesInput) {
+injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
-const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
return {
languages,
queries,
@@ -507,32 +478,21 @@ async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput,
packs,
originalUserInput: {},
tempDir,
+toolCacheDir,
codeQLCmd: codeQL.getPath(),
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
-augmentationProperties,
-trapCaches,
-trapCacheDownloadTime,
+injectedMlQueries,
};
}
exports.getDefaultConfig = getDefaultConfig;
-async function downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger) {
-let trapCaches = {};
-let trapCacheDownloadTime = 0;
-if (trapCachingEnabled) {
-const start = perf_hooks_1.performance.now();
-trapCaches = await (0, trap_caching_1.downloadTrapCaches)(codeQL, languages, logger);
-trapCacheDownloadTime = perf_hooks_1.performance.now() - start;
-}
-return { trapCaches, trapCacheDownloadTime };
-}
/**
* Load the config from the given file.
*/
-async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
+async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a;
let parsedYAML;
if (isLocal(configFile)) {
@@ -573,25 +533,25 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
if (!disableDefaultQueries) {
await addDefaultQueries(codeQL, languages, queries);
}
-const augmentationProperties = calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
-const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, rawPacksInput, augmentationProperties.packsInputCombines, languages, configFile, logger);
+const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
-if (rawQueriesInput) {
-augmentationProperties.injectedMlQueries =
-await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
+let injectedMlQueries = false;
+if (queriesInput) {
+injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
-if (shouldAddConfigFileQueries(rawQueriesInput) &&
+if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
const queriesArr = parsedYAML[QUERIES_PROPERTY];
if (!Array.isArray(queriesArr)) {
throw new Error(getQueriesInvalid(configFile));
}
for (const query of queriesArr) {
-if (typeof query[QUERIES_USES_PROPERTY] !== "string") {
-throw new Error(getQueriesMissingUses(configFile));
+if (!(QUERIES_USES_PROPERTY in query) ||
+typeof query[QUERIES_USES_PROPERTY] !== "string") {
+throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureFlags, logger, configFile);
}
@@ -618,7 +578,6 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
}
}
-const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
return {
languages,
queries,
@@ -627,62 +586,16 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
packs,
originalUserInput: parsedYAML,
tempDir,
+toolCacheDir,
codeQLCmd: codeQL.getPath(),
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
-augmentationProperties,
-trapCaches,
-trapCacheDownloadTime,
+injectedMlQueries,
};
}
/**
* Calculates how the codeql config file needs to be augmented before passing
* it to the CLI. The reason this is necessary is the codeql-action can be called
* with extra inputs from the workflow. These inputs are not part of the config
* and the CLI does not know about these inputs so we need to inject them into
* the config file sent to the CLI.
*
* @param rawPacksInput The packs input from the action configuration.
* @param rawQueriesInput The queries input from the action configuration.
* @param languages The languages that the config file is for. If the packs input
* is non-empty, then there must be exactly one language. Otherwise, an
* error is thrown.
*
* @returns The properties that need to be augmented in the config file.
*
* @throws An error if the packs input is non-empty and the languages input does
* not have exactly one language.
*/
// exported for testing.
function calculateAugmentation(rawPacksInput, rawQueriesInput, languages) {
const packsInputCombines = shouldCombine(rawPacksInput);
const packsInput = parsePacksFromInput(rawPacksInput, languages, packsInputCombines);
const queriesInputCombines = shouldCombine(rawQueriesInput);
const queriesInput = parseQueriesFromInput(rawQueriesInput, queriesInputCombines);
return {
injectedMlQueries: false,
packsInputCombines,
packsInput: packsInput === null || packsInput === void 0 ? void 0 : packsInput[languages[0]],
queriesInput,
queriesInputCombines,
};
}
exports.calculateAugmentation = calculateAugmentation;
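A minimal usage sketch of calculateAugmentation() as defined above; the pack name is made up, and the '+' prefixes opt both inputs into the combining behaviour:
// Sketch only (illustrative inputs): both values use the "+" combining form.
const augmentation = calculateAugmentation("+codeql/some-pack@1.2.3", "+security-extended", [languages_1.Language.javascript]);
// Based on the implementation above, `augmentation` should be:
// {
//   injectedMlQueries: false,
//   packsInputCombines: true,
//   packsInput: ["codeql/some-pack@1.2.3"],
//   queriesInputCombines: true,
//   queriesInput: [{ uses: "security-extended" }],
// }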
function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {
if (!rawQueriesInput) {
return undefined;
}
const trimmedInput = queriesInputCombines
? rawQueriesInput.trim().slice(1).trim()
: rawQueriesInput === null || rawQueriesInput === void 0 ? void 0 : rawQueriesInput.trim();
if (queriesInputCombines && trimmedInput.length === 0) {
throw new Error(getConfigFilePropertyError(undefined, "queries", "A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
}
return trimmedInput.split(",").map((query) => ({ uses: query.trim() }));
}
/**
* Pack names must be in the form of `scope/name`, with only alpha-numeric characters,
* and `-` allowed as long as not the first or last char.
@@ -694,7 +607,7 @@ const PACK_IDENTIFIER_PATTERN = (function () {
return new RegExp(`^${component}/${component}$`);
})();
// Exported for testing
-function parsePacksFromConfig(packsByLanguage, languages, configFile, logger) {
+function parsePacksFromConfig(packsByLanguage, languages, configFile) {
const packs = {};
if (Array.isArray(packsByLanguage)) {
if (languages.length === 1) {
@@ -714,23 +627,18 @@ function parsePacksFromConfig(packsByLanguage, languages, configFile, logger) {
throw new Error(getPacksInvalid(configFile));
}
if (!languages.includes(lang)) {
-// This particular language is not being analyzed in this run.
-if (languages_1.Language[lang]) {
-logger.info(`Ignoring packs for ${lang} since this language is not being analyzed in this run.`);
-continue;
-}
-else {
-// This language is invalid, probably a misspelling
-throw new Error(getPacksRequireLanguage(configFile, lang));
-}
+throw new Error(getPacksRequireLanguage(lang, configFile));
+}
+packs[lang] = [];
+for (const packStr of packsArr) {
+packs[lang].push(toPackWithVersion(packStr, configFile));
}
-packs[lang] = packsArr.map((packStr) => validatePackSpecification(packStr, configFile));
}
return packs;
}
exports.parsePacksFromConfig = parsePacksFromConfig;
-function parsePacksFromInput(rawPacksInput, languages, packsInputCombines) {
-if (!(rawPacksInput === null || rawPacksInput === void 0 ? void 0 : rawPacksInput.trim())) {
+function parsePacksFromInput(packsInput, languages) {
+if (!(packsInput === null || packsInput === void 0 ? void 0 : packsInput.trim())) {
return undefined;
}
if (languages.length > 1) {
@@ -739,128 +647,56 @@ function parsePacksFromInput(rawPacksInput, languages, packsInputCombines) {
else if (languages.length === 0) {
throw new Error("No languages specified. Cannot process the packs input.");
}
-rawPacksInput = rawPacksInput.trim();
-if (packsInputCombines) {
-rawPacksInput = rawPacksInput.trim().substring(1).trim();
-if (!rawPacksInput) {
-throw new Error(getConfigFilePropertyError(undefined, "packs", "A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
+packsInput = packsInput.trim();
+if (packsInput.startsWith("+")) {
+packsInput = packsInput.substring(1).trim();
+if (!packsInput) {
+throw new Error("A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs.");
}
}
return {
-[languages[0]]: rawPacksInput.split(",").reduce((packs, pack) => {
-packs.push(validatePackSpecification(pack, ""));
+[languages[0]]: packsInput.split(",").reduce((packs, pack) => {
+packs.push(toPackWithVersion(pack, ""));
return packs;
}, []),
};
}
/** function toPackWithVersion(packStr, configFile) {
* Validates that this package specification is syntactically correct.
* It may not point to any real package, but after this function returns
* without throwing, we are guaranteed that the package specification
* is roughly correct.
*
* The CLI itself will do a more thorough validation of the package
* specification.
*
* A package specification looks like this:
*
* `scope/name@version:path`
*
* Version and path are optional.
*
* @param packStr the package specification to verify.
* @param configFile Config file to use for error reporting
*/
function parsePacksSpecification(packStr, configFile) {
if (typeof packStr !== "string") { if (typeof packStr !== "string") {
throw new Error(getPacksStrInvalid(packStr, configFile)); throw new Error(getPacksStrInvalid(packStr, configFile));
} }
packStr = packStr.trim(); const nameWithVersion = packStr.trim().split("@");
const atIndex = packStr.indexOf("@"); let version;
const colonIndex = packStr.indexOf(":", atIndex); if (nameWithVersion.length > 2 ||
const packStart = 0; !PACK_IDENTIFIER_PATTERN.test(nameWithVersion[0])) {
const versionStart = atIndex + 1 || undefined;
const pathStart = colonIndex + 1 || undefined;
const packEnd = Math.min(atIndex > 0 ? atIndex : Infinity, colonIndex > 0 ? colonIndex : Infinity, packStr.length);
const versionEnd = versionStart
? Math.min(colonIndex > 0 ? colonIndex : Infinity, packStr.length)
: undefined;
const pathEnd = pathStart ? packStr.length : undefined;
const packName = packStr.slice(packStart, packEnd).trim();
const version = versionStart
? packStr.slice(versionStart, versionEnd).trim()
: undefined;
const packPath = pathStart
? packStr.slice(pathStart, pathEnd).trim()
: undefined;
if (!PACK_IDENTIFIER_PATTERN.test(packName)) {
throw new Error(getPacksStrInvalid(packStr, configFile)); throw new Error(getPacksStrInvalid(packStr, configFile));
} }
if (version) { else if (nameWithVersion.length === 2) {
try { version = semver.clean(nameWithVersion[1]) || undefined;
new semver.Range(version); if (!version) {
}
catch (e) {
// The range string is invalid. OK to ignore the caught error
throw new Error(getPacksStrInvalid(packStr, configFile)); throw new Error(getPacksStrInvalid(packStr, configFile));
} }
} }
if (packPath &&
(path.isAbsolute(packPath) ||
// Permit using "/" instead of "\" on Windows
// Use `x.split(y).join(z)` as a polyfill for `x.replaceAll(y, z)` since
// if we used a regex we'd need to escape the path separator on Windows
// which seems more awkward.
path.normalize(packPath).split(path.sep).join("/") !==
packPath.split(path.sep).join("/"))) {
throw new Error(getPacksStrInvalid(packStr, configFile));
}
if (!packPath && pathStart) {
// 0 length path
throw new Error(getPacksStrInvalid(packStr, configFile));
}
return { return {
name: packName, packName: nameWithVersion[0].trim(),
version, version,
path: packPath,
}; };
} }
exports.parsePacksSpecification = parsePacksSpecification;
function prettyPrintPack(pack) {
return `${pack.name}${pack.version ? `@${pack.version}` : ""}${pack.path ? `:${pack.path}` : ""}`;
}
exports.prettyPrintPack = prettyPrintPack;
function validatePackSpecification(pack, configFile) {
return prettyPrintPack(parsePacksSpecification(pack, configFile));
}
exports.validatePackSpecification = validatePackSpecification;
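A few illustrative inputs for parsePacksSpecification() and its helpers above; the pack and suite names are illustrative, and the expected results follow from the parsing rules in the doc comment:
// parsePacksSpecification("codeql/javascript-queries")
//   => { name: "codeql/javascript-queries", version: undefined, path: undefined }
// parsePacksSpecification("codeql/javascript-queries@~0.1.2")
//   => { name: "codeql/javascript-queries", version: "~0.1.2", path: undefined }
// parsePacksSpecification("codeql/javascript-queries@~0.1.2:codeql-suites/some-suite.qls")
//   => { name: "codeql/javascript-queries", version: "~0.1.2", path: "codeql-suites/some-suite.qls" }
// prettyPrintPack() reverses the split, so validatePackSpecification() normalizes a valid
// spec back to the `scope/name@version:path` form and throws on anything malformed.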
// exported for testing // exported for testing
function parsePacks(rawPacksFromConfig, rawPacksFromInput, packsInputCombines, languages, configFile, logger) { function parsePacks(rawPacksFromConfig, rawPacksInput, languages, configFile) {
const packsFomConfig = parsePacksFromConfig(rawPacksFromConfig, languages, configFile, logger); const packsFromInput = parsePacksFromInput(rawPacksInput, languages);
const packsFromInput = parsePacksFromInput(rawPacksFromInput, languages, packsInputCombines); const packsFomConfig = parsePacksFromConfig(rawPacksFromConfig, languages, configFile);
if (!packsFromInput) { if (!packsFromInput) {
return packsFomConfig; return packsFomConfig;
} }
if (!packsInputCombines) { if (!shouldCombinePacks(rawPacksInput)) {
if (!packsFromInput) {
throw new Error(getPacksInvalid(configFile));
}
return packsFromInput; return packsFromInput;
} }
return combinePacks(packsFromInput, packsFomConfig); return combinePacks(packsFromInput, packsFomConfig);
} }
exports.parsePacks = parsePacks; exports.parsePacks = parsePacks;
-/**
-* The convention in this action is that an input value that is prefixed with a '+' will
-* be combined with the corresponding value in the config file.
-*
-* Without a '+', an input value will override the corresponding value in the config file.
-*
-* @param inputValue The input value to process.
-* @returns true if the input value should be combined with the corresponding value in the config file, false if it should replace it.
-*/
-function shouldCombine(inputValue) {
-return !!(inputValue === null || inputValue === void 0 ? void 0 : inputValue.trim().startsWith("+"));
+function shouldCombinePacks(packsInput) {
+return !!(packsInput === null || packsInput === void 0 ? void 0 : packsInput.trim().startsWith("+"));
}
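A short sketch of the '+' convention documented above (the query suite names are illustrative):
// shouldCombine("+security-extended")  => true   (combine with the config file's value)
// shouldCombine("security-extended")   => false  (override the config file's value)
// shouldCombine(undefined)             => false
// The same check feeds packsInputCombines / queriesInputCombines in calculateAugmentation().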
function combinePacks(packs1, packs2) { function combinePacks(packs1, packs2) {
const packs = {}; const packs = {};
@@ -883,16 +719,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
-async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
+async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a, _b, _c;
let config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug("No configuration file was provided");
-config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
+config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
}
else {
-config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
+config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
@@ -905,28 +741,11 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
"Please make sure that the default queries are enabled, or you are specifying queries to run."); "Please make sure that the default queries are enabled, or you are specifying queries to run.");
} }
} }
// When using the codescanning config in the CLI, pack downloads
// happen in the CLI during the `database init` command, so no need
// to download them here.
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureFlags))) {
const registries = parseRegistries(registriesInput);
await downloadPacks(codeQL, config.languages, config.packs, registries, apiDetails, config.tempDir, logger);
}
// Save the config so we can easily access it again in the future // Save the config so we can easily access it again in the future
await saveConfig(config, logger); await saveConfig(config, logger);
return config; return config;
} }
exports.initConfig = initConfig; exports.initConfig = initConfig;
function parseRegistries(registriesInput) {
try {
return registriesInput
? yaml.load(registriesInput)
: undefined;
}
catch (e) {
throw new Error("Invalid registries input. Must be a YAML string.");
}
}
function isLocal(configPath) { function isLocal(configPath) {
// If the path starts with ./, look locally // If the path starts with ./, look locally
if (configPath.indexOf("./") === 0) { if (configPath.indexOf("./") === 0) {
@@ -1006,95 +825,4 @@ async function getConfig(tempDir, logger) {
return JSON.parse(configString);
}
exports.getConfig = getConfig;
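A minimal sketch of reading the saved config back with getConfig() above; tempDir and logger are whatever the calling step already has:
// Sketch only: later workflow steps re-load the parsed config from the known
// location under tempDir instead of re-parsing the user's inputs.
const config = await getConfig(tempDir, logger);
if (config === undefined) {
    throw new Error("The init step must run before this step.");
}
logger.info(`Analyzing ${config.languages.join(", ")}`);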
async function downloadPacks(codeQL, languages, packs, registries, apiDetails, tmpDir, logger) {
let qlconfigFile;
let registriesAuthTokens;
if (registries) {
if (!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD))) {
throw new Error(`'registries' input is not supported on CodeQL versions less than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}.`);
}
// generate a qlconfig.yml file to hold the registry configs.
const qlconfig = createRegistriesBlock(registries);
qlconfigFile = path.join(tmpDir, "qlconfig.yml");
fs.writeFileSync(qlconfigFile, yaml.dump(qlconfig), "utf8");
registriesAuthTokens = registries
.map((registry) => `${registry.url}=${registry.token}`)
.join(",");
}
await wrapEnvironment({
GITHUB_TOKEN: apiDetails.auth,
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
}, async () => {
let numPacksDownloaded = 0;
logger.startGroup("Downloading packs");
for (const language of languages) {
const packsWithVersion = packs[language];
if (packsWithVersion === null || packsWithVersion === void 0 ? void 0 : packsWithVersion.length) {
logger.info(`Downloading custom packs for ${language}`);
const results = await codeQL.packDownload(packsWithVersion, qlconfigFile);
numPacksDownloaded += results.packs.length;
logger.info(`Downloaded: ${results.packs
.map((r) => `${r.name}@${r.version || "latest"}`)
.join(", ")}`);
}
}
if (numPacksDownloaded > 0) {
logger.info(`Downloaded ${numPacksDownloaded} ${packs === 1 ? "pack" : "packs"}`);
}
else {
logger.info("No packs to download");
}
logger.endGroup();
});
}
exports.downloadPacks = downloadPacks;
function createRegistriesBlock(registries) {
if (!Array.isArray(registries) ||
registries.some((r) => !r.url || !r.packages)) {
throw new Error("Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
}
// be sure to remove the `token` field from the registry before writing it to disk.
const safeRegistries = registries.map((registry) => ({
// ensure the url ends with a slash to avoid a bug in the CLI 2.10.4
url: !(registry === null || registry === void 0 ? void 0 : registry.url.endsWith("/")) ? `${registry.url}/` : registry.url,
packages: registry.packages,
}));
const qlconfig = {
registries: safeRegistries,
};
return qlconfig;
}
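For illustration, a sketch of what the registries input and the resulting qlconfig block look like; the URL, token, and package pattern are placeholders:
// Input shape expected by createRegistriesBlock() / downloadPacks():
const registries = [
    { url: "https://ghcr.io/v2/", packages: ["my-org/*"], token: "placeholder-token" },
];
// createRegistriesBlock(registries) drops the token and ensures a trailing slash, so the
// qlconfig.yml written into the temporary directory contains:
//   registries:
//     - url: https://ghcr.io/v2/
//       packages:
//         - my-org/*
// The tokens themselves travel separately via CODEQL_REGISTRIES_AUTH as "url=token" pairs.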
/**
* Create a temporary environment based on the existing environment and overridden
* by the given environment variables that are passed in as arguments.
*
* Use this new environment in the context of the given operation. After completing
* the operation, restore the original environment.
*
* This function does not support un-setting environment variables.
*
* @param env
* @param operation
*/
async function wrapEnvironment(env, operation) {
// Remember the original env
const oldEnv = { ...process.env };
// Set the new env
for (const [key, value] of Object.entries(env)) {
// Ignore undefined keys
if (value !== undefined) {
process.env[key] = value;
}
}
try {
// Run the operation
await operation();
}
finally {
// Restore the old env
for (const [key, value] of Object.entries(oldEnv)) {
process.env[key] = value;
}
}
}
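A minimal usage sketch of wrapEnvironment() above, mirroring how downloadPacks() calls it (the token value is a placeholder):
// Temporarily override GITHUB_TOKEN while the callback runs; undefined values
// (CODEQL_REGISTRIES_AUTH here) are ignored rather than unset.
await wrapEnvironment({
    GITHUB_TOKEN: "placeholder-token",
    CODEQL_REGISTRIES_AUTH: undefined,
}, async () => {
    // ... invoke the CLI commands that need the overridden environment ...
});
// Afterwards the previously saved values are written back to process.env, even if the
// operation throws.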
//# sourceMappingURL=config-utils.js.map

File diff suppressed because one or more lines are too long

lib/config-utils.test.js generated
@@ -26,7 +26,7 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
-const yaml = __importStar(require("js-yaml"));
+const semver_1 = require("semver");
const sinon = __importStar(require("sinon"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
@@ -41,8 +41,6 @@ const sampleApiDetails = {
auth: "token",
externalRepoAuth: "token",
url: "https://github.example.com",
-apiURL: undefined,
-registriesAuthTokens: undefined,
};
const gitHubVersion = { type: util.GitHubVariant.DOTCOM };
// Returns the filepath of the newly-created file
@@ -90,12 +88,9 @@ function mockListLanguages(languages) {
multipleDeclaredLanguages: {}, multipleDeclaredLanguages: {},
}; };
}, },
async packDownload() {
return { packs: [] };
},
}); });
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger); const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger)); t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
}); });
}); });
(0, ava_1.default)("loading config saves config", async (t) => { (0, ava_1.default)("loading config saves config", async (t) => {
@@ -112,31 +107,26 @@ function mockListLanguages(languages) {
multipleDeclaredLanguages: {}, multipleDeclaredLanguages: {},
}; };
}, },
async packDownload() {
return { packs: [] };
},
}); });
// Sanity check the saved config file does not already exist // Sanity check the saved config file does not already exist
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir))); t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig returns undefined before we have called initConfig // Sanity check that getConfig returns undefined before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined); t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger); const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
// The saved config file should now exist // The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir))); t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig // And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig(tmpDir, logger); const config2 = await configUtils.getConfig(tmpDir, logger);
t.not(config2, undefined); t.not(config2, undefined);
if (config2 !== undefined) { if (config2 !== undefined) {
// removes properties assigned to undefined. t.deepEqual(config1, config2);
const expectedConfig = JSON.parse(JSON.stringify(config1));
t.deepEqual(expectedConfig, config2);
} }
}); });
}); });
(0, ava_1.default)("load input outside of workspace", async (t) => { (0, ava_1.default)("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
try { try {
await configUtils.initConfig(undefined, undefined, undefined, undefined, "../input", undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error"); throw new Error("initConfig did not throw error");
} }
catch (err) { catch (err) {
@@ -149,7 +139,7 @@ function mockListLanguages(languages) {
// no filename given, just a repo // no filename given, just a repo
const configFile = "octo-org/codeql-config@main"; const configFile = "octo-org/codeql-config@main";
try { try {
await configUtils.initConfig(undefined, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error"); throw new Error("initConfig did not throw error");
} }
catch (err) { catch (err) {
@@ -163,7 +153,7 @@ function mockListLanguages(languages) {
const configFile = "input"; const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile))); t.false(fs.existsSync(path.join(tmpDir, configFile)));
try { try {
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error"); throw new Error("initConfig did not throw error");
} }
catch (err) { catch (err) {
@@ -186,9 +176,6 @@ function mockListLanguages(languages) {
multipleDeclaredLanguages: {}, multipleDeclaredLanguages: {},
}; };
}, },
async packDownload() {
return { packs: [] };
},
}); });
// Just create a generic config object with non-default values for all fields // Just create a generic config object with non-default values for all fields
const inputFileContents = ` const inputFileContents = `
@@ -226,6 +213,7 @@ function mockListLanguages(languages) {
paths: ["c/d"], paths: ["c/d"],
}, },
tempDir: tmpDir, tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: codeQL.getPath(), codeQLCmd: codeQL.getPath(),
gitHubVersion, gitHubVersion,
dbLocation: path.resolve(tmpDir, "codeql_databases"), dbLocation: path.resolve(tmpDir, "codeql_databases"),
@@ -233,13 +221,11 @@ function mockListLanguages(languages) {
debugMode: false, debugMode: false,
debugArtifactName: "my-artifact", debugArtifactName: "my-artifact",
debugDatabaseName: "my-db", debugDatabaseName: "my-db",
augmentationProperties: configUtils.defaultAugmentationProperties, injectedMlQueries: false,
trapCaches: {},
trapCacheDownloadTime: 0,
}; };
const languages = "javascript"; const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir); const configFilePath = createConfigFile(inputFileContents, tmpDir);
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true)); const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Should exactly equal the object we constructed earlier // Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig); t.deepEqual(actualConfig, expectedConfig);
}); });
@@ -265,9 +251,6 @@ function mockListLanguages(languages) {
multipleDeclaredLanguages: {}, multipleDeclaredLanguages: {},
}; };
}, },
async packDownload() {
return { packs: [] };
},
}); });
// The important point of this config is that it doesn't specify // The important point of this config is that it doesn't specify
// the disable-default-queries field. // the disable-default-queries field.
@@ -278,7 +261,7 @@ function mockListLanguages(languages) {
fs.mkdirSync(path.join(tmpDir, "foo")); fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript"; const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir); const configFilePath = createConfigFile(inputFileContents, tmpDir);
await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true)); await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolve queries was called correctly // Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1); t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, [ t.deepEqual(resolveQueriesArgs[0].queries, [
@@ -319,23 +302,20 @@ function queriesToResolvedQueryForm(queries) {
resolveQueriesArgs.push({ queries, extraSearchPath }); resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries); return queriesToResolvedQueryForm(queries);
}, },
async packDownload() {
return { packs: [] };
},
}); });
const languages = "javascript"; const languages = "javascript";
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true)); const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly // Check resolveQueries was called correctly
// It'll be called once for the default queries // It'll be called once for the default queries
// and once for `./foo` from the config file. // and once for `./foo` from the config file.
t.deepEqual(resolveQueriesArgs.length, 2); t.deepEqual(resolveQueriesArgs.length, 2);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1); t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}foo`)); t.regex(resolveQueriesArgs[1].queries[0], /.*\/foo$/);
// Now check that the end result contains the default queries and the query from config // Now check that the end result contains the default queries and the query from config
t.deepEqual(config.queries["javascript"].builtin.length, 1); t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1); t.deepEqual(config.queries["javascript"].custom.length, 1);
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls")); t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}foo`)); t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/foo$/);
}); });
}); });
(0, ava_1.default)("Queries from config file can be overridden in workflow file", async (t) => { (0, ava_1.default)("Queries from config file can be overridden in workflow file", async (t) => {
@@ -355,23 +335,20 @@ function queriesToResolvedQueryForm(queries) {
resolveQueriesArgs.push({ queries, extraSearchPath }); resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries); return queriesToResolvedQueryForm(queries);
}, },
async packDownload() {
return { packs: [] };
},
}); });
const languages = "javascript"; const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true)); const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly // Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`, // It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file. // but won't be called for './foo' from the config file.
t.deepEqual(resolveQueriesArgs.length, 2); t.deepEqual(resolveQueriesArgs.length, 2);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1); t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}override`)); t.regex(resolveQueriesArgs[1].queries[0], /.*\/override$/);
// Now check that the end result contains only the default queries and the override query // Now check that the end result contains only the default queries and the override query
t.deepEqual(config.queries["javascript"].builtin.length, 1); t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1); t.deepEqual(config.queries["javascript"].custom.length, 1);
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls")); t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}override`)); t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/override$/);
}); });
}); });
(0, ava_1.default)("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => { (0, ava_1.default)("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
@@ -390,22 +367,19 @@ function queriesToResolvedQueryForm(queries) {
resolveQueriesArgs.push({ queries, extraSearchPath }); resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries); return queriesToResolvedQueryForm(queries);
}, },
async packDownload() {
return { packs: [] };
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries.length, 1);
t.true(resolveQueriesArgs[0].queries[0].endsWith(`${path.sep}workflow-query`));
t.regex(resolveQueriesArgs[0].queries[0], /.*\/workflow-query$/);
// Now check that the end result contains only the workflow query, and not the default one
t.deepEqual(config.queries["javascript"].builtin.length, 0);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}workflow-query`));
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/workflow-query$/);
});
});
(0, ava_1.default)("Multiple queries can be specified in workflow file, no config file required", async (t) => {
@@ -419,26 +393,23 @@ function queriesToResolvedQueryForm(queries) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
async packDownload() {
return { packs: [] };
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
t.deepEqual(resolveQueriesArgs.length, 3);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}override1`));
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override1$/);
t.true(resolveQueriesArgs[2].queries[0].endsWith(`${path.sep}override2`));
t.regex(resolveQueriesArgs[2].queries[0], /.*\/override2$/);
// Now check that the end result contains both the queries from the workflow, as well as the defaults
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 2);
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls"));
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}override1`));
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/override1$/);
t.true(config.queries["javascript"].custom[1].queries[0].endsWith(`${path.sep}override2`));
t.regex(config.queries["javascript"].custom[1].queries[0], /.*\/override2$/);
});
});
(0, ava_1.default)("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
@@ -461,30 +432,27 @@ function queriesToResolvedQueryForm(queries) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
async packDownload() {
return { packs: [] };
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries,
// once for each of additional1 and additional2,
// and once for './foo' from the config file
t.deepEqual(resolveQueriesArgs.length, 4);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}additional1`));
t.regex(resolveQueriesArgs[1].queries[0], /.*\/additional1$/);
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
t.true(resolveQueriesArgs[2].queries[0].endsWith(`${path.sep}additional2`));
t.regex(resolveQueriesArgs[2].queries[0], /.*\/additional2$/);
t.deepEqual(resolveQueriesArgs[3].queries.length, 1);
t.true(resolveQueriesArgs[3].queries[0].endsWith(`${path.sep}foo`));
t.regex(resolveQueriesArgs[3].queries[0], /.*\/foo$/);
// Now check that the end result contains all the queries
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 3);
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls"));
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}additional1`));
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/additional1$/);
t.true(config.queries["javascript"].custom[1].queries[0].endsWith(`${path.sep}additional2`));
t.regex(config.queries["javascript"].custom[1].queries[0], /.*\/additional2$/);
t.true(config.queries["javascript"].custom[2].queries[0].endsWith(`${path.sep}foo`));
t.regex(config.queries["javascript"].custom[2].queries[0], /.*\/foo$/);
});
});
(0, ava_1.default)("Invalid queries in workflow file handled correctly", async (t) => {
@@ -503,12 +471,9 @@ function queriesToResolvedQueryForm(queries) {
multipleDeclaredLanguages: {},
};
},
async packDownload() {
return { packs: [] };
},
});
try {
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.fail("initConfig did not throw error");
}
catch (err) {
@@ -530,9 +495,6 @@ function queriesToResolvedQueryForm(queries) {
multipleDeclaredLanguages: {},
};
},
async packDownload() {
return { packs: [] };
},
});
const inputFileContents = `
name: my config
@@ -554,7 +516,7 @@ function queriesToResolvedQueryForm(queries) {
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript";
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.assert(spyGetContents.called);
});
});
@@ -564,7 +526,7 @@ function queriesToResolvedQueryForm(queries) {
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, undefined, repoReference, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -580,7 +542,7 @@ function queriesToResolvedQueryForm(queries) {
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, undefined, repoReference, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -595,12 +557,9 @@ function queriesToResolvedQueryForm(queries) {
async resolveLanguages() {
return {};
},
async packDownload() {
return { packs: [] };
},
});
try {
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -612,7 +571,7 @@ function queriesToResolvedQueryForm(queries) {
return await util.withTmpDir(async (tmpDir) => {
const languages = "rubbish,english";
try {
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -630,9 +589,6 @@ function queriesToResolvedQueryForm(queries) {
multipleDeclaredLanguages: {},
};
},
async packDownload() {
return { packs: [] };
},
});
const inputFileContents = `
name: my config
@@ -643,9 +599,14 @@ function queriesToResolvedQueryForm(queries) {
const configFile = path.join(tmpDir, "codeql-config.yaml");
fs.writeFileSync(configFile, inputFileContents);
const languages = "javascript";
const { packs } = await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, {
[languages_1.Language.javascript]: ["a/b@1.2.3"],
[languages_1.Language.javascript]: [
{
packName: "a/b",
version: (0, semver_1.clean)("1.2.3"),
},
],
});
});
});
@@ -661,9 +622,6 @@ function queriesToResolvedQueryForm(queries) {
multipleDeclaredLanguages: {},
};
},
async packDownload() {
return { packs: [] };
},
});
const inputFileContents = `
name: my config
@@ -680,10 +638,20 @@ function queriesToResolvedQueryForm(queries) {
fs.writeFileSync(configFile, inputFileContents);
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript,python,cpp";
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example" }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, {
[languages_1.Language.javascript]: ["a/b@1.2.3"],
[languages_1.Language.python]: ["c/d@1.2.3"],
[languages_1.Language.javascript]: [
{
packName: "a/b",
version: (0, semver_1.clean)("1.2.3"),
},
],
[languages_1.Language.python]: [
{
packName: "c/d",
version: (0, semver_1.clean)("1.2.3"),
},
],
});
t.deepEqual(queries, {
cpp: {
@@ -717,16 +685,13 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
multipleDeclaredLanguages: {},
};
},
async packDownload() {
return { packs: [] };
},
});
const languages = "javascript";
const configFile = "input";
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, "utf8");
try {
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -745,7 +710,7 @@ doInvalidInputTest("paths invalid type", `paths: 17`, configUtils.getPathsInvali
doInvalidInputTest("queries uses invalid type", ` doInvalidInputTest("queries uses invalid type", `
queries: queries:
- uses: - uses:
- hello: world`, configUtils.getQueriesMissingUses); - hello: world`, configUtils.getQueryUsesInvalid);
function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
// Invalid contents of a "queries.uses" field.
// Should fail with the expected error message
@@ -800,14 +765,14 @@ const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
* Test macro for ensuring the packs block is valid
*/
const parsePacksMacro = ava_1.default.macro({
exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b", mockLogger), expected),
exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected),
title: (providedTitle = "") => `Parse Packs: ${providedTitle}`,
});
/**
* Test macro for testing when the packs block is invalid
*/
const parsePacksErrorMacro = ava_1.default.macro({
exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b", {}), {
exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), {
message: expected,
}),
title: (providedTitle = "") => `Parse Packs Error: ${providedTitle}`,
@@ -821,152 +786,92 @@ const invalidPackNameMacro = ava_1.default.macro({
});
(0, ava_1.default)("no packs", parsePacksMacro, {}, [], {});
(0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
],
});
(0, ava_1.default)("two packs with spaces", parsePacksMacro, [" a/b ", " c/d@1.2.3 "], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
],
});
(0, ava_1.default)("two packs with language", parsePacksMacro, {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
}, [languages_1.Language.cpp, languages_1.Language.java, languages_1.Language.csharp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
});
(0, ava_1.default)("two packs with unused language in config", parsePacksMacro, {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
}, [languages_1.Language.cpp, languages_1.Language.csharp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
});
(0, ava_1.default)("packs with other valid names", parsePacksMacro, [
// ranges are ok
"c/d@1.0",
"c/d@~1.0.0",
"c/d@~1.0.0:a/b",
"c/d@~1.0.0+abc:a/b",
"c/d@~1.0.0-abc:a/b",
"c/d:a/b",
// whitespace is removed
" c/d @ ~1.0.0 : b.qls ",
// and it is retained within a path
" c/d @ ~1.0.0 : b/a path with/spaces.qls ",
// this is valid. the path is '@'. It will probably fail when passed to the CLI
"c/d@1.2.3:@",
// this is valid, too. It will fail if it doesn't match a path
// (globbing is not done)
"c/d@1.2.3:+*)_(",
], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
"c/d@1.0",
"c/d@~1.0.0",
"c/d@~1.0.0:a/b",
"c/d@~1.0.0+abc:a/b",
"c/d@~1.0.0-abc:a/b",
"c/d:a/b",
"c/d@~1.0.0:b.qls",
"c/d@~1.0.0:b/a path with/spaces.qls",
"c/d@1.2.3:@",
"c/d@1.2.3:+*)_(",
],
});
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
],
[languages_1.Language.java]: [
{ packName: "d/e", version: undefined },
{ packName: "f/g", version: (0, semver_1.clean)("1.2.3") },
],
});
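// Hedged sketch (added for illustration, not repository code): one way to split a pack
// specification of the form scope/name[@version][:path] into its parts, mirroring the
// accepted shapes listed in the test above. The helper name is hypothetical and no
// validation is attempted here.
function splitPackSpec(spec) {
    const trimmed = spec.trim();
    const colon = trimmed.indexOf(":", trimmed.indexOf("/")); // a path separator can only appear after the name
    const nameAndVersion = colon === -1 ? trimmed : trimmed.substring(0, colon);
    const packPath = colon === -1 ? undefined : trimmed.substring(colon + 1).trim();
    const at = nameAndVersion.indexOf("@");
    return {
        name: (at === -1 ? nameAndVersion : nameAndVersion.substring(0, at)).trim(),
        version: at === -1 ? undefined : nameAndVersion.substring(at + 1).trim(),
        path: packPath,
    };
}
console.log(splitPackSpec(" c/d @ ~1.0.0 : b.qls "));
// -> { name: 'c/d', version: '~1.0.0', path: 'b.qls' }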
(0, ava_1.default)("no language", parsePacksErrorMacro, ["a/b@1.2.3"], [languages_1.Language.java, languages_1.Language.python], /The configuration file "\/a\/b" is invalid: property "packs" must split packages by language/); (0, ava_1.default)("no language", parsePacksErrorMacro, ["a/b@1.2.3"], [languages_1.Language.java, languages_1.Language.python], /The configuration file "\/a\/b" is invalid: property "packs" must split packages by language/);
(0, ava_1.default)("invalid language", parsePacksErrorMacro, { [languages_1.Language.java]: ["c/d"] }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" has "java", but it is not one of the languages to analyze/);
(0, ava_1.default)("not an array", parsePacksErrorMacro, { [languages_1.Language.cpp]: "c/d" }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" must be an array of non-empty strings/); (0, ava_1.default)("not an array", parsePacksErrorMacro, { [languages_1.Language.cpp]: "c/d" }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" must be an array of non-empty strings/);
(0, ava_1.default)(invalidPackNameMacro, "c"); // all packs require at least a scope and a name (0, ava_1.default)(invalidPackNameMacro, "c"); // all packs require at least a scope and a name
(0, ava_1.default)(invalidPackNameMacro, "c-/d"); (0, ava_1.default)(invalidPackNameMacro, "c-/d");
(0, ava_1.default)(invalidPackNameMacro, "-c/d"); (0, ava_1.default)(invalidPackNameMacro, "-c/d");
(0, ava_1.default)(invalidPackNameMacro, "c/d_d"); (0, ava_1.default)(invalidPackNameMacro, "c/d_d");
(0, ava_1.default)(invalidPackNameMacro, "c/d@@"); (0, ava_1.default)(invalidPackNameMacro, "c/d@x");
(0, ava_1.default)(invalidPackNameMacro, "c/d@1.0.0:");
(0, ava_1.default)(invalidPackNameMacro, "c/d:");
(0, ava_1.default)(invalidPackNameMacro, "c/d:/a");
(0, ava_1.default)(invalidPackNameMacro, "@1.0.0:a");
(0, ava_1.default)(invalidPackNameMacro, "c/d@../a");
(0, ava_1.default)(invalidPackNameMacro, "c/d@b/../a");
(0, ava_1.default)(invalidPackNameMacro, "c/d:z@1");
/**
* Test macro for pretty printing pack specs
*/
const packSpecPrettyPrintingMacro = ava_1.default.macro({
exec: (t, packStr, packObj) => {
const parsed = configUtils.parsePacksSpecification(packStr);
t.deepEqual(parsed, packObj, "parsed pack spec is correct");
const stringified = configUtils.prettyPrintPack(packObj);
t.deepEqual(stringified, packStr.trim(), "pretty-printed pack spec is correct");
t.deepEqual(configUtils.validatePackSpecification(packStr), packStr.trim(), "pack spec is valid");
},
title: (_providedTitle, packStr,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_packObj) => `Prettyprint pack spec: '${packStr}'`,
});
(0, ava_1.default)(packSpecPrettyPrintingMacro, "a/b", {
name: "a/b",
version: undefined,
path: undefined,
});
(0, ava_1.default)(packSpecPrettyPrintingMacro, "a/b@~1.2.3", {
name: "a/b",
version: "~1.2.3",
path: undefined,
});
(0, ava_1.default)(packSpecPrettyPrintingMacro, "a/b@~1.2.3:abc/def", {
name: "a/b",
version: "~1.2.3",
path: "abc/def",
});
(0, ava_1.default)(packSpecPrettyPrintingMacro, "a/b:abc/def", {
name: "a/b",
version: undefined,
path: "abc/def",
});
(0, ava_1.default)(packSpecPrettyPrintingMacro, " a/b:abc/def ", {
name: "a/b",
version: undefined,
path: "abc/def",
});
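// Hedged companion sketch (not repository code): reassembling a parsed pack object back
// into the `name[@version][:path]` string form that the pretty-printing tests above
// expect. The function name is hypothetical.
function prettyPrintPackSketch(pack) {
    return `${pack.name}${pack.version ? `@${pack.version}` : ""}${pack.path ? `:${pack.path}` : ""}`;
}
console.log(prettyPrintPackSketch({ name: "a/b", version: "~1.2.3", path: "abc/def" })); // a/b@~1.2.3:abc/def
console.log(prettyPrintPackSketch({ name: "a/b", version: undefined, path: undefined })); // a/b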
/**
* Test macro for testing the packs block and the packs input
*/
function parseInputAndConfigMacro(t, packsFromConfig, packsFromInput, languages, expected) {
t.deepEqual(configUtils.parsePacks(packsFromConfig, packsFromInput, !!(packsFromInput === null || packsFromInput === void 0 ? void 0 : packsFromInput.trim().startsWith("+")), // coerce to boolean
languages, "/a/b", mockLogger), expected);
t.deepEqual(configUtils.parsePacks(packsFromConfig, packsFromInput, languages, "/a/b"), expected);
}
parseInputAndConfigMacro.title = (providedTitle) => `Parse Packs input and config: ${providedTitle}`;
const mockLogger = {
info: (message) => {
console.log(message);
},
};
function parseInputAndConfigErrorMacro(t, packsFromConfig, packsFromInput, languages, packsFromInputOverride, expected) {
function parseInputAndConfigErrorMacro(t, packsFromConfig, packsFromInput, languages, expected) {
t.throws(() => {
configUtils.parsePacks(packsFromConfig, packsFromInput, packsFromInputOverride, languages, "/a/b", mockLogger);
configUtils.parsePacks(packsFromConfig, packsFromInput, languages, "/a/b");
}, {
message: expected,
});
}
parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and config Error: ${providedTitle}`;
(0, ava_1.default)("input only", parseInputAndConfigMacro, {}, " c/d ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["c/d"],
[languages_1.Language.cpp]: [{ packName: "c/d", version: undefined }],
});
(0, ava_1.default)("input only with multiple", parseInputAndConfigMacro, {}, "a/b , c/d@1.2.3", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: "1.2.3" },
],
});
(0, ava_1.default)("input only with +", parseInputAndConfigMacro, {}, " + a/b , c/d@1.2.3 ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: "1.2.3" },
],
});
(0, ava_1.default)("config only", parseInputAndConfigMacro, ["a/b", "c/d"], " ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["a/b", "c/d"],
[languages_1.Language.cpp]: [
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: undefined },
],
});
(0, ava_1.default)("input overrides", parseInputAndConfigMacro, ["a/b", "c/d"], " e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["e/f", "g/h@1.2.3"],
[languages_1.Language.cpp]: [
{ packName: "e/f", version: undefined },
{ packName: "g/h", version: "1.2.3" },
],
});
(0, ava_1.default)("input and config", parseInputAndConfigMacro, ["a/b", "c/d"], " +e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
[languages_1.Language.cpp]: ["e/f", "g/h@1.2.3", "a/b", "c/d"],
[languages_1.Language.cpp]: [
{ packName: "e/f", version: undefined },
{ packName: "g/h", version: "1.2.3" },
{ packName: "a/b", version: undefined },
{ packName: "c/d", version: undefined },
],
});
(0, ava_1.default)("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], false, /No languages specified/);
(0, ava_1.default)("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], /No languages specified/);
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], false, /multi-language analysis/);
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], true, /remove the '\+'/);
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], false, /"xxx" is not a valid pack/);
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
const mlPoweredQueriesMacro = ava_1.default.macro({
exec: async (t, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => {
return await util.withTmpDir(async (tmpDir) => {
@@ -983,17 +888,17 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
multipleDeclaredLanguages: {},
};
},
async packDownload() {
return { packs: [] };
},
});
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
: []), (0, logging_1.getRunnerLogger)(true));
if (expectedVersionString !== undefined) {
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
`codeql/javascript-experimental-atm-queries@${expectedVersionString}`,
{
packName: "codeql/javascript-experimental-atm-queries",
version: expectedVersionString,
},
],
});
}
@@ -1006,232 +911,11 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
? `${expectedVersionString} are`
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`,
});
// macro, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString
// macro, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, versionString
// Test that ML-powered queries aren't run on v2.7.4 of the CLI.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined); (0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
// Test that ML-powered queries aren't run when the feature flag is off.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined); (0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
// Test that the ~0.1.0 version of ML-powered queries is run on v2.8.3 of the CLI.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.3", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
// Test that ML-powered queries aren't run when the user hasn't specified that we should run the
// `security-extended` or `security-and-quality` query suite.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined); (0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
// Test that ML-powered queries are run on non-Windows platforms running `security-extended` on (0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", "~0.1.0");
// versions of the CodeQL CLI prior to 2.9.0. (0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", "~0.1.0");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.2.0"); (0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
// Test that ML-powered queries are run on non-Windows platforms running `security-and-quality` on
// versions of the CodeQL CLI prior to 2.9.0.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-and-quality", process.platform === "win32" ? undefined : "~0.2.0");
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL CLI
// 2.9.0+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-extended", "~0.2.0");
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
// CLI 2.9.0+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-and-quality", "~0.2.0");
// Test that we don't inject an ML-powered query pack if the user has already specified one.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL
// CLI 2.9.3+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.3", true, undefined, "security-extended", "~0.3.0");
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
// CLI 2.9.3+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.3", true, undefined, "security-and-quality", "~0.3.0");
const calculateAugmentationMacro = ava_1.default.macro({
exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedAugmentationProperties) => {
const actualAugmentationProperties = configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
t.deepEqual(actualAugmentationProperties, expectedAugmentationProperties);
},
title: (_, title) => `Calculate Augmentation: ${title}`,
});
(0, ava_1.default)(calculateAugmentationMacro, "All empty", undefined, undefined, [languages_1.Language.javascript], {
queriesInputCombines: false,
queriesInput: undefined,
packsInputCombines: false,
packsInput: undefined,
injectedMlQueries: false,
});
(0, ava_1.default)(calculateAugmentationMacro, "With queries", undefined, " a, b , c, d", [languages_1.Language.javascript], {
queriesInputCombines: false,
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
packsInputCombines: false,
packsInput: undefined,
injectedMlQueries: false,
});
(0, ava_1.default)(calculateAugmentationMacro, "With queries combining", undefined, " + a, b , c, d ", [languages_1.Language.javascript], {
queriesInputCombines: true,
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
packsInputCombines: false,
packsInput: undefined,
injectedMlQueries: false,
});
(0, ava_1.default)(calculateAugmentationMacro, "With packs", " codeql/a , codeql/b , codeql/c , codeql/d ", undefined, [languages_1.Language.javascript], {
queriesInputCombines: false,
queriesInput: undefined,
packsInputCombines: false,
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
injectedMlQueries: false,
});
(0, ava_1.default)(calculateAugmentationMacro, "With packs combining", " + codeql/a, codeql/b, codeql/c, codeql/d", undefined, [languages_1.Language.javascript], {
queriesInputCombines: false,
queriesInput: undefined,
packsInputCombines: true,
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
injectedMlQueries: false,
});
const calculateAugmentationErrorMacro = ava_1.default.macro({
exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedError) => {
t.throws(() => configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages), { message: expectedError });
},
title: (_, title) => `Calculate Augmentation Error: ${title}`,
});
(0, ava_1.default)(calculateAugmentationErrorMacro, "Plus (+) with nothing else (queries)", undefined, " + ", [languages_1.Language.javascript], /The workflow property "queries" is invalid/);
(0, ava_1.default)(calculateAugmentationErrorMacro, "Plus (+) with nothing else (packs)", " + ", undefined, [languages_1.Language.javascript], /The workflow property "packs" is invalid/);
(0, ava_1.default)(calculateAugmentationErrorMacro, "Packs input with multiple languages", " + a/b, c/d ", undefined, [languages_1.Language.javascript, languages_1.Language.java], /Cannot specify a 'packs' input in a multi-language analysis/);
(0, ava_1.default)(calculateAugmentationErrorMacro, "Packs input with no languages", " + a/b, c/d ", undefined, [], /No languages specified/);
(0, ava_1.default)(calculateAugmentationErrorMacro, "Invalid packs", " a-pack-without-a-scope ", undefined, [languages_1.Language.javascript], /"a-pack-without-a-scope" is not a valid pack/);
(0, ava_1.default)("downloadPacks-no-registries", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const packDownloadStub = sinon.stub();
packDownloadStub.callsFake((packs) => ({
packs,
}));
const codeQL = (0, codeql_1.setCodeQL)({
packDownload: packDownloadStub,
});
const logger = (0, logging_1.getRunnerLogger)(true);
// packs are supplied for go, java, and python
// analyzed languages are java, javascript, and python
await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {
java: ["a", "b"],
go: ["c", "d"],
python: ["e", "f"],
}, undefined, // registries
sampleApiDetails, tmpDir, logger);
// Expecting packs to be downloaded once for java and once for python
t.deepEqual(packDownloadStub.callCount, 2);
// no config file was created, so pass `undefined` as the config file path
t.deepEqual(packDownloadStub.firstCall.args, [["a", "b"], undefined]);
t.deepEqual(packDownloadStub.secondCall.args, [["e", "f"], undefined]);
});
});
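// Hedged sketch (added for illustration, not repository code) of the filtering the
// downloadPacks test above relies on: only packs whose language is actually being
// analyzed are handed to packDownload, so the go packs are skipped. The helper name is
// hypothetical.
function packsToDownloadSketch(packsByLanguage, analyzedLanguages) {
    return Object.entries(packsByLanguage)
        .filter(([language]) => analyzedLanguages.includes(language))
        .map(([, packs]) => packs);
}
console.log(packsToDownloadSketch({ java: ["a", "b"], go: ["c", "d"], python: ["e", "f"] }, ["javascript", "java", "python"]));
// -> [ [ 'a', 'b' ], [ 'e', 'f' ] ]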
(0, ava_1.default)("downloadPacks-with-registries", async (t) => {
// same thing, but this time include a registries block and
// associated env vars
return await util.withTmpDir(async (tmpDir) => {
process.env.GITHUB_TOKEN = "not-a-token";
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
const logger = (0, logging_1.getRunnerLogger)(true);
const registries = [
{
// no slash
url: "http://ghcr.io",
packages: ["codeql/*", "dsp-testing/*"],
token: "not-a-token",
},
{
// with slash
url: "https://containers.GHEHOSTNAME1/v2/",
packages: "semmle/*",
token: "still-not-a-token",
},
];
// append a slash to the first url
const expectedRegistries = registries.map((r, i) => ({
packages: r.packages,
url: i === 0 ? `${r.url}/` : r.url,
}));
const expectedConfigFile = path.join(tmpDir, "qlconfig.yml");
const packDownloadStub = sinon.stub();
packDownloadStub.callsFake((packs, configFile) => {
t.deepEqual(configFile, expectedConfigFile);
// verify the env vars were set correctly
t.deepEqual(process.env.GITHUB_TOKEN, sampleApiDetails.auth);
t.deepEqual(process.env.CODEQL_REGISTRIES_AUTH, "http://ghcr.io=not-a-token,https://containers.GHEHOSTNAME1/v2/=still-not-a-token");
// verify the config file contents were set correctly
const config = yaml.load(fs.readFileSync(configFile, "utf8"));
t.deepEqual(config.registries, expectedRegistries);
return {
packs,
};
});
const codeQL = (0, codeql_1.setCodeQL)({
packDownload: packDownloadStub,
getVersion: () => Promise.resolve("2.10.5"),
});
// packs are supplied for go, java, and python
// analyzed languages are java, javascript, and python
await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {
java: ["a", "b"],
go: ["c", "d"],
python: ["e", "f"],
}, registries, sampleApiDetails, tmpDir, logger);
// Same packs are downloaded as in previous test
t.deepEqual(packDownloadStub.callCount, 2);
t.deepEqual(packDownloadStub.firstCall.args, [
["a", "b"],
expectedConfigFile,
]);
t.deepEqual(packDownloadStub.secondCall.args, [
["e", "f"],
expectedConfigFile,
]);
// Verify that the env vars were unset.
t.deepEqual(process.env.GITHUB_TOKEN, "not-a-token");
t.deepEqual(process.env.CODEQL_REGISTRIES_AUTH, "not-a-registries-auth");
});
});
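// Hedged sketch (added for illustration, not repository code) of the two artifacts the
// registries test above inspects: a CODEQL_REGISTRIES_AUTH value made of comma-separated
// "<url>=<token>" pairs, and a qlconfig-style registries list with tokens stripped and a
// trailing slash added to bare URLs. Variable names here are illustrative.
const exampleRegistries = [
    { url: "http://ghcr.io", packages: ["codeql/*", "dsp-testing/*"], token: "not-a-token" },
    { url: "https://containers.GHEHOSTNAME1/v2/", packages: "semmle/*", token: "still-not-a-token" },
];
const registriesAuth = exampleRegistries.map((r) => `${r.url}=${r.token}`).join(",");
const qlconfigRegistries = exampleRegistries.map((r) => ({
    packages: r.packages,
    url: r.url.endsWith("/") ? r.url : `${r.url}/`,
}));
console.log(registriesAuth);     // http://ghcr.io=not-a-token,https://containers.GHEHOSTNAME1/v2/=still-not-a-token
console.log(qlconfigRegistries); // [ { packages: [...], url: 'http://ghcr.io/' }, { packages: 'semmle/*', url: 'https://containers.GHEHOSTNAME1/v2/' } ]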
(0, ava_1.default)("downloadPacks-with-registries fails on 2.10.3", async (t) => {
// same thing, but this time include a registries block and
// associated env vars
return await util.withTmpDir(async (tmpDir) => {
process.env.GITHUB_TOKEN = "not-a-token";
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
const logger = (0, logging_1.getRunnerLogger)(true);
const registries = [
{
url: "http://ghcr.io",
packages: ["codeql/*", "dsp-testing/*"],
token: "not-a-token",
},
{
url: "https://containers.GHEHOSTNAME1/v2/",
packages: "semmle/*",
token: "still-not-a-token",
},
];
const codeQL = (0, codeql_1.setCodeQL)({
getVersion: () => Promise.resolve("2.10.3"),
});
await t.throwsAsync(async () => {
return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, registries, sampleApiDetails, tmpDir, logger);
}, { instanceOf: Error }, "'registries' input is not supported on CodeQL versions less than 2.10.4.");
});
});
(0, ava_1.default)("downloadPacks-with-registries fails with invalid registries block", async (t) => {
// same thing, but this time include a registries block and
// associated env vars
return await util.withTmpDir(async (tmpDir) => {
process.env.GITHUB_TOKEN = "not-a-token";
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
const logger = (0, logging_1.getRunnerLogger)(true);
const registries = [
{
// missing url property
packages: ["codeql/*", "dsp-testing/*"],
token: "not-a-token",
},
{
url: "https://containers.GHEHOSTNAME1/v2/",
packages: "semmle/*",
token: "still-not-a-token",
},
];
const codeQL = (0, codeql_1.setCodeQL)({
getVersion: () => Promise.resolve("2.10.4"),
});
await t.throwsAsync(async () => {
return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, registries, sampleApiDetails, tmpDir, logger);
}, { instanceOf: Error }, "Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
});
});
//# sourceMappingURL=config-utils.test.js.map

Some files were not shown because too many files have changed in this diff.