Compare commits


3 Commits

Author                   SHA1         Message                                              Date
Esben Sparre Andreasen   91e3c857ab   add test                                             2021-10-28 14:43:00 +02:00
Esben Sparre Andreasen   3847055a30   fix TypeScript NPE type warning                      2021-10-25 21:22:43 +02:00
Esben Sparre Andreasen   2a8725d4a8   let the codeql env decide the supported languages    2021-10-25 21:22:43 +02:00
2240 changed files with 157011 additions and 79662 deletions


@@ -10,8 +10,7 @@
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-requiring-type-checking",
"plugin:github/recommended",
"plugin:github/typescript",
"plugin:import/typescript"
"plugin:github/typescript"
],
"rules": {
"filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],


@@ -28,6 +28,7 @@ runs:
echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == *"stable"* ]]; then
export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
echo "Hello $VERSION"
echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == "latest" ]]; then
echo "::set-output name=tools-url::latest"


@@ -1,60 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: "PR Check - Analyze: 'ref' and 'sha' from inputs"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
analyze-ref-input:
strategy:
matrix:
version:
- stable-20210308
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest, macos-latest, windows-2019]
name: "Analyze: 'ref' and 'sha' from inputs"
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
env:
TEST_MODE: true
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -1,77 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Debug artifact upload
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
debug-artifacts:
strategy:
matrix:
version:
- stable-20210308
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest, macos-latest]
name: Debug artifact upload
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: my-debug-artifacts
debug-database-name: my-db
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
id: analysis
- uses: actions/download-artifact@v2
with:
name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
- shell: bash
run: |
LANGUAGES="cpp csharp go java javascript python"
for language in $LANGUAGES; do
echo "Checking $language"
if [[ ! -f "$language.sarif" ]] ; then
echo "Missing a SARIF file for $language"
exit 1
fi
if [[ ! -f "my-db-$language.zip" ]] ; then
echo "Missing a database bundle for $language"
exit 1
fi
if [[ ! -d "$language/log" ]] ; then
echo "Missing logs for $language"
exit 1
fi
done
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -1,63 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Extractor ram and threads options test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
extractor-ram-threads:
strategy:
matrix:
version: [latest]
os: [ubuntu-latest]
name: Extractor ram and threads options test
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: java
ram: 230
threads: 1
- name: Assert Results
shell: bash
run: |
if [ "${CODEQL_RAM}" != "230" ]; then
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_THREADS}" != "1" ]; then
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -24,13 +24,16 @@ jobs:
strategy:
matrix:
version:
- stable-20210308
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest, macos-latest, windows-2019]
os:
- ubuntu-latest
- macos-latest
- windows-latest
name: 'Go: Custom queries'
runs-on: ${{ matrix.os }}
steps:


@@ -24,7 +24,7 @@ jobs:
strategy:
matrix:
version:
- stable-20210308
- stable-20201028
- stable-20210319
- stable-20210809
- cached


@@ -24,13 +24,16 @@ jobs:
strategy:
matrix:
version:
- stable-20210308
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest, macos-latest, windows-2019]
os:
- ubuntu-latest
- macos-latest
- windows-latest
name: 'Go: Custom tracing'
runs-on: ${{ matrix.os }}
steps:


@@ -24,7 +24,7 @@ jobs:
strategy:
matrix:
version:
- stable-20210308
- stable-20201028
- stable-20210319
- stable-20210809
- cached


@@ -24,13 +24,16 @@ jobs:
strategy:
matrix:
version:
- stable-20210308
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest, macos-latest, windows-2019]
os:
- ubuntu-latest
- macos-latest
- windows-latest
name: Remote config file
runs-on: ${{ matrix.os }}
steps:
@@ -45,7 +48,7 @@ jobs:
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash


@@ -24,7 +24,7 @@ jobs:
strategy:
matrix:
version:
- stable-20210308
- stable-20201028
- stable-20210319
- stable-20210809
- cached


@@ -1,88 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Test unsetting environment variables
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
unset-environment:
strategy:
matrix:
version:
- stable-20210308
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest]
name: Test unsetting environment variables
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
- shell: bash
run: |
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for CPP, or created it in the wrong location."
exit 1
fi
CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for C Sharp, or created it in the wrong location."
exit 1
fi
GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Go, or created it in the wrong location."
exit 1
fi
JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Java, or created it in the wrong location."
exit 1
fi
JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Javascript, or created it in the wrong location."
exit 1
fi
PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Python, or created it in the wrong location."
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -1,67 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: "PR Check - Upload-sarif: 'ref' and 'sha' from inputs"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
upload-ref-sha-input:
strategy:
matrix:
version:
- stable-20210308
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest, macos-latest, windows-2019]
name: "Upload-sarif: 'ref' and 'sha' from inputs"
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
upload: false
env:
TEST_MODE: true
- uses: ./../action/upload-sarif
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
env:
TEST_MODE: true
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true


@@ -182,9 +182,7 @@ jobs:
runner-analyze-csharp-windows:
name: Runner windows C# analyze
needs: [check-js, check-node-modules]
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
# `windows-latest`.
runs-on: windows-2019
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
@@ -303,9 +301,7 @@ jobs:
runner-analyze-csharp-autobuild-windows:
name: Runner windows autobuild C# analyze
needs: [check-js, check-node-modules]
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
# `windows-latest`.
runs-on: windows-2019
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
@@ -397,42 +393,3 @@ jobs:
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
runner-extractor-ram-threads-options:
name: Runner ubuntu extractor RAM and threads options
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-linux init --ram=230 --threads=1 --repository $GITHUB_REPOSITORY --languages java --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Assert Results
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
if [ "${CODEQL_RAM}" != "230" ]; then
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_THREADS}" != "1" ]; then
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
exit 1
fi


@@ -125,7 +125,7 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python_version }}
python-version: ${{ matrix.python-version }}
- name: Initialize CodeQL
uses: ./init


@@ -1,5 +1,7 @@
name: Update release branch
on:
schedule:
- cron: 0 9 * * 1
repository_dispatch:
# Example of how to trigger this:
# curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'


@@ -40,6 +40,5 @@ jobs:
body: ""
author: GitHub <noreply@github.com>
branch: update-supported-enterprise-server-versions
draft: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.gitignore vendored (2 changed lines)

@@ -1,4 +1,2 @@
/runner/dist/
/runner/node_modules/
# Ignore for example failing-tests.json from AVA
node_modules/.cache


@@ -4,79 +4,6 @@
No user facing changes.
## 1.1.3 - 23 Feb 2022
- Fix bug where the CLR traces can continue tracing even after tracing should be stopped. [#938](https://github.com/github/codeql-action/pull/938)
## 1.1.2 - 17 Feb 2022
- Due to potential issues for GHES 3.13.3 customers who are using recent versions of the CodeQL Action via GHES Connect, the CodeQL Action now uses Node.js v12 rather than Node.js v16. [#937](https://github.com/github/codeql-action/pull/937)
## 1.1.1 - 17 Feb 2022
- The CodeQL CLI versions up to and including version 2.4.4 are not compatible with the CodeQL Action 1.1.1 and later. The Action will emit an error if it detects that it is being used by an incompatible version of the CLI. [#931](https://github.com/github/codeql-action/pull/931)
- Update default CodeQL bundle version to 2.8.1. [#925](https://github.com/github/codeql-action/pull/925)
## 1.1.0 - 11 Feb 2022
- The CodeQL Action now uses Node.js v16. [#909](https://github.com/github/codeql-action/pull/909)
- Beware that the CodeQL build tracer in this release (and in all earlier releases) is incompatible with Windows 11 and Windows Server 2022. This incompatibility affects database extraction for compiled languages: cpp, csharp, go, and java. As a result, analyzing these languages with the `windows-latest` or `windows-2022` Actions virtual environments is currently unsupported. If you use any of these languages, please use the `windows-2019` Actions virtual environment or otherwise avoid these specific Windows versions until a new release fixes this incompatibility.
## 1.0.32 - 07 Feb 2022
- Add `sarif-id` as an output for the `upload-sarif` and `analyze` actions. [#889](https://github.com/github/codeql-action/pull/889)
- Add `ref` and `sha` inputs to the `analyze` action, which override the defaults provided by the GitHub Action context. [#889](https://github.com/github/codeql-action/pull/889)
- Update default CodeQL bundle version to 2.8.0. [#911](https://github.com/github/codeql-action/pull/911)
## 1.0.31 - 31 Jan 2022
- Remove `experimental` message when using custom CodeQL packages. [#888](https://github.com/github/codeql-action/pull/888)
- Add a better warning message stating that experimental features will be disabled if the workflow has been triggered by a pull request from a fork or the `security-events: write` permission is not present. [#882](https://github.com/github/codeql-action/pull/882)
## 1.0.30 - 24 Jan 2022
- Display a better error message when encountering a workflow that runs the `codeql-action/init` action multiple times. [#876](https://github.com/github/codeql-action/pull/876)
- Update default CodeQL bundle version to 2.7.6. [#877](https://github.com/github/codeql-action/pull/877)
## 1.0.29 - 21 Jan 2022
- The feature to wait for SARIF processing to complete after upload has been disabled by default due to a bug in its interaction with pull requests from forks.
## 1.0.28 - 18 Jan 2022
- Update default CodeQL bundle version to 2.7.5. [#866](https://github.com/github/codeql-action/pull/866)
- Fix a bug where SARIF files were failing upload due to an invalid test for unique categories. [#872](https://github.com/github/codeql-action/pull/872)
## 1.0.27 - 11 Jan 2022
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#855](https://github.com/github/codeql-action/pull/855)
## 1.0.26 - 10 Dec 2021
- Update default CodeQL bundle version to 2.7.3. [#842](https://github.com/github/codeql-action/pull/842)
## 1.0.25 - 06 Dec 2021
No user facing changes.
## 1.0.24 - 23 Nov 2021
- Update default CodeQL bundle version to 2.7.2. [#827](https://github.com/github/codeql-action/pull/827)
## 1.0.23 - 16 Nov 2021
- The `upload-sarif` action now allows multiple uploads in a single job, as long as they have different categories. [#801](https://github.com/github/codeql-action/pull/801)
- Update default CodeQL bundle version to 2.7.1. [#816](https://github.com/github/codeql-action/pull/816)
## 1.0.22 - 04 Nov 2021
- The `init` step of the Action now supports `ram` and `threads` inputs to limit resource use of CodeQL extractors. These inputs also serve as defaults to the subsequent `analyze` step, which finalizes the database and executes queries. [#738](https://github.com/github/codeql-action/pull/738)
- When used with CodeQL 2.7.1 or above, the Action now includes custom query help in the analysis results uploaded to GitHub code scanning, if available. To add help text for a custom query, create a Markdown file next to the `.ql` file containing the query, using the same base name but the file extension `.md`. [#804](https://github.com/github/codeql-action/pull/804)
## 1.0.21 - 28 Oct 2021
- Update default CodeQL bundle version to 2.7.0. [#795](https://github.com/github/codeql-action/pull/795)
## 1.0.20 - 25 Oct 2021
No user facing changes.
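
The 1.0.22 entry above describes the "ram" and "threads" inputs on the init step, which also become the defaults for the subsequent analyze step. A minimal usage sketch, not part of this diff (the language and resource values are illustrative):

    steps:
      - uses: github/codeql-action/init@v1
        with:
          languages: java
          ram: 4096     # MB available to the CodeQL extractors (illustrative value)
          threads: 2    # extraction threads; also the default for the later analyze step
      - uses: github/codeql-action/analyze@v1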


@@ -39,8 +39,7 @@ on:
jobs:
CodeQL-Build:
# If you're only analyzing JavaScript or Python, CodeQL runs on ubuntu-latest, windows-latest, and macos-latest.
# If you're analyzing C/C++, C#, Go, or Java, CodeQL runs on ubuntu-latest, windows-2019, and macos-latest.
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest
permissions:


@@ -1,6 +1,6 @@
name: "CodeQL: Finish"
description: "Finalize CodeQL database"
author: "GitHub"
name: 'CodeQL: Finish'
description: 'Finalize CodeQL database'
author: 'GitHub'
inputs:
check_name:
description: The name of the check run to add text to.
@@ -8,9 +8,9 @@ inputs:
output:
description: The path of the directory in which to save the SARIF results
required: false
default: "../results"
default: '../results'
upload:
description: Upload the SARIF file to Code Scanning
description: Upload the SARIF file
required: false
default: "true"
cleanup-level:
@@ -18,12 +18,7 @@ inputs:
required: false
default: "brutal"
ram:
description: >-
The amount of memory in MB that can be used by CodeQL for database finalization and query execution.
By default, this action will use the same amount of memory as previously set in the "init" action.
If the "init" action also does not have an explicit "ram" input, this action will use most of the
memory available in the system (which for GitHub-hosted runners is 6GB for Linux, 5.5GB for Windows,
and 13GB for macOS).
description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
required: false
add-snippets:
description: Specify whether or not to add code snippets to the output sarif file.
@@ -34,23 +29,12 @@ inputs:
required: false
default: "false"
threads:
description: >-
The number of threads that can be used by CodeQL for database finalization and query execution.
By default, this action will use the same number of threads as previously set in the "init" action.
If the "init" action also does not have an explicit "threads" input, this action will use all the
hardware threads available in the system (which for GitHub-hosted runners is 2 for Linux and Windows
and 3 for macOS).
description: The number of threads to be used by CodeQL.
required: false
checkout_path:
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
required: false
default: ${{ github.workspace }}
ref:
description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."
required: false
sha:
description: "The sha of the HEAD of the ref where results will be uploaded. If not provided, the Action will use the GITHUB_SHA environment variable. If provided, the ref input must be provided as well. This input is not available in pull requests from forks."
required: false
category:
description: String used by Code Scanning for matching the analyses
required: false
@@ -58,10 +42,6 @@ inputs:
description: Whether to upload the resulting CodeQL database
required: false
default: "true"
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
default: "false"
token:
default: ${{ github.token }}
matrix:
@@ -69,8 +49,6 @@ inputs:
outputs:
db-locations:
description: A map from language to absolute path for each database created by CodeQL.
sarif-id:
description: The ID of the uploaded SARIF file.
runs:
using: "node12"
main: "../lib/analyze-action.js"
using: 'node12'
main: '../lib/analyze-action.js'
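
For context, a minimal sketch, not part of this diff, of how the "ref", "sha", and "wait-for-processing" inputs described in this hunk are passed to the analyze step; it mirrors the PR-check workflows earlier in this compare, and the ref and sha values are illustrative (the two must always be supplied together):

    - uses: github/codeql-action/analyze@v1
      with:
        ref: refs/heads/main                            # overrides the GITHUB_REF default
        sha: 5e235361806c361d4d3f8859e3c897658025a9a2    # overrides GITHUB_SHA; required whenever ref is set
        wait-for-processing: "false"                     # do not wait for SARIF processing after upload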


@@ -41,34 +41,6 @@ inputs:
source-root:
description: Path of the root source code directory, relative to $GITHUB_WORKSPACE.
required: false
ram:
description: >-
The amount of memory in MB that can be used by CodeQL extractors.
By default, CodeQL extractors will use most of the memory available in the system
(which for GitHub-hosted runners is 6GB for Linux, 5.5GB for Windows, and 13GB for macOS).
This input also sets the amount of memory that can later be used by the "analyze" action.
required: false
threads:
description: >-
The number of threads that can be used by CodeQL extractors.
By default, CodeQL extractors will use all the hardware threads available in the system
(which for GitHub-hosted runners is 2 for Linux and Windows and 3 for macOS).
This input also sets the number of threads that can later be used by the "analyze" action.
required: false
debug:
description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
required: false
default: 'false'
debug-artifact-name:
description: >-
The name of the artifact to store debugging information in.
This is only used when debug mode is enabled.
required: false
debug-database-name:
description: >-
The name of the database uploaded to the debugging artifact.
This is only used when debug mode is enabled.
required: false
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis
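
A minimal sketch, not part of this diff, of how the debug-mode inputs listed in this hunk are supplied to the init step; the artifact and database names mirror the debug-artifact PR check earlier in this compare, and the language is illustrative:

    - uses: github/codeql-action/init@v1
      with:
        languages: javascript            # illustrative
        debug: true                      # enable debug mode
        debug-artifact-name: my-debug-artifacts
        debug-database-name: my-db

That PR check later downloads an artifact named my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}, which suggests the supplied artifact name is suffixed with the OS and version on upload.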

lib/actions-util.js generated (105 changed lines)

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
@@ -51,10 +51,10 @@ exports.getRequiredInput = getRequiredInput;
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
const getOptionalInput = function (name) {
function getOptionalInput(name) {
const value = core.getInput(name);
return value.length > 0 ? value : undefined;
};
}
exports.getOptionalInput = getOptionalInput;
function getTemporaryDirectory() {
const value = process.env["CODEQL_ACTION_TEMP"];
@@ -97,61 +97,11 @@ const getCommitOid = async function (ref = "HEAD") {
return commitOid.trim();
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment or input: ${e}`);
core.info(e.stack || "NO STACK");
return (0, exports.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
}
};
exports.getCommitOid = getCommitOid;
/**
* If the action was triggered by a pull request, determine the commit sha of the merge base.
* Returns undefined if run by other triggers or the merge base cannot be determined.
*/
const determineMergeBaseCommitOid = async function () {
if (process.env.GITHUB_EVENT_NAME !== "pull_request") {
return undefined;
}
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
try {
let commitOid = "";
let baseOid = "";
let headOid = "";
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["show", "-s", "--format=raw", mergeSha], {
silent: true,
listeners: {
stdline: (data) => {
if (data.startsWith("commit ") && commitOid === "") {
commitOid = data.substring(7);
}
else if (data.startsWith("parent ")) {
if (baseOid === "") {
baseOid = data.substring(7);
}
else if (headOid === "") {
headOid = data.substring(7);
}
}
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
if (commitOid === mergeSha &&
headOid.length === 40 &&
baseOid.length === 40) {
return baseOid;
}
return undefined;
}
catch (e) {
core.info(`Failed to call git to determine merge base. Continuing with data from environment: ${e}`);
core.info(e.stack || "NO STACK");
return undefined;
}
};
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
function isObject(o) {
return o !== null && typeof o === "object";
}
@@ -422,21 +372,8 @@ exports.computeAutomationID = computeAutomationID;
async function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const refInput = (0, exports.getOptionalInput)("ref");
const shaInput = (0, exports.getOptionalInput)("sha");
const hasRefInput = !!refInput;
const hasShaInput = !!shaInput;
// If one of 'ref' or 'sha' are provided, both are required
if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
throw new Error("Both 'ref' and 'sha' are required if one of them is provided.");
}
const ref = refInput || (0, util_1.getRequiredEnvParam)("GITHUB_REF");
const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
// If the ref is a user-provided input, we have to skip logic
// and assume that it is really where they want to upload the results.
if (refInput) {
return refInput;
}
const ref = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
const sha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
// For pull request refs we want to detect whether the workflow
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
// than the 'merge' ref. If so, we want to convert the ref that
@@ -464,15 +401,6 @@ async function getRef() {
}
}
exports.getRef = getRef;
function getActionsStatus(error, otherFailureCause) {
if (error || otherFailureCause) {
return error instanceof util_1.UserError ? "user-error" : "failure";
}
else {
return "success";
}
}
exports.getActionsStatus = getActionsStatus;
/**
* Compose a StatusReport.
*
@@ -483,7 +411,7 @@ exports.getActionsStatus = getActionsStatus;
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = (0, exports.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
const commitOid = process.env["GITHUB_SHA"] || "";
const ref = await getRef();
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
let workflowRunID = -1;
@@ -524,10 +452,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
if (exception) {
statusReport.exception = exception;
}
if (status === "success" ||
status === "failure" ||
status === "aborted" ||
status === "user-error") {
if (status === "success" || status === "failure" || status === "aborted") {
statusReport.completed_at = new Date().toISOString();
}
const matrix = getRequiredInput("matrix");
@@ -538,7 +463,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
}
exports.createStatusReportBase = createStatusReportBase;
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
const GENERIC_404_MSG = "Not authorized to used the CodeQL code scanning feature on this repo.";
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
/**
@@ -553,12 +478,6 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
async function sendStatusReport(statusReport) {
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
// If in test mode we don't want to upload the results
const testMode = process.env["TEST_MODE"] === "true" || false;
if (testMode) {
core.debug("In test mode. Status reports are not uploaded.");
return true;
}
const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
@@ -655,8 +574,4 @@ async function isAnalyzingDefaultBranch() {
return currentRef === defaultBranch;
}
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
function sanitizeArifactName(name) {
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
}
exports.sanitizeArifactName = sanitizeArifactName;
//# sourceMappingURL=actions-util.js.map

File diff suppressed because one or more lines are too long


@@ -71,43 +71,6 @@ function errorCodes(actual, expected) {
t.deepEqual(actualRef, "refs/pull/1/head");
callback.restore();
});
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
// These values are be ignored
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
const callback = sinon.stub(actionsutil, "getCommitOid");
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
callback.withArgs("HEAD").resolves("b".repeat(40));
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, "refs/pull/2/merge");
callback.restore();
getAdditionalInputStub.restore();
});
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
await t.throwsAsync(async () => {
await actionsutil.getRef();
}, {
instanceOf: Error,
message: "Both 'ref' and 'sha' are required if one of them is provided.",
});
getAdditionalInputStub.restore();
});
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
await t.throwsAsync(async () => {
await actionsutil.getRef();
}, {
instanceOf: Error,
message: "Both 'ref' and 'sha' are required if one of them is provided.",
});
getAdditionalInputStub.restore();
});
(0, ava_1.default)("computeAutomationID()", async (t) => {
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
@@ -477,10 +440,4 @@ on: ["push"]
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
});
});
(0, ava_1.default)("sanitizeArifactName", (t) => {
t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
t.deepEqual(actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
});
//# sourceMappingURL=actions-util.test.js.map

File diff suppressed because one or more lines are too long

lib/analysis-paths.js generated (4 changed lines)

@@ -37,11 +37,11 @@ function buildIncludeExcludeEnvVar(paths) {
return paths.join("\n");
}
function printPathFiltersWarning(config, logger) {
// Index include/exclude/filters only work in javascript/python/ruby.
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript, Python, and Ruby');
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python');
}
}
exports.printPathFiltersWarning = printPathFiltersWarning;
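
The warning above concerns the "paths"/"paths-ignore" fields of the CodeQL config file, which only take effect for interpreted languages (JavaScript and Python; one side of this hunk also lists Ruby). A hypothetical config-file sketch, not part of this diff, showing where those fields live:

    # .github/codeql/codeql-config.yml (hypothetical path and values)
    name: "Example CodeQL config"
    paths:
      - src
    paths-ignore:
      - src/third_party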


@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}


@@ -42,9 +42,6 @@ const util = __importStar(require("./util"));
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -66,9 +63,6 @@ const util = __importStar(require("./util"));
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -91,9 +85,6 @@ const util = __importStar(require("./util"));
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);


@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}


@@ -1,78 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze = __importStar(require("./analyze"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
// This test needs to be in its own file so that ava would run it in its own
// nodejs process. The code being tested is in analyze-action.ts, which runs
// immediately on load. So the file needs to be loaded during part of the test,
// and that can happen only once per nodejs process. If multiple such tests are
// in the same test file, ava would run them in the same nodejs process, and all
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
languages: [],
packs: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
// When there are no action inputs for RAM and threads, the action uses
// environment variables (passed down from the init action) to set RAM and
// threads usage.
process.env["CODEQL_THREADS"] = "-1";
process.env["CODEQL_RAM"] = "4992";
const runFinalizeStub = sinon.stub(analyze, "runFinalize");
const runQueriesStub = sinon.stub(analyze, "runQueries");
const analyzeAction = require("./analyze-action");
// When analyze-action.ts loads, it runs an async function from the top
// level but does not wait for it to finish. To ensure that calls to
// runFinalize and runQueries are correctly captured by spies, we explicitly
// wait for the action promise to complete before starting verification.
await analyzeAction.runPromise;
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=4992");
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=4992");
});
});
//# sourceMappingURL=analyze-action-env.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}


@@ -1,78 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze = __importStar(require("./analyze"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
// This test needs to be in its own file so that ava would run it in its own
// nodejs process. The code being tested is in analyze-action.ts, which runs
// immediately on load. So the file needs to be loaded during part of the test,
// and that can happen only once per nodejs process. If multiple such tests are
// in the same test file, ava would run them in the same nodejs process, and all
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
languages: [],
packs: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";
process.env["CODEQL_RAM"] = "4992";
// Action inputs have precedence over environment variables.
optionalInputStub.withArgs("threads").returns("-1");
optionalInputStub.withArgs("ram").returns("3012");
const runFinalizeStub = sinon.stub(analyze, "runFinalize");
const runQueriesStub = sinon.stub(analyze, "runQueries");
const analyzeAction = require("./analyze-action");
// When analyze-action.ts loads, it runs an async function from the top
// level but does not wait for it to finish. To ensure that calls to
// runFinalize and runQueries are correctly captured by spies, we explicitly
// wait for the action promise to complete before starting verification.
await analyzeAction.runPromise;
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=3012");
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=3012");
});
});
//# sourceMappingURL=analyze-action-input.test.js.map
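
The comment in the removed test above explains the loading constraint: analyze-action starts running as soon as it is required, so each test that loads it needs its own ava test file and must await the exported runPromise. A minimal sketch of that pattern, using a hypothetical action-module stand-in rather than the real action:

// action-module.ts (hypothetical stand-in for analyze-action.ts)
export const runPromise = run();
async function run(): Promise<void> {
  // the action's work starts here, as soon as the module is loaded
}

// action-module.test.ts (one such test per file; ava gives each test file its own Node.js process)
import test from "ava";

test("load-time work has finished before asserting", async (t) => {
  // eslint-disable-next-line import/no-commonjs
  const mod = require("./action-module");
  await mod.runPromise; // wait for the top-level promise, as the removed test does
  t.pass();
});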

View File

@@ -1 +0,0 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

105 lib/analyze-action.js generated
View File

@@ -19,10 +19,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.runPromise = exports.sendStatusReport = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const artifact = __importStar(require("@actions/artifact"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
@@ -33,27 +31,22 @@ const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendStatusReport(startedAt, config, stats, error) {
const status = actionsUtil.getActionsStatus(error, stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language);
async function sendStatusReport(startedAt, stats, error) {
const status = (stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language) !== undefined || error !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error === null || error === void 0 ? void 0 : error.message, error === null || error === void 0 ? void 0 : error.stack);
const statusReport = {
...statusReportBase,
...(config
? {
ml_powered_javascript_queries: util.getMlPoweredJsQueriesStatus(config),
}
: {}),
...(stats || {}),
};
await actionsUtil.sendStatusReport(statusReport);
}
exports.sendStatusReport = sendStatusReport;
async function run() {
const startedAt = new Date();
let uploadResult = undefined;
let uploadStats = undefined;
let runStats = undefined;
let config = undefined;
util.initializeEnvironment(util.Mode.actions, pkg.version);
@@ -72,33 +65,11 @@ async function run() {
url: util.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
const outputDir = actionsUtil.getRequiredInput("output");
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram"));
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
if (config.debugMode) {
// Upload the SARIF files as an Actions artifact for debugging
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir, config.debugArtifactName);
}
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (config.debugMode) {
// Upload the logs as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages) {
toUpload.push(...listFolder(path.resolve(util.getCodeQLDatabasePath(config, language), "log")));
}
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Multilanguage tracing: there are additional logs in the root of the cluster
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir, config.debugArtifactName);
}
}
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
@@ -109,18 +80,13 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
core.setOutput("sarif-id", uploadResult.sarifID);
uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
}
else {
logger.info("Not uploading results");
}
// Possibly upload the database bundles for remote queries
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
}
catch (origError) {
const error = origError instanceof Error ? origError : new Error(String(origError));
@@ -128,27 +94,14 @@ async function run() {
console.log(error);
if (error instanceof analyze_1.CodeQLAnalysisError) {
const stats = { ...error.queriesStatusReport };
await sendStatusReport(startedAt, config, stats, error);
await sendStatusReport(startedAt, stats, error);
}
else {
await sendStatusReport(startedAt, config, undefined, error);
await sendStatusReport(startedAt, undefined, error);
}
return;
}
finally {
if (config !== undefined && config.debugMode) {
try {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages) {
toUpload.push(await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
}
catch (error) {
console.log(`Failed to upload database debug bundles: ${error}`);
}
}
if (core.isDebug() && config !== undefined) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
@@ -171,45 +124,19 @@ async function run() {
}
}
}
if (runStats && uploadResult) {
await sendStatusReport(startedAt, config, {
...runStats,
...uploadResult.statusReport,
});
if (runStats && uploadStats) {
await sendStatusReport(startedAt, { ...runStats, ...uploadStats });
}
else if (runStats) {
await sendStatusReport(startedAt, config, { ...runStats });
await sendStatusReport(startedAt, { ...runStats });
}
else {
await sendStatusReport(startedAt, config, undefined);
await sendStatusReport(startedAt, undefined);
}
}
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
let suffix = "";
const matrix = actionsUtil.getRequiredInput("matrix");
if (matrix !== undefined && matrix !== "null") {
for (const entry of Object.entries(JSON.parse(matrix)).sort())
suffix += `-${entry[1]}`;
}
await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
}
function listFolder(dir) {
const entries = fs.readdirSync(dir, { withFileTypes: true });
const files = [];
for (const entry of entries) {
if (entry.isFile()) {
files.push(path.resolve(dir, entry.name));
}
else if (entry.isDirectory()) {
files.push(...listFolder(path.resolve(dir, entry.name)));
}
}
return files;
}
exports.runPromise = run();
async function runWrapper() {
try {
await exports.runPromise;
await run();
}
catch (error) {
core.setFailed(`analyze action failed: ${error}`);
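
The uploadDebugArtifacts helper removed above derives an artifact-name suffix from the job matrix so that parallel matrix jobs do not overwrite each other's debug artifacts. A condensed sketch of that naming step, with the matrix JSON passed in as a parameter for illustration (the real helper reads the "matrix" input and also applies sanitizeArifactName):

import * as path from "path";
import * as artifact from "@actions/artifact";

// Turn a matrix like {"os": "ubuntu-latest", "version": "nightly-latest"} into
// "-ubuntu-latest-nightly-latest" so each job's artifact gets a distinct name.
function matrixSuffix(matrixInput: string | undefined): string {
  if (matrixInput === undefined || matrixInput === "null") {
    return "";
  }
  return Object.entries(JSON.parse(matrixInput))
    .sort() // stable ordering across jobs
    .map(([, value]) => `-${value}`)
    .join("");
}

async function uploadDebugArtifacts(files: string[], rootDir: string, baseName: string, matrixInput?: string): Promise<void> {
  const name = `${baseName}${matrixSuffix(matrixInput)}`;
  await artifact.create().uploadArtifact(
    name,
    files.map((f) => path.normalize(f)),
    path.normalize(rootDir)
  );
}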

File diff suppressed because one or more lines are too long

17 lib/analyze.js generated
View File

@@ -29,7 +29,6 @@ const codeql_1 = require("./codeql");
const count_loc_1 = require("./count-loc");
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
class CodeQLAnalysisError extends Error {
constructor(queriesStatusReport, message) {
@@ -133,7 +132,10 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
}
try {
if (hasPackWithCustomQueries) {
logger.info("Performing analysis with custom CodeQL Packs.");
logger.info("*************");
logger.info("Performing analysis with custom QL Packs. QL Packs are an experimental feature.");
logger.info("And should not be used in production yet.");
logger.info("*************");
logger.startGroup(`Downloading custom packs for ${language}`);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const results = await codeql.packDownload(packsWithVersion);
@@ -233,15 +235,8 @@ function packWithVersionToQuerySuiteEntry(pack) {
return text;
}
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Delete variables as specified by the end-tracing script
await (0, tracer_config_1.endTracingForCluster)(config);
}
else {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
}
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
fs.mkdirSync(outputDir, { recursive: true });
await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
}
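
The runFinalize change above removes a version check and always takes the legacy path. For reference, a sketch of the version-gating pattern being removed, assuming the comparison is semver-based like util.codeQlVersionAbove (with CODEQL_VERSION_NEW_TRACING set to 99.99.99 elsewhere in this diff, the new path stays off for every released CLI):

import * as semver from "semver";

const CODEQL_VERSION_NEW_TRACING = "99.99.99"; // effectively disables the new path

async function codeQlVersionAbove(codeql: { getVersion(): Promise<string> }, required: string): Promise<boolean> {
  return semver.gte(await codeql.getVersion(), required);
}

async function endTracing(codeql: { getVersion(): Promise<string> }): Promise<void> {
  if (await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING)) {
    // newer CLIs: run the CLI's end-tracing script for the whole database cluster
  } else {
    // older CLIs: just stop tracing ourselves by deleting the tracer config env var
    delete process.env["ODASA_TRACER_CONFIGURATION"];
  }
}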

File diff suppressed because one or more lines are too long

3 lib/analyze.test.js generated
View File

@@ -125,9 +125,6 @@ const util = __importStar(require("./util"));
},
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs,
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,

File diff suppressed because one or more lines are too long

View File

@@ -1 +1 @@
{ "maximumVersion": "3.4", "minimumVersion": "3.1" }
{ "maximumVersion": "3.3", "minimumVersion": "3.0" }

View File

@@ -29,7 +29,9 @@ const util_1 = require("./util");
const pkg = require("../package.json");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
const status = (0, actions_util_1.getActionsStatus)(cause, failingLanguage);
const status = failingLanguage !== undefined || cause !== undefined
? "failure"
: "success";
const statusReportBase = await (0, actions_util_1.createStatusReportBase)("autobuild", status, startedAt, cause === null || cause === void 0 ? void 0 : cause.message, cause === null || cause === void 0 ? void 0 : cause.stack);
const statusReport = {
...statusReportBase,
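
Both this change and the matching one in analyze-action above replace the shared getActionsStatus helper with an inline computation. The equivalent logic, extracted into a small function for clarity (signature simplified):

// "failure" if any language failed or an error was raised; "success" otherwise.
function actionStatus(error?: Error, failingLanguage?: string): "success" | "failure" {
  return failingLanguage !== undefined || error !== undefined ? "failure" : "success";
}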

View File

@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAMwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

55 lib/codeql.js generated
View File

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
exports.getExtraOptions = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -37,7 +37,6 @@ const languages_1 = require("./languages");
const toolcache = __importStar(require("./toolcache"));
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
const util = __importStar(require("./util"));
const util_1 = require("./util");
class CommandInvocationError extends Error {
constructor(cmd, args, exitCode, error) {
super(`Failure invoking ${cmd} with arguments ${args}.\n
@@ -59,10 +58,9 @@ const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
* below can be used to conditionally enable certain features on versions newer
* than this. Please record the reason we cannot support an older version.
*
* Reason: First version containing fix for the "We still have not reached
* idleness" deadlock.
* Reason: Changes to how the tracing environment is set up.
*/
const CODEQL_MINIMUM_VERSION = "2.4.5";
const CODEQL_MINIMUM_VERSION = "2.3.1";
/**
* Versions of CodeQL that version-flag certain functionality in the Action.
* For convenience, please keep these in descending order. Once a version
@@ -74,18 +72,22 @@ const CODEQL_VERSION_METRICS = "2.5.5";
const CODEQL_VERSION_GROUP_RULES = "2.5.5";
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
/**
* This variable controls using the new style of tracing from the CodeQL
* CLI. In particular, with versions above this we will use both indirect
* tracing, and multi-language tracing together with database clusters.
* Version above which we use the CLI's indirect build tracing and
* multi-language tracing features.
*
* Note that there were bugs in both of these features that were fixed in
* release 2.7.0 of the CodeQL CLI, therefore this flag is only enabled for
* versions above that.
* There are currently three blockers on the CLI's side to enabling this:
* (1) The logs directory should be created for a DB cluster, as some
* autobuilders expect it to be present.
* (2) The SEMMLE_PRELOAD_libtrace{32,64}? env variables need to be set.
* (3) The .environment and .win32env files need to be created next to
* the DB spec.
*
* Once _all_ of these are fixed, we can enable this by setting the
* version flag below to the earliest version of the CLI that resolved
* the above issues.
*/
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
exports.CODEQL_VERSION_NEW_TRACING = "99.99.99";
function getCodeQLBundleName() {
let platform;
if (process.platform === "win32") {
@@ -218,7 +220,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
// specified explicitly (in which case we always honor it).
if (!codeqlFolder && !codeqlURL && !forceLatest) {
const codeqlVersions = toolcache.findAllVersions("CodeQL", toolCacheDir, logger);
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
if (codeqlVersions.length === 1) {
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], toolCacheDir, logger);
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
@@ -356,15 +358,6 @@ function getCachedCodeQL() {
return cachedCodeQL;
}
exports.getCachedCodeQL = getCachedCodeQL;
/**
* Get a real, newly created CodeQL instance for testing. The instance refers to
* a non-existent placeholder codeql command, so tests that use this function
* should also stub the toolrunner.ToolRunner constructor.
*/
async function getCodeQLForTesting() {
return getCodeQLForCmd("codeql-for-testing", false);
}
exports.getCodeQLForTesting = getCodeQLForTesting;
async function getCodeQLForCmd(cmd, checkVersion) {
let cachedVersion = undefined;
const codeql = {
@@ -513,12 +506,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, args, error_matcher_1.errorMatchers);
},
async resolveLanguages() {
const codeqlArgs = [
"resolve",
"languages",
"--format=json",
...getExtraOptionsFromEnv(["resolve", "languages"]),
];
const codeqlArgs = ["resolve", "languages", "--format=json"];
const output = await runTool(cmd, codeqlArgs);
try {
return JSON.parse(output);
@@ -580,8 +568,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
codeqlArgs.push("--print-metrics-summary");
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_GROUP_RULES))
codeqlArgs.push("--sarif-group-rules-by-pack");
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_CUSTOM_QUERY_HELP))
codeqlArgs.push("--sarif-add-query-help");
if (automationDetailsId !== undefined &&
(await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP))) {
codeqlArgs.push("--sarif-category", automationDetailsId);
@@ -641,18 +627,15 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"cleanup",
databasePath,
`--mode=${cleanupLevel}`,
...getExtraOptionsFromEnv(["database", "cleanup"]),
];
await runTool(cmd, codeqlArgs);
},
async databaseBundle(databasePath, outputFilePath, databaseName) {
async databaseBundle(databasePath, outputFilePath) {
const args = [
"database",
"bundle",
databasePath,
`--output=${outputFilePath}`,
`--name=${databaseName}`,
...getExtraOptionsFromEnv(["database", "bundle"]),
];
await new toolrunner.ToolRunner(cmd, args).exec();
},

File diff suppressed because one or more lines are too long

23 lib/codeql.test.js generated
View File

@@ -23,11 +23,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const sinon = __importStar(require("sinon"));
const codeql = __importStar(require("./codeql"));
const defaults = __importStar(require("./defaults.json"));
const logging_1 = require("./logging");
@@ -219,25 +217,4 @@ ava_1.default.beforeEach(() => {
const repoEnv = codeql.getCodeQLActionRepository(logger);
t.deepEqual(repoEnv, "xxx/yyy");
});
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-query-help for 2.7.0", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
});
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
});
function stubToolRunnerConstructor() {
const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
runnerObjectStub.exec.resolves(0);
const runnerConstructorStub = sinon.stub(toolrunner, "ToolRunner");
runnerConstructorStub.returns(runnerObjectStub);
return runnerConstructorStub;
}
//# sourceMappingURL=codeql.test.js.map
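
The removed codeql.test.js tests above pair with the removed getCodeQLForTesting helper from codeql.js: build a CodeQL object around a placeholder command, stub ToolRunner so nothing is actually executed, then assert on the arguments the stub received. A condensed sketch of that pattern as it existed before this change:

import * as toolrunner from "@actions/exec/lib/toolrunner";
import test from "ava";
import * as sinon from "sinon";

import * as codeql from "./codeql";

// Replace ToolRunner with a stub so no real codeql binary is needed.
function stubToolRunnerConstructor(): sinon.SinonStub {
  const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
  runnerObjectStub.exec.resolves(0);
  const runnerConstructorStub = sinon.stub(toolrunner, "ToolRunner");
  runnerConstructorStub.returns(runnerObjectStub);
  return runnerConstructorStub;
}

test("--sarif-add-query-help is passed for CLI 2.7.1", async (t) => {
  const runnerConstructorStub = stubToolRunnerConstructor();
  const codeqlObject = await codeql.getCodeQLForTesting(); // placeholder "codeql-for-testing" command
  sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
  await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
  t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"));
});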

File diff suppressed because one or more lines are too long

53 lib/config-utils.js generated
View File

@@ -25,11 +25,8 @@ const path = __importStar(require("path"));
const yaml = __importStar(require("js-yaml"));
const semver = __importStar(require("semver"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const util_1 = require("./util");
// Property names from the user-supplied config file.
const NAME_PROPERTY = "name";
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
@@ -119,25 +116,11 @@ const builtinSuites = ["security-extended", "security-and-quality"];
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite.
*/
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
var _a;
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
const found = builtinSuites.find((suite) => suite === suiteName);
if (!found) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
// pack, then add the ML-powered query pack so that we run ML-powered queries.
if (languages.includes("javascript") &&
(found === "security-extended" || found === "security-and-quality") &&
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some((pack) => pack.packName === util_1.ML_POWERED_JS_QUERIES_PACK.packName)) &&
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
if (!packs.javascript) {
packs.javascript = [];
}
packs.javascript.push(util_1.ML_POWERED_JS_QUERIES_PACK);
}
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
}
@@ -197,7 +180,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*/
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, workspacePath, apiDetails, logger, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
@@ -209,7 +192,7 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
}
// Check for one of the builtin suites
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
return;
}
// Otherwise, must be a reference to another repo
@@ -421,12 +404,12 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
}
return parsedLanguages;
}
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, workspacePath, apiDetails, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, workspacePath, apiDetails, logger);
}
}
// Returns true if either no queries were provided in the workflow.
@@ -442,7 +425,7 @@ function shouldAddConfigFileQueries(queriesInput) {
/**
* Get the default config for when the user has not supplied one.
*/
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
var _a;
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
const queries = {};
@@ -453,10 +436,10 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
};
}
await addDefaultQueries(codeQL, languages, queries);
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
if (queriesInput) {
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
}
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
return {
languages,
queries,
@@ -469,16 +452,13 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
codeQLCmd: codeQL.getPath(),
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
};
}
exports.getDefaultConfig = getDefaultConfig;
/**
* Load the config from the given file.
*/
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
var _a;
let parsedYAML;
if (isLocal(configFile)) {
@@ -519,13 +499,12 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
if (!disableDefaultQueries) {
await addDefaultQueries(codeQL, languages, queries);
}
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
if (queriesInput) {
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
}
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
@@ -538,7 +517,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureFlags, logger, configFile);
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, logger, configFile);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
@@ -563,6 +542,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
}
}
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
return {
languages,
queries,
@@ -575,9 +555,6 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
codeQLCmd: codeQL.getPath(),
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
};
}
/**
@@ -703,16 +680,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
var _a, _b, _c;
let config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug("No configuration file was provided");
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
}
else {
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
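
The largest removal in this file is the block in addBuiltinSuiteQueries that decided whether to inject the ML-powered JavaScript query pack. Condensed into one predicate for clarity (types simplified; the pinned pack version is elided; the pack name and CLI version come from elsewhere in this diff):

import * as semver from "semver";

const ML_POWERED_JS_QUERIES_PACK_NAME = "codeql/javascript-experimental-atm-queries";
const CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";

// Add the ML-powered pack only when analysing JavaScript with a broad enough suite,
// the user has not already requested the pack, the repo is opted into the beta via
// the feature flag, and the CLI is new enough to run the queries.
function shouldAddMlPoweredJsQueries(
  languages: string[],
  suiteName: string,
  jsPacks: Array<{ packName: string }>,
  mlPoweredQueriesFlagEnabled: boolean,
  cliVersion: string
): boolean {
  return (
    languages.includes("javascript") &&
    (suiteName === "security-extended" || suiteName === "security-and-quality") &&
    !jsPacks.some((pack) => pack.packName === ML_POWERED_JS_QUERIES_PACK_NAME) &&
    mlPoweredQueriesFlagEnabled &&
    semver.gte(cliVersion, CODEQL_VERSION_ML_POWERED_QUERIES)
  );
}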

File diff suppressed because one or more lines are too long

129 lib/config-utils.test.js generated
View File

@@ -31,7 +31,6 @@ const sinon = __importStar(require("sinon"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
@@ -89,8 +88,8 @@ function mockListLanguages(languages) {
};
},
});
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
});
});
(0, ava_1.default)("loading config saves config", async (t) => {
@@ -112,21 +111,18 @@ function mockListLanguages(languages) {
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig returns undefined before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig(tmpDir, logger);
t.not(config2, undefined);
if (config2 !== undefined) {
t.deepEqual(config1, config2);
}
t.deepEqual(config1, config2);
});
});
(0, ava_1.default)("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
try {
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -139,7 +135,7 @@ function mockListLanguages(languages) {
// no filename given, just a repo
const configFile = "octo-org/codeql-config@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -153,7 +149,7 @@ function mockListLanguages(languages) {
const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile)));
try {
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -218,13 +214,10 @@ function mockListLanguages(languages) {
gitHubVersion,
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: "my-artifact",
debugDatabaseName: "my-db",
};
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
@@ -260,7 +253,7 @@ function mockListLanguages(languages) {
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, [
@@ -303,7 +296,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
@@ -336,7 +329,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file.
@@ -368,7 +361,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled
@@ -394,7 +387,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
@@ -433,7 +426,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries,
// once for each of additional1 and additional2,
@@ -472,7 +465,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
try {
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.fail("initConfig did not throw error");
}
catch (err) {
@@ -515,7 +508,7 @@ function queriesToResolvedQueryForm(queries) {
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript";
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.assert(spyGetContents.called);
});
});
@@ -525,7 +518,7 @@ function queriesToResolvedQueryForm(queries) {
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -541,7 +534,7 @@ function queriesToResolvedQueryForm(queries) {
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -558,7 +551,7 @@ function queriesToResolvedQueryForm(queries) {
},
});
try {
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -570,7 +563,7 @@ function queriesToResolvedQueryForm(queries) {
return await util.withTmpDir(async (tmpDir) => {
const languages = "rubbish,english";
try {
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -598,7 +591,7 @@ function queriesToResolvedQueryForm(queries) {
const configFile = path.join(tmpDir, "codeql-config.yaml");
fs.writeFileSync(configFile, inputFileContents);
const languages = "javascript";
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
@@ -637,7 +630,7 @@ function queriesToResolvedQueryForm(queries) {
fs.writeFileSync(configFile, inputFileContents);
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript,python,cpp";
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
@@ -690,7 +683,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, "utf8");
try {
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -763,26 +756,28 @@ const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
/**
* Test macro for ensuring the packs block is valid
*/
const parsePacksMacro = ava_1.default.macro({
exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected),
title: (providedTitle = "") => `Parse Packs: ${providedTitle}`,
});
function parsePacksMacro(t, packsByLanguage, languages, expected) {
t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected);
}
parsePacksMacro.title = (providedTitle) => `Parse Packs: ${providedTitle}`;
/**
* Test macro for testing when the packs block is invalid
*/
const parsePacksErrorMacro = ava_1.default.macro({
exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), {
function parsePacksErrorMacro(t, packsByLanguage, languages, expected) {
t.throws(() => {
configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b");
}, {
message: expected,
}),
title: (providedTitle = "") => `Parse Packs Error: ${providedTitle}`,
});
});
}
parsePacksErrorMacro.title = (providedTitle) => `Parse Packs Error: ${providedTitle}`;
/**
* Test macro for testing when the packs block is invalid
*/
const invalidPackNameMacro = ava_1.default.macro({
exec: (t, name) => parsePacksErrorMacro.exec(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`)),
title: (_providedTitle, arg) => `Invalid pack string: ${arg}`,
});
function invalidPackNameMacro(t, name) {
parsePacksErrorMacro(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`));
}
invalidPackNameMacro.title = (_, arg) => `Invalid pack string: ${arg}`;
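The incoming side of these hunks uses plain macro functions with a title property instead of AVA's object-style ava_1.default.macro({ exec, title }); the test registrations just below work the same with either form. A minimal sketch of the function-style pattern, using a hypothetical double() helper purely for illustration:

const test = require("ava");

// Hypothetical helper, only here to give the macro something to assert on.
const double = (n) => n * 2;

// The assertion logic is the macro function's body...
function doubleMacro(t, input, expected) {
    t.is(double(input), expected);
}
// ...and the generated test title hangs off a title property.
doubleMacro.title = (providedTitle = "") => `Double: ${providedTitle}`;

// Registered exactly like the object-style macro it replaces.
test("two times two", doubleMacro, 2, 4);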
(0, ava_1.default)("no packs", parsePacksMacro, {}, [], {});
(0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
@@ -871,50 +866,6 @@ parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
const mlPoweredQueriesMacro = ava_1.default.macro({
exec: async (t, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async getVersion() {
return codeQLVersion;
},
async resolveQueries() {
return {
byLanguage: {
javascript: { "fake-query.ql": {} },
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
: []), (0, logging_1.getRunnerLogger)(true));
if (expectedVersionString !== undefined) {
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
packName: "codeql/javascript-experimental-atm-queries",
version: expectedVersionString,
},
],
});
}
else {
t.deepEqual(packs, {});
}
});
},
title: (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => `ML-powered queries ${expectedVersionString !== undefined
? `${expectedVersionString} are`
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`,
});
// macro, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, versionString
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", "~0.0.2");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", "~0.0.2");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
// errors
// input w invalid pack name
//# sourceMappingURL=config-utils.test.js.map

File diff suppressed because one or more lines are too long

lib/database-upload.js generated (35 changed lines)

@@ -25,7 +25,6 @@ const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const util = __importStar(require("./util"));
const util_1 = require("./util");
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
if (actionsUtil.getRequiredInput("upload-database") !== "true") {
logger.debug("Database upload disabled in workflow. Skipping upload.");
@@ -42,24 +41,36 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
return;
}
const client = (0, api_client_1.getApiClient)(apiDetails);
try {
await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
});
}
catch (e) {
if (util.isHTTPError(e) && e.status === 404) {
logger.debug("Repository is not opted in to database uploads. Skipping upload.");
}
else {
console.log(e);
logger.info(`Skipping database upload due to unknown error: ${e}`);
}
return;
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
for (const language of config.languages) {
// Upload the database bundle.
// Although we are uploading arbitrary file contents to the API, it's worth
        // noting that it's the API's job to validate that the contents are acceptable.
// This API method is available to anyone with write access to the repo.
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql, language));
// Bundle the database up into a single zip file
const databasePath = util.getCodeQLDatabasePath(config, language);
const databaseBundlePath = `${databasePath}.zip`;
await codeql.databaseBundle(databasePath, databaseBundlePath);
// Upload the database bundle
const payload = fs.readFileSync(databaseBundlePath);
try {
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
await client.request(`PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
language,
name: `${language}-database`,
data: payload,
headers: {
authorization: `token ${apiDetails.auth}`,
"Content-Type": "application/zip",
},
});
logger.debug(`Successfully uploaded database for ${language}`);
}
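Taken together, the incoming version of uploadDatabases in these hunks first probes an opt-in endpoint, then bundles and uploads one database per language. A stripped-down sketch of that flow, using only the calls visible in the diff (error handling, logging, and the earlier skip conditions are omitted):

const fs = require("fs");
const { getApiClient } = require("./api-client");
const { getCodeQL } = require("./codeql");
const util = require("./util");

async function uploadDatabasesSketch(repositoryNwo, config, apiDetails) {
    const client = getApiClient(apiDetails);

    // Opt-in check: a 404 here means the repository has not opted in,
    // and the real implementation skips the upload entirely.
    await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
        owner: repositoryNwo.owner,
        repo: repositoryNwo.repo,
    });

    const codeql = await getCodeQL(config.codeQLCmd);
    for (const language of config.languages) {
        // Bundle the database for this language into a single zip file...
        const databasePath = util.getCodeQLDatabasePath(config, language);
        const databaseBundlePath = `${databasePath}.zip`;
        await codeql.databaseBundle(databasePath, databaseBundlePath);

        // ...and PUT it to the per-language code scanning databases endpoint.
        await client.request("PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language", {
            owner: repositoryNwo.owner,
            repo: repositoryNwo.repo,
            language,
            data: fs.readFileSync(databaseBundlePath),
        });
    }
}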


@@ -1 +1 @@
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,UAAU,CAAC,CAAC;IACxC,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,8BAA8B;QAC9B,2EAA2E;QAC3E,8EAA8E;QAC9E,wEAAwE;QACxE,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAC7B,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CACnD,CAAC;QACF,IAAI;YACF,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;gBACE,KAAK,EAAE,aAAa,CAAC,KAAK;gBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;gBACxB,QAAQ;gBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;gBAC5B,IAAI,EAAE,OAAO;gBACb,OAAO,EAAE;oBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;oBACzC,cAAc,EAAE,iBAAiB;iBAClC;aACF,CACF,CAAC;YACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAxDD,0CAwDC"}
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAExB,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,UAAU,CAAC,CAAC;IACxC,IAAI;QACF,MAAM,MAAM,CAAC,OAAO,CAClB,wDAAwD,EACxD;YACE,KAAK,EAAE,aAAa,CAAC,KAAK;YAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;SACzB,CACF,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3C,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;SACH;aAAM;YACL,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,MAAM,CAAC,IAAI,CAAC,kDAAkD,CAAC,EAAE,CAAC,CAAC;SACpE;QACD,OAAO;KACR;IAED,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,gDAAgD;QAChD,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,kBAAkB,GAAG,GAAG,YAAY,MAAM,CAAC;QACjD,MAAM,MAAM,CAAC,cAAc,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC;QAE9D,6BAA6B;QAC7B,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,kBAAkB,CAAC,CAAC;QACpD,IAAI;YACF,MAAM,MAAM,CAAC,OAAO,CAClB,kEAAkE,EAClE;gBACE,KAAK,EAAE,aAAa,CAAC,KAAK;gBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;gBACxB,QAAQ;gBACR,IAAI,EAAE,OAAO;aACd,CACF,CAAC;YACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAtED,0CAsEC"}


@@ -55,22 +55,50 @@ function getTestConfig(tmpDir) {
gitHubVersion: { type: util_1.GitHubVariant.DOTCOM },
dbLocation: tmpDir,
packs: {},
debugMode: false,
debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
};
}
async function mockHttpRequests(databaseUploadStatusCode) {
function getRecordingLogger(messages) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request");
const url = "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name";
const databaseUploadSpy = requestSpy.withArgs(url);
if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql/databases");
if (optInStatusCode < 300) {
optInSpy.resolves(undefined);
}
else {
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
optInSpy.throws(new util_1.HTTPError("some error message", optInStatusCode));
}
if (databaseUploadStatusCode !== undefined) {
const databaseUploadSpy = requestSpy.withArgs("PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language");
if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
}
else {
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
}
}
sinon.stub(apiClient, "getApiClient").value(() => client);
}
@@ -83,7 +111,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.returns("false");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined);
});
@@ -99,7 +127,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined);
});
@@ -115,7 +143,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHAE };
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined);
});
@@ -129,11 +157,53 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.") !== undefined);
});
});
(0, ava_1.default)("Abort database upload if opt-in request returns 404", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(404);
(0, codeql_1.setCodeQL)({
async databaseBundle() {
return;
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Repository is not opted in to database uploads. Skipping upload.") !== undefined);
});
});
(0, ava_1.default)("Abort database upload if opt-in request fails with something other than 404", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(500);
(0, codeql_1.setCodeQL)({
async databaseBundle() {
return;
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "info" &&
v.message ===
"Skipping database upload due to unknown error: Error: some error message") !== undefined);
});
});
(0, ava_1.default)("Don't crash if uploading a database fails", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -142,20 +212,20 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(500);
mockHttpRequests(204, 500);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "warning" &&
v.message ===
"Failed to upload database for javascript: Error: some error message") !== undefined);
});
});
(0, ava_1.default)("Successfully uploading a database to api.github.com", async (t) => {
(0, ava_1.default)("Successfully uploading a database", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
@@ -163,34 +233,14 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
mockHttpRequests(204, 201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined);
});
});
(0, ava_1.default)("Successfully uploading a database to uploads.github.com", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined);
});

File diff suppressed because one or more lines are too long


@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20220214"
"bundleVersion": "codeql-bundle-20211013"
}

lib/feature-flags.js generated (94 changed lines)

@@ -1,94 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFeatureFlags = exports.GitHubFeatureFlags = exports.FeatureFlag = void 0;
const api_client_1 = require("./api-client");
const util = __importStar(require("./util"));
var FeatureFlag;
(function (FeatureFlag) {
FeatureFlag["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
})(FeatureFlag = exports.FeatureFlag || (exports.FeatureFlag = {}));
class GitHubFeatureFlags {
constructor(gitHubVersion, apiDetails, repositoryNwo, logger) {
this.gitHubVersion = gitHubVersion;
this.apiDetails = apiDetails;
this.repositoryNwo = repositoryNwo;
this.logger = logger;
}
async getValue(flag) {
const response = (await this.getApiResponse())[flag];
if (response === undefined) {
this.logger.debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
return false;
}
return response;
}
async getApiResponse() {
const loadApiResponse = async () => {
// Do nothing when not running against github.com
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
this.logger.debug("Not running against github.com. Disabling all feature flags.");
return {};
}
const client = (0, api_client_1.getApiClient)(this.apiDetails);
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql-action/features", {
owner: this.repositoryNwo.owner,
repo: this.repositoryNwo.repo,
});
return response.data;
}
catch (e) {
if (util.isHTTPError(e) && e.status === 403) {
this.logger.warning("This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. " +
"As a result, it will not be opted into any experimental features. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
}
else {
                // Some feature flags, such as `ml_powered_queries_enabled`, affect the produced alerts.
// Considering these feature flags disabled in the event of a transient error could
// therefore lead to alert churn. As a result, we crash if we cannot determine the value of
// the feature flags.
throw new Error(`Encountered an error while trying to load feature flags: ${e}`);
}
}
};
const apiResponse = this.cachedApiResponse || (await loadApiResponse());
this.cachedApiResponse = apiResponse;
return apiResponse;
}
}
exports.GitHubFeatureFlags = GitHubFeatureFlags;
/**
* Create a feature flags instance with the specified set of enabled flags.
*
* This should be only used within tests.
*/
function createFeatureFlags(enabledFlags) {
return {
getValue: async (flag) => {
return enabledFlags.includes(flag);
},
};
}
exports.createFeatureFlags = createFeatureFlags;
//# sourceMappingURL=feature-flags.js.map
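The deleted module above exposed a GitHubFeatureFlags class plus a test-only createFeatureFlags helper. Since this comparison removes the file, the sketch below only documents the old behaviour, using nothing beyond the API shown in the deleted code:

const { createFeatureFlags, FeatureFlag } = require("./feature-flags");

async function demo() {
    // Flags passed to createFeatureFlags report as enabled; anything else reports as disabled.
    const featureFlags = createFeatureFlags([FeatureFlag.MlPoweredQueriesEnabled]);
    console.log(await featureFlags.getValue(FeatureFlag.MlPoweredQueriesEnabled)); // true
}

demo();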


@@ -1 +0,0 @@
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAEX;AAFD,WAAY,WAAW;IACrB,qEAAsD,CAAA;AACxD,CAAC,EAFW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAEtB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,MAAM,QAAQ,GAAG,CAAC,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC;QACrD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;oBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;wBAC9F,oEAAoE;wBACpE,qFAAqF;wBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;iBACH;qBAAM;oBACL,uFAAuF;oBACvF,mFAAmF;oBACnF,2FAA2F;oBAC3F,qBAAqB;oBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;iBACH;aACF;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AAhED,gDAgEC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}


@@ -1,88 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
ava_1.default.beforeEach(() => {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
});
const testApiDetails = {
auth: "1234",
url: "https://github.com",
};
const testRepositoryNwo = (0, repository_1.parseRepositoryNwo)("github/example");
const ALL_FEATURE_FLAGS_DISABLED_VARIANTS = [
{
description: "GHES",
gitHubVersion: { type: util_1.GitHubVariant.GHES, version: "3.0.0" },
},
{ description: "GHAE", gitHubVersion: { type: util_1.GitHubVariant.GHAE } },
];
for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
(0, ava_1.default)(`All feature flags are disabled if running against ${variant.description}`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags(variant.gitHubVersion, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
}
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Not running against github.com. Disabling all feature flags.") !== undefined);
});
});
}
(0, ava_1.default)("Feature flags are disabled if they're not returned in API response", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
}
for (const featureFlag of ["ml_powered_queries_enabled"]) {
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
`Feature flag '${featureFlag}' undefined in API response, considering it disabled.`) !== undefined);
}
});
});
(0, ava_1.default)("Feature flags exception is propagated if the API request errors", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(500, {});
await t.throwsAsync(async () => featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled), {
message: "Encountered an error while trying to load feature flags: Error: some error message",
});
});
});
const FEATURE_FLAGS = ["ml_powered_queries_enabled"];
for (const featureFlag of FEATURE_FLAGS) {
(0, ava_1.default)(`Feature flag '${featureFlag}' is enabled if enabled in the API response`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
const expectedFeatureFlags = {};
for (const f of FEATURE_FLAGS) {
expectedFeatureFlags[f] = false;
}
expectedFeatureFlags[featureFlag] = true;
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureFlags);
const actualFeatureFlags = {
ml_powered_queries_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled),
};
t.deepEqual(actualFeatureFlags, expectedFeatureFlags);
});
});
}
//# sourceMappingURL=feature-flags.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI,CAAC,4BAA4B,CAAC,EAAE;YACxD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,uBAAuB,CAAC,EACtE;YACE,OAAO,EACL,oFAAoF;SACvF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG,CAAC,4BAA4B,CAAC,CAAC;AAErD,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAgC,EAAE,CAAC;YAC7D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAgC;gBACtD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACpC;aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,C
AAC,CAAC;CACJ"}

lib/fingerprints.js generated (5 changed lines)

@@ -226,8 +226,9 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifContents, sourceRoot, logger) {
var _a, _b, _c;
const sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile = {};
@@ -265,7 +266,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return JSON.stringify(sarif);
}
exports.addFingerprints = addFingerprints;
//# sourceMappingURL=fingerprints.js.map
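With this change, the incoming version of addFingerprints accepts the SARIF as a raw string and returns a condensed string, rather than working on a parsed object; the test updates further down stringify their fixtures for the same reason. A sketch of the resulting call shape, with the file path and logger purely illustrative:

const fs = require("fs");
const fingerprints = require("./fingerprints");
const { getRunnerLogger } = require("./logging");

async function fingerprintSarifFile(sarifPath, sourceRoot) {
    // addFingerprints takes the raw SARIF contents as a string...
    const input = fs.readFileSync(sarifPath, "utf8");
    const output = await fingerprints.addFingerprints(input, sourceRoot, getRunnerLogger(true));
    // ...and returns condensed JSON as a string, ready to write back out.
    fs.writeFileSync(sarifPath, output);
}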

File diff suppressed because one or more lines are too long


@@ -169,24 +169,30 @@ function testResolveUriToFile(uri, index, artifactsURIs) {
});
(0, ava_1.default)("addFingerprints", async (t) => {
// Run an end-to-end test on a test file
const input = JSON.parse(fs
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString());
const expected = JSON.parse(fs
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
.toString());
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);
});
(0, ava_1.default)("missingRegions", async (t) => {
// Run an end-to-end test on a test file
const input = JSON.parse(fs
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString());
const expected = JSON.parse(fs
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
.toString());
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);

File diff suppressed because one or more lines are too long

lib/init-action.js generated (23 changed lines)

@@ -23,7 +23,6 @@ const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util");
const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
@@ -54,15 +53,14 @@ async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
}
const statusReport = {
...statusReportBase,
disable_default_queries: disableDefaultQueries,
languages,
ml_powered_javascript_queries: (0, util_1.getMlPoweredJsQueriesStatus)(config),
workflow_languages: workflowLanguages || "",
paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries.join(","),
tools_input: (0, actions_util_1.getOptionalInput)("tools") || "",
tools_resolved_version: toolsVersion,
workflow_languages: workflowLanguages || "",
};
await (0, actions_util_1.sendStatusReport)(statusReport);
}
@@ -80,8 +78,6 @@ async function run() {
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
try {
const workflowErrors = await (0, actions_util_1.validateWorkflow)();
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
@@ -91,7 +87,7 @@ async function run() {
codeql = initCodeQLResult.codeql;
toolsVersion = initCodeQLResult.toolsVersion;
await (0, util_1.enrichEnvironment)(util_1.Mode.actions, codeql);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("debug") === "true", (0, actions_util_1.getOptionalInput)("debug-artifact-name") || util_1.DEFAULT_DEBUG_ARTIFACT_NAME, (0, actions_util_1.getOptionalInput)("debug-database-name") || util_1.DEFAULT_DEBUG_DATABASE_NAME, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, featureFlags, logger);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
if (config.languages.includes(languages_1.Language.python) &&
(0, actions_util_1.getRequiredInput)("setup-python-dependencies") === "true") {
try {
@@ -117,14 +113,9 @@ async function run() {
core.exportVariable("GOFLAGS", goFlags);
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
}
// Limit RAM and threads for extractors. When running extractors, the CodeQL CLI obeys the
// CODEQL_RAM and CODEQL_THREADS environment variables to decide how much RAM and how many
// threads it would ask extractors to use. See help text for the "--ram" and "--threads"
// options at https://codeql.github.com/docs/codeql-cli/manual/database-trace-command/
// for details.
core.exportVariable("CODEQL_RAM", process.env["CODEQL_RAM"] ||
(0, util_1.getMemoryFlagValue)((0, actions_util_1.getOptionalInput)("ram")).toString());
core.exportVariable("CODEQL_THREADS", (0, util_1.getThreadsFlagValue)((0, actions_util_1.getOptionalInput)("threads"), logger).toString());
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env["CODEQL_RAM"] || "6500";
core.exportVariable("CODEQL_RAM", codeqlRam);
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", undefined);
if (tracerConfig !== undefined) {
@@ -141,7 +132,7 @@ async function run() {
catch (error) {
core.setFailed(String(error));
console.log(error);
await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", (0, actions_util_1.getActionsStatus)(error), startedAt, String(error), error instanceof Error ? error.stack : undefined));
await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "failure", startedAt, String(error), error instanceof Error ? error.stack : undefined));
return;
}
await sendSuccessStatusReport(startedAt, config, toolsVersion);

File diff suppressed because one or more lines are too long

lib/init.js generated (37 changed lines)

@@ -38,43 +38,24 @@ async function initCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant,
return { codeql, toolsVersion };
}
exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;
}
exports.initConfig = initConfig;
async function runInit(codeql, config, sourceRoot, processName, processLevel) {
var _a, _b;
fs.mkdirSync(config.dbLocation, { recursive: true });
try {
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Init a database cluster
await codeql.databaseInitCluster(config.dbLocation, config.languages, sourceRoot, processName, processLevel);
}
else {
for (const language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config, language), language, sourceRoot);
}
}
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Init a database cluster
await codeql.databaseInitCluster(config.dbLocation, config.languages, sourceRoot, processName, processLevel);
}
catch (e) {
// Handle the situation where init is called twice
// for the same database in the same job.
if (e instanceof Error &&
((_a = e.message) === null || _a === void 0 ? void 0 : _a.includes("Refusing to create databases")) &&
e.message.includes("exists and is not an empty directory.")) {
throw new util.UserError(`Is the "init" action called twice in the same job? ${e.message}`);
}
else if (e instanceof Error &&
((_b = e.message) === null || _b === void 0 ? void 0 : _b.includes("is not compatible with this CodeQL CLI"))) {
throw new util.UserError(e.message);
}
else {
throw e;
else {
for (const language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config, language), language, sourceRoot);
}
}
return await (0, tracer_config_1.getCombinedTracerConfig)(config, codeql);


@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,YAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,YAAY,EACZ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA1CD,gCA0CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,kDAAkD;QAClD,yCAAyC;QACzC,IACE,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC,CAAA;YACnD,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,uCAAuC,CAAC,EAC3D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CACtB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;SACH;aAAM,IACL,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,wCAAwC,CAAC,CAAA,EAC7D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACrC;aAAM;YACL,MAAM,CAAC,CAAC;SACT;KACF;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAlDD,0BAkDC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAA
C,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAlCD,gCAkCC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,0BAA0B;QAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;KACH;SAAM;QACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;SACH;KACF;IAED,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA9BD,0BA8BC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CA
AC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}

lib/runner.js generated (51 changed lines)

@@ -18,20 +18,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const commander_1 = require("commander");
const del_1 = __importDefault(require("del"));
const analyze_1 = require("./analyze");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
@@ -59,14 +54,11 @@ function getToolsDir(userInput) {
return toolsDir;
}
const codeqlEnvJsonFilename = "codeql-env.json";
function loadTracerEnvironment(config) {
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
return JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
}
// Imports the environment from codeqlEnvJsonFilename if not already present
function importTracerEnvironment(config) {
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
const env = loadTracerEnvironment(config);
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
for (const key of Object.keys(env)) {
process.env[key] = env[key];
}
@@ -124,12 +116,6 @@ program
.option("--debug", "Print more verbose output", false)
.option("--trace-process-name <string>", "(Advanced, windows-only) Inject a windows tracer of this process into a process with the given process name.")
.option("--trace-process-level <number>", "(Advanced, windows-only) Inject a windows tracer of this process into a parent process <number> levels up.")
.option("--ram <number>", "The amount of memory in MB that can be used by CodeQL extractors. " +
"By default, CodeQL extractors will use most of the memory available in the system. " +
'This input also sets the amount of memory that can later be used by the "analyze" command.')
.option("--threads <number>", "The number of threads that can be used by CodeQL extractors. " +
"By default, CodeQL extractors will use all the hardware threads available in the system. " +
'This input also sets the number of threads that can later be used by the "analyze" command.')
.action(async (cmd) => {
const logger = (0, logging_1.getRunnerLogger)(cmd.debug);
try {
@@ -138,7 +124,7 @@ program
const checkoutPath = cmd.checkoutPath || process.cwd();
// Wipe the temp dir
logger.info(`Cleaning temp directory ${tempDir}`);
await (0, del_1.default)(tempDir, { force: true });
fs.rmSync(tempDir, { recursive: true, force: true });
fs.mkdirSync(tempDir, { recursive: true });
const auth = await (0, util_1.getGitHubAuth)(logger, cmd.githubAuth, cmd.githubAuthStdin);
const apiDetails = {
@@ -148,13 +134,6 @@ program
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.runner);
// Limit RAM and threads for extractors. When running extractors, the CodeQL CLI obeys the
// CODEQL_RAM and CODEQL_THREADS environment variables to decide how much RAM and how many
// threads it would ask extractors to use. See help text for the "--ram" and "--threads"
// options at https://codeql.github.com/docs/codeql-cli/manual/database-trace-command/
// for details.
process.env["CODEQL_RAM"] = (0, util_1.getMemoryFlagValue)(cmd.ram).toString();
process.env["CODEQL_THREADS"] = (0, util_1.getThreadsFlagValue)(cmd.threads, logger).toString();
let codeql;
if (cmd.codeqlPath !== undefined) {
codeql = await (0, codeql_1.getCodeQL)(cmd.codeqlPath);
@@ -164,7 +143,7 @@ program
}
await (0, util_1.enrichEnvironment)(util_1.Mode.runner, codeql);
const workspacePath = checkoutPath;
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, "", "", (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, logger);
const sourceRoot = checkoutPath;
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel());
if (tracerConfig === undefined) {
@@ -261,15 +240,10 @@ program
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--no-upload", "Do not upload results after analysis.")
.option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
.option("--ram <ram>", "The amount of memory in MB that can be used by CodeQL for database finalization and query execution. " +
'By default, this command will use the same amount of memory as previously set in the "init" command. ' +
'If the "init" command also does not have an explicit "ram" flag, this command will use most of the ' +
"memory available in the system.")
.option("--ram <ram>", "Amount of memory to use when running queries. Default is to use all available memory.")
.option("--no-add-snippets", "Specify whether to include code snippets in the sarif output.")
.option("--threads <threads>", "The number of threads that can be used by CodeQL for database finalization and query execution. " +
'By default, this command will use the same number of threads as previously set in the "init" command. ' +
'If the "init" command also does not have an explicit "threads" flag, this command will use all the ' +
"hardware threads available in the system.")
.option("--threads <threads>", "Number of threads to use when running queries. " +
"Default is to use all available cores.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--category <category>", "String used by Code Scanning for matching the analyses.")
.option("--debug", "Print more verbose output", false)
@@ -288,15 +262,8 @@ program
url: (0, util_1.parseGitHubUrl)(cmd.githubUrl),
};
const outputDir = cmd.outputDir || path.join(config.tempDir, "codeql-sarif");
let initEnv = {};
try {
initEnv = loadTracerEnvironment(config);
}
catch (err) {
// The init command did not generate a tracer environment file
}
const threads = (0, util_1.getThreadsFlag)(cmd.threads || initEnv["CODEQL_THREADS"], logger);
const memory = (0, util_1.getMemoryFlag)(cmd.ram || initEnv["CODEQL_RAM"]);
const threads = (0, util_1.getThreadsFlag)(cmd.threads, logger);
const memory = (0, util_1.getMemoryFlag)(cmd.ram);
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
await (0, analyze_1.runQueries)(outputDir, memory, (0, util_1.getAddSnippetsFlag)(cmd.addSnippets), threads, cmd.category, config, logger);
if (!cmd.upload) {
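
A note on the runner hunks above: one side of this compare has the analyze command fall back to the RAM and thread limits that the init command recorded as CODEQL_RAM and CODEQL_THREADS in its JSON environment file, instead of recomputing them. A minimal sketch of that hand-off, assuming the file name and keys shown in the hunks:

const fs = require("fs");
const path = require("path");

// Assumed file name; init writes this file into config.tempDir (see loadTracerEnvironment above).
const CODEQL_ENV_JSON = "codeql-env.json";

function loadInitEnvironment(tempDir) {
  try {
    return JSON.parse(fs.readFileSync(path.join(tempDir, CODEQL_ENV_JSON), "utf-8"));
  } catch (e) {
    return {}; // init did not run here, or wrote no environment file
  }
}

// analyze prefers its own --ram/--threads flags and falls back to the recorded values.
function resolveLimits(cmd, tempDir) {
  const initEnv = loadInitEnvironment(tempDir);
  return {
    ram: cmd.ram || initEnv["CODEQL_RAM"],
    threads: cmd.threads || initEnv["CODEQL_THREADS"],
  };
}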

File diff suppressed because one or more lines are too long

49 lib/testing-utils.js generated

@@ -19,12 +19,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
const github = __importStar(require("@actions/github"));
exports.setupActionsVars = exports.setupTests = void 0;
const sinon = __importStar(require("sinon"));
const apiClient = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql"));
const util_1 = require("./util");
function wrapOutput(context) {
// Function signature taken from Socket.write.
// Note there are two overloads:
@@ -92,48 +89,4 @@ function setupActionsVars(tempDir, toolsDir) {
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
}
exports.setupActionsVars = setupActionsVars;
function getRecordingLogger(messages) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
exports.getRecordingLogger = getRecordingLogger;
/** Mock the HTTP request to the feature flags enablement API endpoint. */
function mockFeatureFlagApiEndpoint(responseStatusCode, response) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request");
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql-action/features");
if (responseStatusCode < 300) {
optInSpy.resolves({
status: responseStatusCode,
data: response,
headers: {},
url: "GET /repos/:owner/:repo/code-scanning/codeql-action/features",
});
}
else {
optInSpy.throws(new util_1.HTTPError("some error message", responseStatusCode));
}
sinon.stub(apiClient, "getApiClient").value(() => client);
}
exports.mockFeatureFlagApiEndpoint = mockFeatureFlagApiEndpoint;
//# sourceMappingURL=testing-utils.js.map
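
The getRecordingLogger and mockFeatureFlagApiEndpoint helpers shown above exist on only one side of this compare. A hypothetical ava test using the feature-flag mock could look like the sketch below; the flag name is made up, and the argument passed to getApiClient does not matter here because the helper stubs it to return the canned client:

const test = require("ava");
const apiClient = require("./api-client");
const { mockFeatureFlagApiEndpoint, setupTests } = require("./testing-utils");

setupTests(test);

test("feature flags endpoint returns the canned response", async (t) => {
  // Pretend the API reports one (hypothetical) flag as enabled.
  mockFeatureFlagApiEndpoint(200, { some_feature_enabled: true });
  const client = apiClient.getApiClient({});
  const response = await client.request(
    "GET /repos/:owner/:repo/code-scanning/codeql-action/features"
  );
  t.true(response.data.some_feature_enabled);
});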


@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CA
AC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,6CAA+B;AAE/B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC"}

12 lib/toolcache.js generated

@@ -18,9 +18,6 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadTool = exports.findAllVersions = exports.find = exports.cacheDir = exports.extractTar = void 0;
const fs = __importStar(require("fs"));
@@ -30,7 +27,6 @@ const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const io = __importStar(require("@actions/io"));
const actionsToolcache = __importStar(require("@actions/tool-cache"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const del_1 = __importDefault(require("del"));
const semver = __importStar(require("semver"));
const uuid_1 = require("uuid");
const util_1 = require("./util");
@@ -127,7 +123,7 @@ async function cacheDir(sourceDir, tool, version, toolCacheDir, logger) {
throw new Error("sourceDir is not a directory");
}
// Create the tool dir
const destPath = await createToolPath(tool, version, arch, toolCacheDir, logger);
const destPath = createToolPath(tool, version, arch, toolCacheDir, logger);
// copy each child item. do not move. move can fail on Windows
// due to anti-virus software having an open handle on a file.
for (const itemName of fs.readdirSync(sourceDir)) {
@@ -236,12 +232,12 @@ function createExtractFolder(tempDir) {
}
return dest;
}
async function createToolPath(tool, version, arch, toolCacheDir, logger) {
function createToolPath(tool, version, arch, toolCacheDir, logger) {
const folderPath = path.join(toolCacheDir, tool, semver.clean(version) || version, arch || "");
logger.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
await (0, del_1.default)(folderPath, { force: true });
await (0, del_1.default)(markerPath, { force: true });
fs.rmSync(folderPath, { recursive: true, force: true });
fs.rmSync(markerPath, { recursive: true, force: true });
fs.mkdirSync(folderPath, { recursive: true });
return folderPath;
}
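
The toolcache hunks above swap the del package for Node's built-in fs.rmSync (available since Node 14.14), which also lets createToolPath become synchronous. As a rough equivalence for this use (sketch):

const fs = require("fs");

function removeIfPresent(targetPath) {
  // Stands in for `await del(targetPath, { force: true })`:
  // `force: true` ignores a missing path, `recursive: true` removes directories and their contents.
  fs.rmSync(targetPath, { recursive: true, force: true });
}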

File diff suppressed because one or more lines are too long


@@ -44,6 +44,10 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout) !== undefined) {
options.listeners.stdout(data);
}
else {
// if no stdout listener was originally defined then we match default behavior of Toolrunner
process.stdout.write(data);
}
},
stderr: (data) => {
var _a;
@@ -51,6 +55,10 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stderr) !== undefined) {
options.listeners.stderr(data);
}
else {
// if no stderr listener was originally defined then we match default behavior of Toolrunner
process.stderr.write(data);
}
},
};
// we capture the original return code or error so that if no match is found we can duplicate the behavior


@@ -1 +1 @@
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAnED,wDAmEC"}
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAzED,wDAyEC"}

44 lib/tracer-config.js generated

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getCombinedTracerConfig = exports.concatTracerConfigs = exports.getTracerConfigForLanguage = exports.getTracerConfigForCluster = exports.endTracingForCluster = void 0;
exports.getCombinedTracerConfig = exports.concatTracerConfigs = exports.getTracerConfigForLanguage = exports.getTracerConfigForCluster = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const codeql_1 = require("./codeql");
@@ -33,30 +33,6 @@ const CRITICAL_TRACER_VARS = new Set([
"SEMMLE_DEPTRACE_SOCKET",
"SEMMLE_JAVA_TOOL_OPTIONS",
]);
async function endTracingForCluster(config) {
// If there are no traced languages, we don't need to do anything.
if (!config.languages.some(languages_1.isTracedLanguage))
return;
const envVariablesFile = path.resolve(config.dbLocation, "temp/tracingEnvironment/end-tracing.json");
if (!fs.existsSync(envVariablesFile)) {
throw new Error(`Environment file for ending tracing not found: ${envVariablesFile}`);
}
try {
const endTracingEnvVariables = JSON.parse(fs.readFileSync(envVariablesFile, "utf8"));
for (const [key, value] of Object.entries(endTracingEnvVariables)) {
if (value !== null) {
process.env[key] = value;
}
else {
delete process.env[key];
}
}
}
catch (e) {
throw new Error(`Failed to parse file containing end tracing environment variables: ${e}`);
}
}
exports.endTracingForCluster = endTracingForCluster;
async function getTracerConfigForCluster(config) {
const tracingEnvVariables = JSON.parse(fs.readFileSync(path.resolve(config.dbLocation, "temp/tracingEnvironment/start-tracing.json"), "utf8"));
return {
@@ -206,15 +182,15 @@ async function getCombinedTracerConfig(config, codeql) {
tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
}
mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
}
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
// On macos it's necessary to prefix the build command with the runner executable
// in order to trace when System Integrity Protection is enabled.

File diff suppressed because one or more lines are too long


@@ -44,9 +44,6 @@ function getTestConfig(tmpDir) {
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
}
// A very minimal setup
@@ -162,10 +159,7 @@ function getTestConfig(tmpDir) {
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "c" } },
}, config));
// If e is undefined, then the previous assertion will fail.
if (e !== undefined) {
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
}
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
});
});
(0, ava_1.default)("concatTracerConfigs - cpp spec lines come last if present", async (t) => {

File diff suppressed because one or more lines are too long

179 lib/upload-lib.js generated

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
@@ -54,24 +54,25 @@ function combineSarifFiles(sarifFiles) {
}
combinedSarif.runs.push(...sarifObject.runs);
}
return combinedSarif;
return JSON.stringify(combinedSarif);
}
exports.combineSarifFiles = combineSarifFiles;
// Populates the run.automationDetails.id field using the analysis_key and environment
// and return an updated sarif file contents.
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
const automationID = getAutomationID(category, analysis_key, environment);
if (automationID !== undefined) {
for (const run of sarif.runs || []) {
if (run.automationDetails === undefined) {
run.automationDetails = {
id: automationID,
};
}
}
return sarif;
function populateRunAutomationDetails(sarifContents, category, analysis_key, environment) {
if (analysis_key === undefined) {
return sarifContents;
}
return sarif;
const automationID = getAutomationID(category, analysis_key, environment);
const sarif = JSON.parse(sarifContents);
for (const run of sarif.runs || []) {
if (run.automationDetails === undefined) {
run.automationDetails = {
id: automationID,
};
}
}
return JSON.stringify(sarif);
}
exports.populateRunAutomationDetails = populateRunAutomationDetails;
function getAutomationID(category, analysis_key, environment) {
@@ -82,11 +83,7 @@ function getAutomationID(category, analysis_key, environment) {
}
return automationID;
}
// analysis_key is undefined for the runner.
if (analysis_key !== undefined) {
return actionsUtil.computeAutomationID(analysis_key, environment);
}
return undefined;
return actionsUtil.computeAutomationID(analysis_key, environment);
}
// Upload the given payload.
// If the request fails then this will retry a small number of times.
@@ -95,7 +92,6 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
// If in test mode we don't want to upload the results
const testMode = process.env["TEST_MODE"] === "true" || false;
if (testMode) {
logger.debug("In test mode. Results are not uploaded.");
return;
}
const client = api.getApiClient(apiDetails);
@@ -109,7 +105,6 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
});
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
return response.data.id;
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
@@ -204,7 +199,7 @@ function validateSarifFileSchema(sarifFilePath, logger) {
exports.validateSarifFileSchema = validateSarifFileSchema;
// buildPayload constructs a map ready to be uploaded to the API from the given
// parameters, respecting the current mode and target GitHub instance version.
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, mergeBaseCommitOid) {
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion) {
if (util.isActions()) {
const payloadObj = {
commit_oid: commitOid,
@@ -223,23 +218,11 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
// This behaviour can be made the default when support for GHES 3.0 is discontinued.
if (gitHubVersion.type !== util.GitHubVariant.GHES ||
semver.satisfies(gitHubVersion.version, `>=3.1`)) {
if (process.env.GITHUB_EVENT_NAME === "pull_request") {
if (commitOid === util.getRequiredEnvParam("GITHUB_SHA") &&
mergeBaseCommitOid) {
// We're uploading results for the merge commit
// and were able to determine the merge base.
// So we use that as the most accurate base.
payloadObj.base_ref = `refs/heads/${util.getRequiredEnvParam("GITHUB_BASE_REF")}`;
payloadObj.base_sha = mergeBaseCommitOid;
}
else if (process.env.GITHUB_EVENT_PATH) {
// Either we're not uploading results for the merge commit
// or we could not determine the merge base.
// Using the PR base is the only option here
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
}
if (process.env.GITHUB_EVENT_NAME === "pull_request" &&
process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
}
}
return payloadObj;
@@ -260,19 +243,25 @@ exports.buildPayload = buildPayload;
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) {
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
}
let sarif = combineSarifFiles(sarifFiles);
sarif = await fingerprints.addFingerprints(sarif, sourceRoot, logger);
sarif = populateRunAutomationDetails(sarif, category, analysisKey, environment);
const toolNames = util.getToolNames(sarif);
validateUniqueCategory(sarif);
const sarifPayload = JSON.stringify(sarif);
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = await fingerprints.addFingerprints(sarifPayload, sourceRoot, logger);
sarifPayload = populateRunAutomationDetails(sarifPayload, category, analysisKey, environment);
const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = (0, file_url_1.default)(sourceRoot);
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, await actionsUtil.determineMergeBaseCommitOid());
const toolNames = util.getToolNames(sarifPayload);
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion);
// Log some useful debug info about the upload
const rawUploadSizeBytes = sarifPayload.length;
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
@@ -281,102 +270,12 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
const sarifID = await uploadPayload(payload, repositoryNwo, apiDetails, logger);
await uploadPayload(payload, repositoryNwo, apiDetails, logger);
logger.endGroup();
return {
statusReport: {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
},
sarifID,
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
};
}
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
async function waitForProcessing(repositoryNwo, sarifID, apiDetails, logger) {
logger.startGroup("Waiting for processing to finish");
const client = api.getApiClient(apiDetails);
const statusCheckingStarted = Date.now();
// eslint-disable-next-line no-constant-condition
while (true) {
if (Date.now() >
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
break;
}
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
sarif_id: sarifID,
});
const status = response.data.processing_status;
logger.info(`Analysis upload status is ${status}.`);
if (status === "complete") {
break;
}
else if (status === "failed") {
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
}
}
catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
case 404:
logger.debug("Analysis is not found yet...");
break; // Note this breaks from the case statement, not the outer loop.
default:
throw e;
}
}
else {
throw e;
}
}
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
}
logger.endGroup();
}
exports.waitForProcessing = waitForProcessing;
function validateUniqueCategory(sarif) {
var _a, _b, _c;
// This check only works on actions as env vars don't persist between calls to the runner
if (util.isActions()) {
// duplicate categories are allowed in the same sarif file
// but not across multiple sarif files
const categories = {};
for (const run of sarif.runs) {
const id = (_a = run === null || run === void 0 ? void 0 : run.automationDetails) === null || _a === void 0 ? void 0 : _a.id;
const tool = (_c = (_b = run.tool) === null || _b === void 0 ? void 0 : _b.driver) === null || _c === void 0 ? void 0 : _c.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
categories[category] = { id, tool };
}
for (const [category, { id, tool }] of Object.entries(categories)) {
const sentinelEnvVar = `CODEQL_UPLOAD_SARIF_${category}`;
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. " +
"The easiest fix is to specify a unique value for the `category` input. " +
`Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})`);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
}
}
exports.validateUniqueCategory = validateUniqueCategory;
/**
 * Sanitizes a string to be used as an environment variable name.
* This will replace all non-alphanumeric characters with underscores.
* There could still be some false category clashes if two uploads
* occur that differ only in their non-alphanumeric characters. This is
* unlikely.
*
* @param str the initial value to sanitize
*/
function sanitize(str) {
return (str !== null && str !== void 0 ? str : "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
//# sourceMappingURL=upload-lib.js.map

File diff suppressed because one or more lines are too long

117 lib/upload-lib.test.js generated

@@ -53,29 +53,20 @@ ava_1.default.beforeEach(() => {
const allVersions = newVersions.concat(oldVersions);
process.env["GITHUB_EVENT_NAME"] = "push";
for (const version of allVersions) {
const payload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
const payload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version);
// Not triggered by a pull request
t.falsy(payload.base_ref);
t.falsy(payload.base_sha);
}
process.env["GITHUB_EVENT_NAME"] = "pull_request";
process.env["GITHUB_SHA"] = "commit";
process.env["GITHUB_BASE_REF"] = "master";
process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`;
for (const version of newVersions) {
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
// Uploads for a merge commit use the merge base
t.deepEqual(payload.base_ref, "refs/heads/master");
t.deepEqual(payload.base_sha, "mergeBaseCommit");
}
for (const version of newVersions) {
const payload = uploadLib.buildPayload("headCommit", "refs/pull/123/head", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
// Uploads for the head use the PR base
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version);
t.deepEqual(payload.base_ref, "refs/heads/master");
t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
}
for (const version of oldVersions) {
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version);
// These older versions won't expect these values
t.falsy(payload.base_ref);
t.falsy(payload.base_sha);
@@ -107,13 +98,9 @@ ava_1.default.beforeEach(() => {
});
});
(0, ava_1.default)("populateRunAutomationDetails", (t) => {
let sarif = {
runs: [{}],
};
let sarif = '{"runs": [{}]}';
const analysisKey = ".github/workflows/codeql-analysis.yml:analyze";
let expectedSarif = {
runs: [{ automationDetails: { id: "language:javascript/os:linux/" } }],
};
let expectedSarif = '{"runs":[{"automationDetails":{"id":"language:javascript/os:linux/"}}]}';
// Category has priority over analysis_key/environment
let modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, "language:javascript/os:linux", analysisKey, '{"language": "other", "os": "other"}');
t.deepEqual(modifiedSarif, expectedSarif);
@@ -121,99 +108,9 @@ ava_1.default.beforeEach(() => {
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, "language:javascript/os:linux/", analysisKey, "");
t.deepEqual(modifiedSarif, expectedSarif);
// check that the automation details doesn't get overwritten
sarif = { runs: [{ automationDetails: { id: "my_id" } }] };
expectedSarif = { runs: [{ automationDetails: { id: "my_id" } }] };
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
t.deepEqual(modifiedSarif, expectedSarif);
// check multiple runs
sarif = { runs: [{ automationDetails: { id: "my_id" } }, {}] };
expectedSarif = {
runs: [
{ automationDetails: { id: "my_id" } },
{
automationDetails: {
id: ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/",
},
},
],
};
sarif = '{"runs":[{"automationDetails":{"id":"my_id"}}]}';
expectedSarif = '{"runs":[{"automationDetails":{"id":"my_id"}}]}';
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
t.deepEqual(modifiedSarif, expectedSarif);
});
(0, ava_1.default)("validateUniqueCategory when empty", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif()));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif()));
});
(0, ava_1.default)("validateUniqueCategory for automation details id", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("AbC")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("def")));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc/def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc@def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc def")));
// this one is fine
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_ def")));
});
(0, ava_1.default)("validateUniqueCategory for tool name", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "AbC")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "def")));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc/def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc@def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc_def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc def")));
// this one is fine
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_ def")));
});
(0, ava_1.default)("validateUniqueCategory for automation details id and tool name", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "abc")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_", "def")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_", "def")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("ghi", "_jkl")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("ghi", "_jkl")));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "_")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "def__")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_def")));
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("mno_", "pqr")));
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("mno", "_pqr")));
});
(0, ava_1.default)("validateUniqueCategory for multiple runs", (t) => {
const sarif1 = createMockSarif("abc", "def");
const sarif2 = createMockSarif("ghi", "jkl");
// duplicate categories are allowed within the same sarif file
const multiSarif = { runs: [sarif1.runs[0], sarif1.runs[0], sarif2.runs[0]] };
t.notThrows(() => uploadLib.validateUniqueCategory(multiSarif));
// should throw if there are duplicate categories in separate validations
t.throws(() => uploadLib.validateUniqueCategory(sarif1));
t.throws(() => uploadLib.validateUniqueCategory(sarif2));
});
function createMockSarif(id, tool) {
return {
runs: [
{
automationDetails: {
id,
},
tool: {
driver: {
name: tool,
},
},
},
],
};
}
//# sourceMappingURL=upload-lib.test.js.map
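
The "false clash" expectations in the tests above follow from the sanitize function shown in the upload-lib.js hunk: every character outside [a-zA-Z0-9_] becomes an underscore and the result is upper-cased before it is combined into the sentinel variable name. A quick illustration (sketch):

function sanitize(str) {
  return (str ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}

// These all collapse to the same value, so a second upload with any of them is rejected:
console.log(sanitize("abc/def")); // "ABC_DEF"
console.log(sanitize("abc@def")); // "ABC_DEF"
console.log(sanitize("abc_def")); // "ABC_DEF"
console.log(sanitize("abc def")); // "ABC_DEF"
// The extra character survives as its own underscore, so this one does not clash:
console.log(sanitize("abc_ def")); // "ABC__DEF"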

File diff suppressed because one or more lines are too long


@@ -22,7 +22,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
@@ -47,19 +46,15 @@ async function run() {
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
core.setOutput("sarif-id", uploadResult.sarifID);
if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
await sendSuccessStatusReport(startedAt, uploadStats);
}
catch (error) {
const message = error instanceof Error ? error.message : String(error);
const stack = error instanceof Error ? error.stack : String(error);
core.setFailed(message);
console.log(error);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", actionsUtil.getActionsStatus(error), startedAt, message, stack));
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "failure", startedAt, message, stack));
return;
}
}


@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QACjD,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

156 lib/util.js generated

@@ -18,37 +18,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getMlPoweredJsQueriesStatus = exports.ML_POWERED_JS_QUERIES_PACK = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const del_1 = __importDefault(require("del"));
const semver = __importStar(require("semver"));
const api_client_1 = require("./api-client");
const apiCompatibility = __importStar(require("./api-compatibility.json"));
const codeql_1 = require("./codeql");
/**
* Specifies bundle versions that are known to be broken
* and will not be used if found in the toolcache.
*/
const BROKEN_VERSIONS = ["0.0.0-20211207"];
/**
* The URL for github.com.
*/
exports.GITHUB_DOTCOM_URL = "https://github.com";
/**
* Default name of the debugging artifact.
*/
exports.DEFAULT_DEBUG_ARTIFACT_NAME = "debug-artifacts";
/**
* Default name of the database in the debugging artifact.
*/
exports.DEFAULT_DEBUG_DATABASE_NAME = "db";
/**
* Get the extra options for the codeql commands.
*/
@@ -72,7 +55,8 @@ exports.getExtraOptionsEnvParam = getExtraOptionsEnvParam;
*
* Returns an array of unique string tool names.
*/
function getToolNames(sarif) {
function getToolNames(sarifContents) {
const sarif = JSON.parse(sarifContents);
const toolNames = {};
for (const run of sarif.runs || []) {
const tool = run.tool || {};
@@ -93,7 +77,7 @@ async function withTmpDir(body) {
const symlinkSubdir = path.join(tmpDir, "symlink");
fs.symlinkSync(realSubdir, symlinkSubdir, "dir");
const result = await body(symlinkSubdir);
await (0, del_1.default)(tmpDir, { force: true });
fs.rmSync(tmpDir, { recursive: true, force: true });
return result;
}
exports.withTmpDir = withTmpDir;
@@ -109,13 +93,13 @@ function getSystemReservedMemoryMegaBytes() {
return 1024 * (process.platform === "win32" ? 1.5 : 1);
}
/**
* Get the value of the codeql `--ram` flag as configured by the `ram` input.
* If no value was specified, the total available memory will be used minus a
* threshold reserved for the OS.
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus a threshold
* reserved for the OS.
*
* @returns {number} the amount of RAM to use, in megabytes
* @returns string
*/
function getMemoryFlagValue(userInput) {
function getMemoryFlag(userInput) {
let memoryToUseMegaBytes;
if (userInput) {
memoryToUseMegaBytes = Number(userInput);
@@ -129,18 +113,7 @@ function getMemoryFlagValue(userInput) {
const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes();
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
}
return Math.floor(memoryToUseMegaBytes);
}
exports.getMemoryFlagValue = getMemoryFlagValue;
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus a threshold
* reserved for the OS.
*
* @returns string
*/
function getMemoryFlag(userInput) {
return `--ram=${getMemoryFlagValue(userInput)}`;
return `--ram=${Math.floor(memoryToUseMegaBytes)}`;
}
exports.getMemoryFlag = getMemoryFlag;
/**
@@ -157,14 +130,14 @@ function getAddSnippetsFlag(userInput) {
}
exports.getAddSnippetsFlag = getAddSnippetsFlag;
/**
* Get the value of the codeql `--threads` flag specified for the `threads`
* input. If no value was specified, all available threads will be used.
* Get the codeql `--threads` value specified for the `threads` input.
* If no value was specified, all available threads will be used.
*
* The value will be capped to the number of available CPUs.
*
* @returns {number}
* @returns string
*/
function getThreadsFlagValue(userInput, logger) {
function getThreadsFlag(userInput, logger) {
let numThreads;
const maxThreads = os.cpus().length;
if (userInput) {
@@ -186,19 +159,7 @@ function getThreadsFlagValue(userInput, logger) {
// Default to using all threads
numThreads = maxThreads;
}
return numThreads;
}
exports.getThreadsFlagValue = getThreadsFlagValue;
/**
* Get the codeql `--threads` flag specified for the `threads` input.
* If no value was specified, all available threads will be used.
*
* The value will be capped to the number of available CPUs.
*
* @returns string
*/
function getThreadsFlag(userInput, logger) {
return `--threads=${getThreadsFlagValue(userInput, logger)}`;
return `--threads=${numThreads}`;
}
exports.getThreadsFlag = getThreadsFlag;
/**
@@ -486,16 +447,6 @@ class HTTPError extends Error {
}
}
exports.HTTPError = HTTPError;
/**
* An Error class that indicates an error that occurred due to
* a misconfiguration of the action or the CodeQL CLI.
*/
class UserError extends Error {
constructor(message) {
super(message);
}
}
exports.UserError = UserError;
function isHTTPError(arg) {
return (arg === null || arg === void 0 ? void 0 : arg.status) !== undefined && Number.isInteger(arg.status);
}
@@ -504,81 +455,4 @@ async function codeQlVersionAbove(codeql, requiredVersion) {
return semver.gte(await codeql.getVersion(), requiredVersion);
}
exports.codeQlVersionAbove = codeQlVersionAbove;
// Create a bundle for the given DB, if it doesn't already exist
async function bundleDb(config, language, codeql, dbName) {
const databasePath = getCodeQLDatabasePath(config, language);
const databaseBundlePath = path.resolve(config.dbLocation, `${dbName}.zip`);
// For a tiny bit of added safety, delete the file if it exists.
// The file is probably from an earlier call to this function, either
// as part of this action step or a previous one, but it could also be
// from somewhere else or someone trying to make the action upload a
// non-database file.
if (fs.existsSync(databaseBundlePath)) {
await (0, del_1.default)(databaseBundlePath, { force: true });
}
await codeql.databaseBundle(databasePath, databaseBundlePath, dbName);
return databaseBundlePath;
}
exports.bundleDb = bundleDb;
async function delay(milliseconds) {
return new Promise((resolve) => setTimeout(resolve, milliseconds));
}
exports.delay = delay;
function isGoodVersion(versionSpec) {
return !BROKEN_VERSIONS.includes(versionSpec);
}
exports.isGoodVersion = isGoodVersion;
/**
* The ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
* queries beta.
*/
exports.ML_POWERED_JS_QUERIES_PACK = {
packName: "codeql/javascript-experimental-atm-queries",
version: "~0.0.2",
};
/**
* Get information about ML-powered JS queries to populate status reports with.
*
* This will be:
*
* - The version string if the analysis is using the ML-powered query pack that will be added to the
* analysis if the repo is opted into the ML-powered queries beta, i.e.
* {@link ML_POWERED_JS_QUERIES_PACK.version}. If the version string
* {@link ML_POWERED_JS_QUERIES_PACK.version} is undefined, then the status report string will be
* "latest", however this shouldn't occur in practice (see comment below).
* - "false" if the analysis won't run any ML-powered JS queries.
* - "other" in all other cases.
*
* Our goal of the status report here is to allow us to compare the occurrence of timeouts and other
* errors with ML-powered queries turned on and off. We also want to be able to compare minor
* version bumps caused by us bumping the version range of `ML_POWERED_JS_QUERIES_PACK` in a new
* version of the CodeQL Action. For instance, we might want to compare the `~0.1.0` and `~0.0.2`
* version strings.
*
* We restrict the set of strings we report here by excluding other version strings and combinations
* of version strings. We do this to limit the cardinality of the ML-powered JS queries status
* report field, since some platforms that ingest this status report bill based on the cardinality
* of its fields.
*
* This function lives here rather than in `init-action.ts` so it's easier to test, since tests for
* `init-action.ts` would each need to live in their own file. See `analyze-action-env.ts` for an
* explanation as to why this is.
*/
function getMlPoweredJsQueriesStatus(config) {
const mlPoweredJsQueryPacks = (config.packs.javascript || []).filter((pack) => pack.packName === exports.ML_POWERED_JS_QUERIES_PACK.packName);
if (mlPoweredJsQueryPacks.length === 0) {
return "false";
}
const firstVersionString = mlPoweredJsQueryPacks[0].version;
if (mlPoweredJsQueryPacks.length === 1 &&
exports.ML_POWERED_JS_QUERIES_PACK.version === firstVersionString) {
// We should always specify an explicit version string in `ML_POWERED_JS_QUERIES_PACK`,
// otherwise we won't be able to make changes to the pack unless those changes are compatible
// with each version of the CodeQL Action. Therefore in practice, we should never hit the
// `latest` case here.
return exports.ML_POWERED_JS_QUERIES_PACK.version || "latest";
}
return "other";
}
exports.getMlPoweredJsQueriesStatus = getMlPoweredJsQueriesStatus;
//# sourceMappingURL=util.js.map
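The long comment above spells out the three values `getMlPoweredJsQueriesStatus` can report. A quick illustration of the mapping, using a hypothetical partial config in which only the `packs.javascript` field matters:

```typescript
import { getMlPoweredJsQueriesStatus, ML_POWERED_JS_QUERIES_PACK } from "./util";

// Hypothetical base config; only the `packs` field is inspected by this function.
declare const baseConfig: any;

// No ML-powered pack configured -> "false"
getMlPoweredJsQueriesStatus({ ...baseConfig, packs: {} });

// Exactly the pack and version the action itself would add -> "~0.0.2"
getMlPoweredJsQueriesStatus({
  ...baseConfig,
  packs: { javascript: [ML_POWERED_JS_QUERIES_PACK] },
});

// Any other version or combination -> "other"
getMlPoweredJsQueriesStatus({
  ...baseConfig,
  packs: {
    javascript: [{ packName: ML_POWERED_JS_QUERIES_PACK.packName, version: "~0.0.1" }],
  },
});
```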

File diff suppressed because one or more lines are too long

60
lib/util.test.js generated
View File

@@ -35,7 +35,7 @@ const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("getToolNames", (t) => {
const input = fs.readFileSync(`${__dirname}/../src/testdata/tool-names.sarif`, "utf8");
const toolNames = util.getToolNames(JSON.parse(input));
const toolNames = util.getToolNames(input);
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
(0, ava_1.default)("getMemoryFlag() should return the correct --ram flag", (t) => {
@@ -204,62 +204,4 @@ async function mockStdInForAuthExpectError(t, mockLogger, ...text) {
const stdin = stream.Readable.from(text);
await t.throwsAsync(async () => util.getGitHubAuth(mockLogger, undefined, true, stdin));
}
const ML_POWERED_JS_STATUS_TESTS = [
[[], "false"],
[[{ packName: "someOtherPack" }], "false"],
[
[{ packName: "someOtherPack" }, util.ML_POWERED_JS_QUERIES_PACK],
util.ML_POWERED_JS_QUERIES_PACK.version,
],
[[util.ML_POWERED_JS_QUERIES_PACK], util.ML_POWERED_JS_QUERIES_PACK.version],
[[{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName }], "other"],
[
[{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "~0.0.1" }],
"other",
],
[
[
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "0.0.1" },
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "0.0.2" },
],
"other",
],
[
[
{ packName: "someOtherPack" },
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName },
],
"other",
],
];
for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
const packDescriptions = `[${packs
.map((pack) => JSON.stringify(pack))
.join(", ")}]`;
(0, ava_1.default)(`ML-powered JS queries status report is "${expectedStatus}" for packs = ${packDescriptions}`, (t) => {
return util.withTmpDir(async (tmpDir) => {
const config = {
languages: [],
queries: {},
paths: [],
pathsIgnore: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: {
type: util.GitHubVariant.DOTCOM,
},
dbLocation: "",
packs: {
javascript: packs,
},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);
});
});
}
//# sourceMappingURL=util.test.js.map

File diff suppressed because one or more lines are too long

2
node_modules/.bin/ava generated vendored
View File

@@ -1 +1 @@
../ava/entrypoints/cli.mjs
../ava/cli.js

1
node_modules/.bin/import-local-fixture generated vendored Symbolic link
View File

@@ -0,0 +1 @@
../import-local/fixtures/cli.js

1
node_modules/.bin/is-ci generated vendored Symbolic link
View File

@@ -0,0 +1 @@
../is-ci/bin.js

1
node_modules/.bin/rc generated vendored Symbolic link
View File

@@ -0,0 +1 @@
../rc/cli.js

2964
node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large

node_modules/@ava/typescript/README.md
View File

@@ -1,21 +1,11 @@
# @ava/typescript
Adds [TypeScript](https://www.typescriptlang.org/) support to [AVA 4](https://avajs.dev).
Adds [TypeScript](https://www.typescriptlang.org/) support to [AVA](https://avajs.dev).
This is designed to work for projects that precompile TypeScript. It allows AVA to load the compiled JavaScript, while configuring AVA to treat the TypeScript files as test files.
In other words, say you have a test file at `src/test.ts`. You've configured TypeScript to output to `build/`. Using `@ava/typescript` you can run the test using `npx ava src/test.ts`.
## For AVA 3 users
Use version 2:
```console
npm install --save-dev @ava/typescript@2
```
Note that v2 does not support ES modules. This requires v3 and AVA 4.
## Enabling TypeScript support
Add this package to your project:
@@ -49,10 +39,6 @@ Output files are expected to have the `.js` extension.
AVA searches your entire project for `*.js`, `*.cjs`, `*.mjs` and `*.ts` files (or other extensions you've configured). It will ignore such files found in the `rewritePaths` targets (e.g. `build/`). If you use more specific paths, for instance `build/main/`, you may need to change AVA's `files` configuration to ignore other directories.
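For orientation, the `rewritePaths` mapping described above lives under the `ava.typescript` key of `package.json`. A minimal snippet consistent with the validation rules in the provider's `index.js` shown further down; treat it as a sketch rather than the canonical configuration:

```json
{
  "ava": {
    "typescript": {
      "compile": false,
      "rewritePaths": {
        "src/": "build/"
      }
    }
  }
}
```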
## ES Modules
If your `package.json` has configured `"type": "module"`, or you've configured AVA to treat the `js` extension as `module`, then `@ava/typescript` will import the output file as an ES module. Note that this is based on the *output file*, not the `ts` extension.
## Add additional extensions
You can configure AVA to recognize additional file extensions. To add (partial†) JSX support:

node_modules/@ava/typescript/index.js
View File

@@ -1,10 +1,9 @@
import fs from 'node:fs';
import path from 'node:path';
import {pathToFileURL} from 'node:url';
import escapeStringRegexp from 'escape-string-regexp';
import execa from 'execa';
'use strict';
const path = require('path');
const escapeStringRegexp = require('escape-string-regexp');
const execa = require('execa');
const pkg = require('./package.json');
const pkg = JSON.parse(fs.readFileSync(new URL('package.json', import.meta.url)));
const help = `See https://github.com/avajs/typescript/blob/v${pkg.version}/README.md`;
function isPlainObject(x) {
@@ -45,7 +44,7 @@ const configProperties = {
required: true,
isValid(compile) {
return compile === false || compile === 'tsc';
},
}
},
rewritePaths: {
required: true,
@@ -54,21 +53,23 @@ const configProperties = {
return false;
}
return Object.entries(rewritePaths).every(([from, to]) => from.endsWith('/') && typeof to === 'string' && to.endsWith('/'));
},
return Object.entries(rewritePaths).every(([from, to]) => {
return from.endsWith('/') && typeof to === 'string' && to.endsWith('/');
});
}
},
extensions: {
required: false,
isValid(extensions) {
return Array.isArray(extensions)
&& extensions.length > 0
&& extensions.every(ext => typeof ext === 'string' && ext !== '')
&& new Set(extensions).size === extensions.length;
},
},
return Array.isArray(extensions) &&
extensions.length > 0 &&
extensions.every(ext => typeof ext === 'string' && ext !== '') &&
new Set(extensions).size === extensions.length;
}
}
};
export default function typescriptProvider({negotiateProtocol}) {
module.exports = ({negotiateProtocol}) => {
const protocol = negotiateProtocol(['ava-3.2'], {version: pkg.version});
if (protocol === null) {
return;
@@ -85,12 +86,12 @@ export default function typescriptProvider({negotiateProtocol}) {
const {
extensions = ['ts'],
rewritePaths: relativeRewritePaths,
compile,
compile
} = config;
const rewritePaths = Object.entries(relativeRewritePaths).map(([from, to]) => [
path.join(protocol.projectDir, from),
path.join(protocol.projectDir, to),
path.join(protocol.projectDir, to)
]);
const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);
@@ -101,13 +102,13 @@ export default function typescriptProvider({negotiateProtocol}) {
}
return {
extensions: [...extensions],
rewritePaths: [...rewritePaths],
extensions: extensions.slice(),
rewritePaths: rewritePaths.slice()
};
},
get extensions() {
return [...extensions];
return extensions.slice();
},
ignoreChange(filePath) {
@@ -138,19 +139,18 @@ export default function typescriptProvider({negotiateProtocol}) {
filePatterns: [
...filePatterns,
'!**/*.d.ts',
...Object.values(relativeRewritePaths).map(to => `!${to}**`),
...Object.values(relativeRewritePaths).map(to => `!${to}**`)
],
ignoredByWatcherPatterns: [
...ignoredByWatcherPatterns,
...Object.values(relativeRewritePaths).map(to => `${to}**/*.js.map`),
],
...Object.values(relativeRewritePaths).map(to => `${to}**/*.js.map`)
]
};
},
}
};
},
worker({extensionsToLoadAsModules, state: {extensions, rewritePaths}}) {
const useImport = extensionsToLoadAsModules.includes('js');
const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);
return {
@@ -159,12 +159,18 @@ export default function typescriptProvider({negotiateProtocol}) {
},
async load(ref, {requireFn}) {
for (const extension of extensionsToLoadAsModules) {
if (ref.endsWith(`.${extension}`)) {
throw new Error('@ava/typescript cannot yet load ESM files');
}
}
const [from, to] = rewritePaths.find(([from]) => ref.startsWith(from));
// TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
const rewritten = `${to}${ref.slice(from.length)}`.replace(testFileExtension, '.js');
return useImport ? import(pathToFileURL(rewritten)) : requireFn(rewritten); // eslint-disable-line node/no-unsupported-features/es-syntax
},
return requireFn(rewritten);
}
};
},
}
};
}
};

node_modules/escape-string-regexp/index.d.ts
View File

@@ -5,7 +5,7 @@ You can also use this to escape a string that is inserted into the middle of a r
@example
```
import escapeStringRegexp from 'escape-string-regexp';
import escapeStringRegexp = require('escape-string-regexp');
const escapedString = escapeStringRegexp('How much $ for a 🦄?');
//=> 'How much \\$ for a 🦄\\?'
@@ -13,4 +13,6 @@ const escapedString = escapeStringRegexp('How much $ for a 🦄?');
new RegExp(escapedString);
```
*/
export default function escapeStringRegexp(string: string): string;
declare const escapeStringRegexp: (string: string) => string;
export = escapeStringRegexp;

node_modules/escape-string-regexp/index.js
View File

@@ -1,11 +1,13 @@
export default function escapeStringRegexp(string) {
'use strict';
module.exports = string => {
if (typeof string !== 'string') {
throw new TypeError('Expected a string');
}
// Escape characters with special meaning either inside or outside character sets.
// Use a simple backslash escape when it’s always valid, and a `\xnn` escape when the simpler form would be disallowed by Unicode patterns’ stricter grammar.
// Use a simple backslash escape when it’s always valid, and a \unnnn escape when the simpler form would be disallowed by Unicode patterns’ stricter grammar.
return string
.replace(/[|\\{}()[\]^$+*?.]/g, '\\$&')
.replace(/-/g, '\\x2d');
}
};
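Both variants above perform the same escaping; only the module format differs. A short demo, using the ESM import form for brevity (the expected output is taken from the example in `index.d.ts` above):

```typescript
import escapeStringRegexp from "escape-string-regexp";

// Escape regex metacharacters, then encode "-" as \x2d so the result is also
// safe inside character classes.
const escaped = escapeStringRegexp("How much $ for a 🦄?");
//=> 'How much \\$ for a 🦄\\?'

const matcher = new RegExp(escaped);
```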

node_modules/escape-string-regexp/package.json
View File

@@ -1,6 +1,6 @@
{
"name": "escape-string-regexp",
"version": "5.0.0",
"version": "4.0.0",
"description": "Escape RegExp special characters",
"license": "MIT",
"repository": "sindresorhus/escape-string-regexp",
@@ -10,10 +10,8 @@
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"engines": {
"node": ">=12"
"node": ">=10"
},
"scripts": {
"test": "xo && ava && tsd"
@@ -33,8 +31,8 @@
"characters"
],
"devDependencies": {
"ava": "^3.15.0",
"tsd": "^0.14.0",
"xo": "^0.38.2"
"ava": "^1.4.1",
"tsd": "^0.11.0",
"xo": "^0.28.3"
}
}

Some files were not shown because too many files have changed in this diff Show More