Compare commits


1 Commit

Author          | SHA1       | Message             | Date
David Verdeguer | 1e83c72db7 | pre-hook simple try | 2020-07-31 14:30:35 +02:00
5655 changed files with 64320 additions and 506817 deletions

View File

@@ -1,5 +0,0 @@
**/webpack.config.js
lib/**
runner/dist/**
src/testdata/**
tests/**

View File

@@ -1,59 +0,0 @@
{
"parser": "@typescript-eslint/parser",
"parserOptions": {
"project": "./tsconfig.json"
},
"plugins": ["@typescript-eslint", "filenames", "github", "import", "no-async-foreach"],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-requiring-type-checking",
"plugin:github/recommended",
"plugin:github/typescript"
],
"rules": {
"filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],
"import/extensions": "error",
"import/no-amd": "error",
"import/no-commonjs": "error",
"import/no-dynamic-require": "error",
"import/no-extraneous-dependencies": ["error", {"devDependencies": false}],
"import/no-namespace": "off",
"import/no-unresolved": "error",
"import/no-webpack-loader-syntax": "error",
"no-async-foreach/no-async-foreach": "error",
"no-console": "off",
"no-sequences": "error",
"one-var": ["error", "never"],
"sort-imports": ["error", { "allowSeparatedGroups": true }]
},
"overrides": [{
// "temporarily downgraded during transition to eslint
"files": "**",
"rules": {
"@typescript-eslint/ban-types": "off",
"@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-call": "off",
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unused-vars": "off",
"@typescript-eslint/no-var-requires": "off",
"@typescript-eslint/prefer-regexp-exec": "off",
"@typescript-eslint/require-await": "off",
"@typescript-eslint/restrict-template-expressions": "off",
"eslint-comments/no-use": "off",
"func-style": "off",
"github/array-foreach": "off",
"github/no-then": "off",
"import/no-extraneous-dependencies": "off",
"no-shadow": "off",
"no-sparse-arrays": "off",
"no-throw-literal": "off",
"no-useless-escape": "off",
"sort-imports": "off"
}
}]
}

View File

@@ -1,9 +1,6 @@
name: "CodeQL action" name: "CodeQL action"
on: on: [push, pull_request]
push:
branches: [main, v1]
pull_request:
jobs: jobs:
build: build:
@@ -24,8 +21,7 @@ jobs:
- run: git checkout HEAD^2 - run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }} if: ${{ github.event_name == 'pull_request' }}
- uses: ./init - uses: github/codeql-action/full@pre-hook
with: with:
languages: javascript languages: javascript
config-file: ./.github/codeql/codeql-config.yml config-file: ./.github/codeql/codeql-config.yml
- uses: ./analyze

View File

@@ -1,9 +1,6 @@
name: "Integration Testing" name: "Integration Testing"
on: on: [push, pull_request]
push:
branches: [main, v1]
pull_request:
jobs: jobs:
multi-language-repo_test-autodetect-languages: multi-language-repo_test-autodetect-languages:
@@ -25,7 +22,7 @@ jobs:
env: env:
TEST_MODE: true TEST_MODE: true
- run: | - run: |
cd "$RUNNER_TEMP/codeql_databases" cd "$CODEQL_ACTION_DATABASE_DIR"
# List all directories as there will be precisely one directory per database # List all directories as there will be precisely one directory per database
# but there may be other files in this directory such as query suites. # but there may be other files in this directory such as query suites.
if [ "$(ls -d */ | wc -l)" != 6 ] || \ if [ "$(ls -d */ | wc -l)" != 6 ] || \
@@ -127,48 +124,6 @@ jobs:
env: env:
TEST_MODE: true TEST_MODE: true
single-language-bundles:
# These are 21 jobs; run them only if the earlier multi-language job succeeded
# needs: multi-language-repo_test-autodetect-languages
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
language: ["none", "cpp", "csharp", "go", "java", "javascript", "python"]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
# The next 2 steps are mutually exclusive.
# In one case, we setup codeql for a single language.
# In the other, we setup codeql for a platform
- name: Test language-specific bundle
uses: ./../action/init
if: matrix.language != 'none'
with:
languages: ${{ matrix.language }}
- name: Test platform-specific bundle
uses: ./../action/init
if: matrix.language == 'none'
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
- name: Check ToolCache (language-specific bundle)
if: matrix.language != 'none'
shell: bash
run: test -n "$(find -maxdepth 1 -name "codeql-bundle-0.0.0-*.${OS}-${{matrix.language}}" -print -quit)"
test-proxy: test-proxy:
runs-on: ubuntu-latest runs-on: ubuntu-latest
container: container:
@@ -195,297 +150,3 @@ jobs:
- uses: ./../action/analyze - uses: ./../action/analyze
env: env:
TEST_MODE: true TEST_MODE: true
runner-analyze-javascript-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
# Pass --config-file here, but not for other jobs in this workflow.
# This means we're testing the config file parsing in the runner
# but not slowing down all jobs unnecessarily, since testing the parsing on
# different operating systems and languages doesn't add much.
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
. ./codeql-runner/codeql-env.sh
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
../action/runner/dist/codeql-runner-linux autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
../action/runner/dist/codeql-runner-win.exe autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
../action/runner/dist/codeql-runner-macos autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-upload-sarif:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Upload with runner
run: |
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}

View File

@@ -1,24 +1,21 @@
name: "PR checks" name: "PR checks"
on: on: [push, pull_request]
push:
branches: [main, v1]
pull_request:
jobs: jobs:
lint-js: tslint:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v1
- name: Run Lint - name: tslint
run: npm run-script lint run: npm run-script lint
check-js: check-js:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v1
- name: Check generated JavaScript - name: Check generated JavaScript
run: | run: |
# Sanity check that repo is clean to start with # Sanity check that repo is clean to start with
@@ -27,8 +24,6 @@ jobs:
>&2 echo "Failed: Repo should be clean before testing!" >&2 echo "Failed: Repo should be clean before testing!"
exit 1 exit 1
fi fi
# Wipe the lib directory in case there are extra unnecessary files in there
rm -rf lib
# Generate the JavaScript files # Generate the JavaScript files
npm run-script build npm run-script build
# Check that repo is still clean # Check that repo is still clean
@@ -44,7 +39,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v1
- name: Check node modules up to date - name: Check node modules up to date
run: | run: |
# Sanity check that repo is clean to start with # Sanity check that repo is clean to start with
@@ -53,6 +48,7 @@ jobs:
>&2 echo "Failed: Repo should be clean before testing!" >&2 echo "Failed: Repo should be clean before testing!"
exit 1 exit 1
fi fi
# Reinstall modules and then clean to remove absolute paths # Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible # Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci npm ci
@@ -67,12 +63,9 @@ jobs:
echo "Success: node_modules are up to date" echo "Success: node_modules are up to date"
npm-test: npm-test:
strategy: runs-on: ubuntu-latest
matrix:
os: [ubuntu-latest,macos-latest]
runs-on: ${{ matrix.os }}
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v1
- name: npm run-script test - name: npm run-script test
run: npm run-script test run: npm run-script test

.gitignore (vendored, 2 lines changed)
View File

@@ -1,2 +0,0 @@
/runner/dist/
/runner/node_modules/

View File

@@ -26,33 +26,10 @@ This project also includes configuration to run tests from VSCode (with support
To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests. To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests.
### Running the action locally
It is possible to run this action locally via [act](https://github.com/nektos/act) via the following steps:
1. Create a GitHub [Personal Access Token](https://github.com/settings/tokens) (PAT).
1. Install [act](https://github.com/nektos/act) v0.2.10 or greater.
1. Add a `.env` file in the root of the project you are running:
```bash
CODEQL_LOCAL_RUN=true
# Optional, for better logging
GITHUB_JOB=<ANY_JOB_NAME>
```
1. Run `act -j codeql -s GITHUB_TOKEN=<PAT>`
Running locally will generate the CodeQL database and run all the queries, but it will avoid uploading and reporting results to GitHub. Note that this must be done on a repository that _consumes_ this action, not this repository. The use case is to debug failures of this action on specific repositories.
### Integration tests ### Integration tests
As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you're making, you may want to add a test to this file or extend an existing one. As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you're making, you may want to add a test to this file or extend an existing one.
### Building the CodeQL runner
Navigate to the `runner` directory and run `npm install` to install dependencies needed only for compiling the CodeQL runner. Run `npm run build-runner` to output files to the `runner/dist` directory.
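For reference, a minimal sketch of the build steps described in the paragraph above (it assumes the `build-runner` npm script exists in `runner/package.json`, as the workflow jobs elsewhere in this diff also assume):
```bash
# Build the CodeQL runner from the repository root, as described above.
cd runner
npm install           # install the dependencies needed to compile the runner
npm run build-runner  # write the compiled output to runner/dist/
```
The integration-testing jobs removed in this diff run exactly these commands before invoking the `runner/dist/codeql-runner-*` binaries.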
## Submitting a pull request ## Submitting a pull request
1. [Fork][fork] and clone the repository 1. [Fork][fork] and clone the repository

View File

@@ -98,23 +98,7 @@ Use the `config-file` parameter of the `init` action to enable the configuration
config-file: ./.github/codeql/codeql-config.yml config-file: ./.github/codeql/codeql-config.yml
``` ```
The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)." The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
```yaml
- uses: github/codeql-action/init@v1
with:
queries: <local-or-remote-query>,<another-query>
```
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:
```yaml
- uses: github/codeql-action/init@v1
with:
queries: +<local-or-remote-query>,<another-query>
```
## Troubleshooting ## Troubleshooting

View File

@@ -16,13 +16,10 @@ inputs:
ram: ram:
description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used. description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
required: false required: false
add-snippets:
description: Specify whether or not to add code snippets to the output sarif file.
required: false
default: "false"
threads: threads:
description: The number of threads to be used by CodeQL. description: The number of threads to be used by CodeQL.
required: false required: false
default: "1"
checkout_path: checkout_path:
description: "The path at which the analyzed repository was checked out. Used to relativeize any absolute paths in the uploaded SARIF file." description: "The path at which the analyzed repository was checked out. Used to relativeize any absolute paths in the uploaded SARIF file."
required: false required: false
@@ -33,4 +30,4 @@ inputs:
default: ${{ toJson(matrix) }} default: ${{ toJson(matrix) }}
runs: runs:
using: 'node12' using: 'node12'
main: '../lib/analyze-action.js' main: '../lib/finalize-db.js'

View File

@@ -8,4 +8,4 @@ inputs:
default: ${{ toJson(matrix) }} default: ${{ toJson(matrix) }}
runs: runs:
using: 'node12' using: 'node12'
main: '../lib/autobuild-action.js' main: '../lib/autobuild.js'

full/action.yml (new file, 45 lines changed)
View File

@@ -0,0 +1,45 @@
name: 'CodeQL'
description: 'Setup the CodeQL tracer'
author: 'GitHub'
inputs:
tools:
description: URL of CodeQL tools
required: false
# If not specified the Action will check in several places until it finds the CodeQL tools.
languages:
description: The languages to be analysed
required: false
token:
default: ${{ github.token }}
matrix:
default: ${{ toJson(matrix) }}
config-file:
description: Path of the config file to use
required: false
check_name:
description: The name of the check run to add text to.
required: false
output:
description: The path of the directory in which to save the SARIF results
required: false
default: '../results'
upload:
description: Upload the SARIF file
required: false
default: "true"
ram:
description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
required: false
threads:
description: The number of threads to be used by CodeQL.
required: false
default: "1"
checkout_path:
description: "The path at which the analyzed repository was checked out. Used to relativeize any absolute paths in the uploaded SARIF file."
required: false
default: ${{ github.workspace }}
runs:
using: 'node12'
pre: '../lib/setup-tracer.js'
main: '../lib/finalize-db.js'

View File

@@ -16,9 +16,6 @@ inputs:
config-file: config-file:
description: Path of the config file to use description: Path of the config file to use
required: false required: false
queries:
description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
required: false
runs: runs:
using: 'node12' using: 'node12'
main: '../lib/init-action.js' main: '../lib/setup-tracer.js'

lib/actions-util.js (generated, 266 lines changed)
View File

@@ -1,266 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
const util_1 = require("./util");
/**
* Wrapper around core.getInput for inputs that always have a value.
* Also see getOptionalInput.
*
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
function getRequiredInput(name) {
return core.getInput(name, { required: true });
}
exports.getRequiredInput = getRequiredInput;
/**
* Wrapper around core.getInput that converts empty inputs to undefined.
* Also see getRequiredInput.
*
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
function getOptionalInput(name) {
const value = core.getInput(name);
return value.length > 0 ? value : undefined;
}
exports.getOptionalInput = getOptionalInput;
/**
* Get an environment parameter, but throw an error if it is not set.
*/
function getRequiredEnvParam(paramName) {
const value = process.env[paramName];
if (value === undefined || value.length === 0) {
throw new Error(`${paramName} environment variable must be set`);
}
core.debug(`${paramName}=${value}`);
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
/**
* Ensures all required environment variables are set in the context of a local run.
*/
function prepareLocalRunEnvironment() {
if (!util_1.isLocalRun()) {
return;
}
core.debug("Action is running locally.");
if (!process.env.GITHUB_JOB) {
core.exportVariable("GITHUB_JOB", "UNKNOWN-JOB");
}
}
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
/**
* Gets the SHA of the commit that is currently checked out.
*/
async function getCommitOid() {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
// the workflow has changed the current commit to the head commit instead of
// the merge commit, which must mean that git is available.
// Even if this does go wrong, it's not a huge problem for the alerts to
// be reported on the merge commit.
try {
let commitOid = "";
await new toolrunnner.ToolRunner("git", ["rev-parse", "HEAD"], {
silent: true,
listeners: {
stdout: (data) => {
commitOid += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
return commitOid.trim();
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
return getRequiredEnvParam("GITHUB_SHA");
}
}
exports.getCommitOid = getCommitOid;
/**
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath() {
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
owner,
repo,
run_id,
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
return workflowResponse.data.path;
}
/**
* Get the workflow run ID.
*/
function getWorkflowRunID() {
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
if (Number.isNaN(workflowRunID)) {
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
}
return workflowRunID;
}
exports.getWorkflowRunID = getWorkflowRunID;
/**
* Get the analysis key parameter for the current job.
*
* This will combine the workflow path and current job name.
* Computing this the first time requires making requests to
* the github API, but after that the result will be cached.
*/
async function getAnalysisKey() {
const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
let analysisKey = process.env[analysisKeyEnvVar];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam("GITHUB_JOB");
analysisKey = `${workflowPath}:${jobName}`;
core.exportVariable(analysisKeyEnvVar, analysisKey);
return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
/**
* Get the ref currently being analyzed.
*/
function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam("GITHUB_REF");
// For pull request refs we want to convert from the 'merge' ref
// to the 'head' ref, as that is what we want to analyse.
// There should have been some code earlier in the workflow to do
// the checkout, but we have no way of verifying that here.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
if (pull_ref_regex.test(ref)) {
return ref.replace(pull_ref_regex, "refs/pull/$1/head");
}
else {
return ref;
}
}
exports.getRef = getRef;
/**
* Compose a StatusReport.
*
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
* @param status The status. Must be 'success', 'failure', or 'starting'
* @param startedAt The time this action started executing.
* @param cause Cause of failure (only supply if status is 'failure')
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = process.env["GITHUB_SHA"] || "";
const ref = getRef();
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
let workflowRunID = -1;
if (workflowRunIDStr) {
workflowRunID = parseInt(workflowRunIDStr, 10);
}
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
const jobName = process.env["GITHUB_JOB"] || "";
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
const statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
job_name: jobName,
analysis_key,
commit_oid: commitOid,
ref,
action_name: actionName,
action_oid: "unknown",
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status,
};
// Add optional parameters
if (cause) {
statusReport.cause = cause;
}
if (exception) {
statusReport.exception = exception;
}
if (status === "success" || status === "failure" || status === "aborted") {
statusReport.completed_at = new Date().toISOString();
}
const matrix = getRequiredInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
/**
* Send a status report to the code_scanning/analysis/status endpoint.
*
* Optionally checks the response from the API endpoint and sets the action
* as failed if the status report failed. This is only expected to be used
* when sending a 'starting' report.
*
* Returns whether sending the status report was successful or not.
*/
async function sendStatusReport(statusReport, ignoreFailures) {
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
core.debug("Not sending status report to GitHub Enterprise");
return true;
}
if (util_1.isLocalRun()) {
core.debug("Not sending status report because this is a local run");
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
const statusResponse = await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
owner,
repo,
data: statusReportJSON,
});
if (!ignoreFailures) {
// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusResponse.status === 403) {
core.setFailed("The repo on which this action is running is not opted-in to CodeQL code scanning.");
return false;
}
if (statusResponse.status === 404) {
core.setFailed("Not authorized to used the CodeQL code scanning feature on this repo.");
return false;
}
}
return true;
}
exports.sendStatusReport = sendStatusReport;
//# sourceMappingURL=actions-util.js.map

File diff suppressed because one or more lines are too long

View File

@@ -1,31 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const actions_util_1 = require("./actions-util");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("getRef() throws on the empty string", (t) => {
process.env["GITHUB_REF"] = "";
t.throws(actions_util_1.getRef);
});
ava_1.default("prepareEnvironment() when a local run", (t) => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = "false";
process.env.GITHUB_JOB = "YYY";
actions_util_1.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, "YYY");
process.env.CODEQL_LOCAL_RUN = "true";
actions_util_1.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, "YYY");
process.env.GITHUB_JOB = "";
actions_util_1.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
//# sourceMappingURL=actions-util.test.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"actions-util.test.js","sourceRoot":"","sources":["../src/actions-util.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,iDAAoE;AACpE,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE,EAAE;IAChD,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;IAC/B,CAAC,CAAC,MAAM,CAAC,qBAAM,CAAC,CAAC;AACnB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,uCAAuC,EAAE,CAAC,CAAC,EAAE,EAAE;IAClD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;IAElD,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,OAAO,CAAC;IACvC,OAAO,CAAC,GAAG,CAAC,UAAU,GAAG,KAAK,CAAC;IAE/B,yCAA0B,EAAE,CAAC;IAE7B,YAAY;IACZ,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;IAE3C,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,MAAM,CAAC;IAEtC,yCAA0B,EAAE,CAAC;IAE7B,YAAY;IACZ,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;IAE3C,OAAO,CAAC,GAAG,CAAC,UAAU,GAAG,EAAE,CAAC;IAE5B,yCAA0B,EAAE,CAAC;IAE7B,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAEnD,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,YAAY,CAAC;AAC9C,CAAC,CAAC,CAAC"}

lib/analysis-paths.js (generated, 45 lines changed)
View File

@@ -1,29 +1,28 @@
"use strict"; "use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
function isInterpretedLanguage(language) { function isInterpretedLanguage(language) {
return language === "javascript" || language === "python"; return language === 'javascript' || language === 'python';
} }
// Matches a string containing only characters that are legal to include in paths on windows. // Matches a string containing only characters that are legal to include in paths on windows.
exports.legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/; exports.legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE // Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
function buildIncludeExcludeEnvVar(paths) { function buildIncludeExcludeEnvVar(paths) {
// Ignore anything containing a * // Ignore anything containing a *
paths = paths.filter((p) => p.indexOf("*") === -1); paths = paths.filter(p => p.indexOf('*') === -1);
// Some characters are illegal in path names in windows // Some characters are illegal in path names in windows
if (process.platform === "win32") { if (process.platform === 'win32') {
paths = paths.filter((p) => p.match(exports.legalWindowsPathCharactersRegex)); paths = paths.filter(p => p.match(exports.legalWindowsPathCharactersRegex));
} }
return paths.join("\n"); return paths.join('\n');
} }
function printPathFiltersWarning(config, logger) {
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
}
}
exports.printPathFiltersWarning = printPathFiltersWarning;
function includeAndExcludeAnalysisPaths(config) { function includeAndExcludeAnalysisPaths(config) {
// The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables // The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
// control which files/directories are traversed when scanning. // control which files/directories are traversed when scanning.
@@ -33,19 +32,27 @@ function includeAndExcludeAnalysisPaths(config) {
// traverse the entire file tree to determine which files are matched. // traverse the entire file tree to determine which files are matched.
// Any paths containing "*" are not included in these. // Any paths containing "*" are not included in these.
if (config.paths.length !== 0) { if (config.paths.length !== 0) {
process.env["LGTM_INDEX_INCLUDE"] = buildIncludeExcludeEnvVar(config.paths); core.exportVariable('LGTM_INDEX_INCLUDE', buildIncludeExcludeEnvVar(config.paths));
} }
if (config.pathsIgnore.length !== 0) { if (config.pathsIgnore.length !== 0) {
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(config.pathsIgnore); core.exportVariable('LGTM_INDEX_EXCLUDE', buildIncludeExcludeEnvVar(config.pathsIgnore));
} }
// The 'LGTM_INDEX_FILTERS' environment variable controls which files are // The 'LGTM_INDEX_FILTERS' environment variable controls which files are
// extracted or ignored. It does not control which directories are traversed. // extracted or ignored. It does not control which directories are traversed.
// This does understand the glob and double-glob syntax. // This does understand the glob and double-glob syntax.
const filters = []; const filters = [];
filters.push(...config.paths.map((p) => `include:${p}`)); filters.push(...config.paths.map(p => 'include:' + p));
filters.push(...config.pathsIgnore.map((p) => `exclude:${p}`)); filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
if (filters.length !== 0) { if (filters.length !== 0) {
process.env["LGTM_INDEX_FILTERS"] = filters.join("\n"); core.exportVariable('LGTM_INDEX_FILTERS', filters.join('\n'));
}
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 ||
config.pathsIgnore.length !== 0 ||
filters.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
} }
} }
exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths; exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;

View File

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;AAGA,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAC3D,MAAM,CAAC,WAAW,CACnB,CAAC;KACH;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AA1BD,wEA0BC"} {"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAClD,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}

View File

@@ -13,42 +13,31 @@ Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava")); const ava_1 = __importDefault(require("ava"));
const analysisPaths = __importStar(require("./analysis-paths")); const analysisPaths = __importStar(require("./analysis-paths"));
const testing_utils_1 = require("./testing-utils"); const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default); testing_utils_1.setupTests(ava_1.default);
ava_1.default("emptyPaths", async (t) => { ava_1.default("emptyPaths", async (t) => {
return await util.withTmpDir(async (tmpDir) => { const config = {
const config = { languages: [],
languages: [], queries: {},
queries: {}, pathsIgnore: [],
pathsIgnore: [], paths: [],
paths: [], originalUserInput: {},
originalUserInput: {}, };
tempDir: tmpDir, analysisPaths.includeAndExcludeAnalysisPaths(config);
toolCacheDir: tmpDir, t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
codeQLCmd: "", t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
}; t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
}); });
ava_1.default("nonEmptyPaths", async (t) => { ava_1.default("nonEmptyPaths", async (t) => {
return await util.withTmpDir(async (tmpDir) => { const config = {
const config = { languages: [],
languages: [], queries: {},
queries: {}, paths: ['path1', 'path2', '**/path3'],
paths: ["path1", "path2", "**/path3"], pathsIgnore: ['path4', 'path5', 'path6/**'],
pathsIgnore: ["path4", "path5", "path6/**"], originalUserInput: {},
originalUserInput: {}, };
tempDir: tmpDir, analysisPaths.includeAndExcludeAnalysisPaths(config);
toolCacheDir: tmpDir, t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
codeQLCmd: "", t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
}; t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
t.is(process.env["LGTM_INDEX_EXCLUDE"], "path4\npath5");
t.is(process.env["LGTM_INDEX_FILTERS"], "include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**");
});
}); });
//# sourceMappingURL=analysis-paths.test.js.map //# sourceMappingURL=analysis-paths.test.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"} {"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QAC3C,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}

lib/analyze-action.js (generated, 56 lines changed)
View File

@@ -1,56 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, stats, error) {
var _a, _b, _c;
const status = ((_a = stats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const statusReport = {
...statusReportBase,
...(stats || {}),
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
let stats = undefined;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt), true))) {
return;
}
const logger = logging_1.getActionsLogger();
const config = await config_utils_1.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
stats = await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), actionsUtil.getRequiredInput("token"), actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), actionsUtil.getRequiredInput("upload") === "true", "actions", actionsUtil.getRequiredInput("output"), util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await sendStatusReport(startedAt, stats, error);
return;
}
await sendStatusReport(startedAt, stats);
}
run().catch((e) => {
core.setFailed(`analyze action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=analyze-action.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6D;AAC7D,iDAA2C;AAC3C,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAM/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,EACD,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,MAAM,GAAG,MAAM,wBAAS,CAC5B,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,KAAK,GAAG,MAAM,oBAAU,CACtB,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,MAAM,WAAW,CAAC,YAAY,EAAE,EAChC,WAAW,CAAC,MAAM,EAAE,EACpB,MAAM,WAAW,CAAC,cAAc,EAAE,EAClC,WAAW,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAClD,WAAW,CAAC,gBAAgB,EAAE,EAC9B,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EACjD,SAAS,EACT,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACpE,MAAM,EACN,MAAM,CACP,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,0BAA0B,CAAC,EAAE,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

lib/analyze.js (generated, 100 lines changed)
View File

@@ -1,100 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function createdDBForScannedLanguages(config, logger) {
// Insert the LGTM_INDEX_X env vars at this point so they are set when
// we extract any scanned languages.
analysisPaths.includeAndExcludeAnalysisPaths(config);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
if (languages_1.isScannedLanguage(language)) {
logger.startGroup(`Extracting ${language}`);
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
logger.endGroup();
}
}
}
async function finalizeDatabaseCreation(config, logger) {
await createdDBForScannedLanguages(config, logger);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
logger.startGroup(`Finalizing ${language}`);
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language));
logger.endGroup();
}
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
const statusReport = {};
for (const language of config.languages) {
logger.startGroup(`Analyzing ${language}`);
const queries = config.queries[language];
if (queries.builtin.length === 0 && queries.custom.length === 0) {
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
try {
for (const type of ["builtin", "custom"]) {
if (queries[type].length > 0) {
const startTime = new Date().getTime();
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuitePath = `${databasePath}-queries-${type}.qls`;
const querySuiteContents = queries[type]
.map((q) => `- query: ${q}`)
.join("\n");
fs.writeFileSync(querySuitePath, querySuiteContents);
logger.debug(`Query suite file for ${language}...\n${querySuiteContents}`);
const sarifFile = path.join(sarifFolder, `${language}-${type}.sarif`);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
await codeql.databaseAnalyze(databasePath, sarifFile, querySuitePath, memoryFlag, addSnippetsFlag, threadsFlag);
logger.debug(`SARIF results for database ${language} created at "${sarifFile}"`);
logger.endGroup();
// Record the performance
const endTime = new Date().getTime();
statusReport[`analyze_${type}_queries_${language}_duration_ms`] =
endTime - startTime;
}
}
}
catch (e) {
logger.error(`Error running analysis for ${language}: ${e}`);
logger.info(e);
statusReport.analyze_failure_language = language;
return statusReport;
}
}
return statusReport;
}
exports.runQueries = runQueries;
async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, doUpload, mode, outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
fs.mkdirSync(outputDir, { recursive: true });
logger.info("Finalizing database creation");
await finalizeDatabaseCreation(config, logger);
logger.info("Analyzing database");
const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
if (!doUpload) {
logger.info("Not uploading results");
return { ...queriesStats };
}
const uploadStats = await upload_lib.upload(outputDir, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger);
return { ...queriesStats, ...uploadStats };
}
exports.runAnalyze = runAnalyze;
//# sourceMappingURL=analyze.js.map
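
The query-suite handling in runQueries above, writing a .qls file rather than passing every query on the command line, can be exercised on its own. The following is a minimal sketch, not part of the action itself: the temp directory, query names, and the final console.log are invented for illustration.

const fs = require("fs");
const os = require("os");
const path = require("path");

// Invented inputs: a per-language database path and the queries selected for it.
const databasePath = path.join(os.tmpdir(), "codeql_databases", "javascript");
const builtinQueries = ["javascript-security-and-quality.qls", "my-org/my-query.ql"];

// Same suite format as above: one "- query:" entry per selected query.
const querySuitePath = `${databasePath}-queries-builtin.qls`;
const querySuiteContents = builtinQueries.map((q) => `- query: ${q}`).join("\n");

fs.mkdirSync(path.dirname(querySuitePath), { recursive: true });
fs.writeFileSync(querySuitePath, querySuiteContents);

// The suite file, not the individual queries, is then passed to
// `codeql database analyze`, sidestepping command-line length limits on Windows.
console.log(`wrote ${querySuitePath}\n${querySuiteContents}`);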

File diff suppressed because one or more lines are too long

lib/analyze.test.js generated
View File

@@ -1,63 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
// Checks that the duration fields are populated for the correct language
// and correct case of builtin or custom.
ava_1.default("status report fields", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
codeql_1.setCodeQL({
databaseAnalyze: async () => undefined,
});
const memoryFlag = "";
const addSnippetsFlag = "";
const threadsFlag = "";
for (const language of Object.values(languages_1.Language)) {
const config = {
languages: [language],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
};
fs.mkdirSync(util.getCodeQLDatabasePath(config.tempDir, language), {
recursive: true,
});
config.queries[language] = {
builtin: ["foo.ql"],
custom: [],
};
const builtinStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logging_1.getRunnerLogger(true));
t.deepEqual(Object.keys(builtinStatusReport).length, 1);
t.true(`analyze_builtin_queries_${language}_duration_ms` in builtinStatusReport);
config.queries[language] = {
builtin: [],
custom: ["foo.ql"],
};
const customStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logging_1.getRunnerLogger(true));
t.deepEqual(Object.keys(customStatusReport).length, 1);
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
}
});
});
//# sourceMappingURL=analyze.test.js.map
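
The assertions in this test hinge on the key format that runQueries emits for its timing data. A small illustrative snippet of that naming convention (the language value below is just an example):

// Illustrative only: the duration keys this test expects runQueries to populate.
const language = "javascript"; // any value of the Language enum
for (const type of ["builtin", "custom"]) {
  console.log(`analyze_${type}_queries_${language}_duration_ms`);
}
// -> analyze_builtin_queries_javascript_duration_ms
// -> analyze_custom_queries_javascript_duration_ms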

File diff suppressed because one or more lines are too long

lib/api-client.js generated
View File

@@ -10,38 +10,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod }; return (mod && mod.__esModule) ? mod : { "default": mod };
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github")); const github = __importStar(require("@actions/github"));
const console_log_level_1 = __importDefault(require("console-log-level")); const console_log_level_1 = __importDefault(require("console-log-level"));
const path = __importStar(require("path")); exports.getApiClient = function () {
const actions_util_1 = require("./actions-util"); return new github.GitHub(core.getInput('token'), {
const util_1 = require("./util");
exports.getApiClient = function (githubAuth, githubUrl, allowLocalRun = false) {
if (util_1.isLocalRun() && !allowLocalRun) {
throw new Error("Invalid API call in local run");
}
return new github.GitHub({
auth: githubAuth,
baseUrl: getApiUrl(githubUrl),
userAgent: "CodeQL Action", userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" }), log: console_log_level_1.default({ level: "debug" })
}); });
}; };
function getApiUrl(githubUrl) {
const url = new URL(githubUrl);
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://api.github.com";
}
// Add the /api/v3 API prefix
url.pathname = path.join(url.pathname, "api", "v3");
return url.toString();
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been converted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient(allowLocalRun = false) {
return exports.getApiClient(actions_util_1.getRequiredInput("token"), actions_util_1.getRequiredEnvParam("GITHUB_SERVER_URL"), allowLocalRun);
}
exports.getActionsApiClient = getActionsApiClient;
//# sourceMappingURL=api-client.js.map //# sourceMappingURL=api-client.js.map
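
On the removed side, getApiUrl maps a GitHub server URL to its REST API endpoint: github.com and api.github.com collapse to the canonical https://api.github.com, while anything else is treated as GitHub Enterprise Server and gets an /api/v3 suffix. A standalone sketch of that mapping follows; the Enterprise hostname is made up.

const path = require("path");

// Mirrors the removed helper above; error handling for invalid URLs is omitted.
function getApiUrl(githubUrl) {
  const url = new URL(githubUrl);
  // github.com and api.github.com both resolve to the canonical dotcom API.
  if (url.hostname === "github.com" || url.hostname === "api.github.com") {
    return "https://api.github.com";
  }
  // GitHub Enterprise Server exposes its REST API under /api/v3.
  url.pathname = path.join(url.pathname, "api", "v3");
  return url.toString();
}

console.log(getApiUrl("https://github.com"));      // https://api.github.com
console.log(getApiUrl("https://ghe.example.com")); // https://ghe.example.com/api/v3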

File diff suppressed because one or more lines are too long

View File

@@ -1,58 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const autobuild_1 = require("./autobuild");
const config_utils = __importStar(require("./config-utils"));
const logging_1 = require("./logging");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
var _a, _b;
const status = failingLanguage !== undefined || cause !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("autobuild", status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
const statusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const logger = logging_1.getActionsLogger();
const startedAt = new Date();
let language = undefined;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("autobuild", "starting", startedAt), true))) {
return;
}
const config = await config_utils.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
language = autobuild_1.determineAutobuildLanguage(config, logger);
if (language !== undefined) {
await autobuild_1.runAutobuild(language, config, logger);
}
}
catch (error) {
core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error.message}`);
console.log(error);
await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error);
return;
}
await sendCompletedStatusReport(startedAt, language ? [language] : []);
}
run().catch((e) => {
core.setFailed(`autobuild action failed. ${e}`);
console.log(e);
});
//# sourceMappingURL=autobuild-action.js.map
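
For reference, the payload built by sendCompletedStatusReport above is the base report merged with two autobuild-specific fields. A rough sketch of that merge; the base-report object is a placeholder and the language values are invented.

// Placeholder for the object returned by createStatusReportBase (exact fields omitted).
const statusReportBase = { /* action name, status, start time, ... */ };

const allLanguages = ["csharp", "java"]; // example values only
const failingLanguage = "java";          // undefined when every build succeeded

const statusReport = {
  ...statusReportBase,
  autobuild_languages: allLanguages.join(","), // "csharp,java"
  autobuild_failure: failingLanguage,
};
console.log(statusReport);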

File diff suppressed because one or more lines are too long

lib/autobuild.js generated
View File

@@ -1,32 +1,64 @@
"use strict"; "use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const codeql_1 = require("./codeql"); const codeql_1 = require("./codeql");
const languages_1 = require("./languages"); const sharedEnv = __importStar(require("./shared-environment"));
function determineAutobuildLanguage(config, logger) { const util = __importStar(require("./util"));
// Attempt to find a language to autobuild async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
// We want pick the dominant language in the repo from the ones we're able to build var _a, _b;
// The languages are sorted in order specified by user or by lines of code if we got const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
// them from the GitHub API, so try to build the first language on the list. const statusReportBase = await util.createStatusReportBase('autobuild', status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
const autobuildLanguages = config.languages.filter(languages_1.isTracedLanguage); const statusReport = {
const language = autobuildLanguages[0]; ...statusReportBase,
if (!language) { autobuild_languages: allLanguages.join(','),
logger.info("None of the languages in this project require extra build steps"); autobuild_failure: failingLanguage,
return undefined; };
} await util.sendStatusReport(statusReport);
logger.debug(`Detected dominant traced language: ${language}`);
if (autobuildLanguages.length > 1) {
logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages
.slice(1)
.join(" and ")}, you must replace this call with custom build steps.`);
}
return language;
} }
exports.determineAutobuildLanguage = determineAutobuildLanguage; async function run() {
async function runAutobuild(language, config, logger) { var _a;
logger.startGroup(`Attempting to automatically build ${language} code`); const startedAt = new Date();
const codeQL = codeql_1.getCodeQL(config.codeQLCmd); let language;
await codeQL.runAutobuild(language); try {
logger.endGroup(); if (util.should_abort('autobuild', true) ||
!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
return;
}
// Attempt to find a language to autobuild
// We want to pick the dominant language in the repo from the ones we're able to build
// The languages are sorted in order specified by user or by lines of code if we got
// them from the GitHub API, so try to build the first language on the list.
const autobuildLanguages = ((_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];
language = autobuildLanguages[0];
if (!language) {
core.info("None of the languages in this project require extra build steps");
return;
}
core.debug(`Detected dominant traced language: ${language}`);
if (autobuildLanguages.length > 1) {
core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
}
core.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = codeql_1.getCodeQL();
await codeQL.runAutobuild(language);
core.endGroup();
}
catch (error) {
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
await sendCompletedStatusReport(startedAt, [language], language, error);
return;
}
await sendCompletedStatusReport(startedAt, [language]);
} }
exports.runAutobuild = runAutobuild; run().catch(e => {
core.setFailed("autobuild action failed. " + e);
console.log(e);
});
//# sourceMappingURL=autobuild.js.map //# sourceMappingURL=autobuild.js.map
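
Both versions above reduce autobuild to the same decision: pick the first traced (compiled) language from the configured list and warn when more were requested. A self-contained sketch of that selection, with an illustrative stand-in for the isTracedLanguage check and a console-backed logger:

// Stand-in for the action's isTracedLanguage check: compiled languages that need
// a build step before extraction (the membership below is illustrative).
const TRACED = new Set(["cpp", "csharp", "java"]);
const logger = { info: console.log, warning: console.log };

function pickAutobuildLanguage(languages) {
  const autobuildLanguages = languages.filter((l) => TRACED.has(l));
  const language = autobuildLanguages[0];
  if (!language) {
    logger.info("None of the languages in this project require extra build steps");
    return undefined;
  }
  if (autobuildLanguages.length > 1) {
    logger.warning(`We will only automatically build ${language} code. If you wish to scan ` +
      `${autobuildLanguages.slice(1).join(" and ")}, you must replace this call with custom build steps.`);
  }
  return language;
}

console.log(pickAutobuildLanguage(["javascript", "java", "cpp"]));
// -> warns about cpp and prints "java"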

File diff suppressed because one or more lines are too long

lib/codeql.js generated
View File

@@ -10,8 +10,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod }; return (mod && mod.__esModule) ? mod : { "default": mod };
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner")); const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const http = __importStar(require("@actions/http-client")); const http = __importStar(require("@actions/http-client"));
const io = __importStar(require("@actions/io"));
const toolcache = __importStar(require("@actions/tool-cache")); const toolcache = __importStar(require("@actions/tool-cache"));
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
@@ -19,213 +21,150 @@ const semver = __importStar(require("semver"));
const stream = __importStar(require("stream")); const stream = __importStar(require("stream"));
const globalutil = __importStar(require("util")); const globalutil = __importStar(require("util"));
const v4_1 = __importDefault(require("uuid/v4")); const v4_1 = __importDefault(require("uuid/v4"));
const actions_util_1 = require("./actions-util");
const api = __importStar(require("./api-client")); const api = __importStar(require("./api-client"));
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
const error_matcher_1 = require("./error-matcher");
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
const util = __importStar(require("./util")); const util = __importStar(require("./util"));
/** /**
* Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`. * Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
* Can be overridden in tests using `setCodeQL`. * Can be overridden in tests using `setCodeQL`.
*/ */
let cachedCodeQL = undefined; let cachedCodeQL = undefined;
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion; /**
* Environment variable used to store the location of the CodeQL CLI executable.
* Value is set by setupCodeQL and read by getCodeQL.
*/
const CODEQL_ACTION_CMD = "CODEQL_ACTION_CMD";
const CODEQL_BUNDLE_VERSION = "codeql-bundle-20200630";
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz"; const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const GITHUB_DOTCOM_API_URL = "https://api.github.com";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action"; const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLActionRepository(mode) { function getInstanceAPIURL() {
if (mode !== "actions") { return process.env["GITHUB_API_URL"] || GITHUB_DOTCOM_API_URL;
return CODEQL_DEFAULT_ACTION_REPOSITORY; }
} function getCodeQLActionRepository() {
// Actions do not know their own repository name, // Actions do not know their own repository name,
// so we currently use this hack to find the name based on where our files are. // so we currently use this hack to find the name based on where our files are.
// This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed. // This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
const runnerTemp = actions_util_1.getRequiredEnvParam("RUNNER_TEMP"); const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions"); const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
const relativeScriptPath = path.relative(actionsDirectory, __filename); const relativeScriptPath = path.relative(actionsDirectory, __filename);
// This handles the case where the Action does not come from an Action repository, // This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout. // e.g. our integration tests which use the Action code from the current checkout.
if (relativeScriptPath.startsWith("..") || if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
path.isAbsolute(relativeScriptPath)) {
return CODEQL_DEFAULT_ACTION_REPOSITORY; return CODEQL_DEFAULT_ACTION_REPOSITORY;
} }
const relativeScriptPathParts = relativeScriptPath.split(path.sep); const relativeScriptPathParts = relativeScriptPath.split(path.sep);
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`; return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
} }
async function getCodeQLBundleDownloadURL(bundleNames, githubAuth, githubUrl, mode, logger) { async function getCodeQLBundleDownloadURL() {
const codeQLActionRepository = getCodeQLActionRepository(mode); const codeQLActionRepository = getCodeQLActionRepository();
const potentialDownloadSources = [ const potentialDownloadSources = [
// This GitHub instance, and this Action. // This GitHub instance, and this Action.
[githubUrl, codeQLActionRepository], [getInstanceAPIURL(), codeQLActionRepository],
// This GitHub instance, and the canonical Action. // This GitHub instance, and the canonical Action.
[githubUrl, CODEQL_DEFAULT_ACTION_REPOSITORY], [getInstanceAPIURL(), CODEQL_DEFAULT_ACTION_REPOSITORY],
// GitHub.com, and the canonical Action. // GitHub.com, and the canonical Action.
[util.GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY], [GITHUB_DOTCOM_API_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
]; ];
// We now filter out any duplicates. // We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork. // Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url)); const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
for (const downloadSource of uniqueDownloadSources) { for (let downloadSource of uniqueDownloadSources) {
const [apiURL, repository] = downloadSource; let [apiURL, repository] = downloadSource;
// If we've reached the final case, short-circuit the API check since we know the bundles exist and are public. // If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
if (apiURL === util.GITHUB_DOTCOM_URL && if (apiURL === GITHUB_DOTCOM_API_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
repository === CODEQL_DEFAULT_ACTION_REPOSITORY) { break;
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${bundleNames[0]}`;
} }
const [repositoryOwner, repositoryName] = repository.split("/"); let [repositoryOwner, repositoryName] = repository.split("/");
try { try {
const release = await api const release = await api.getApiClient().repos.getReleaseByTag({
.getApiClient(githubAuth, githubUrl)
.repos.getReleaseByTag({
owner: repositoryOwner, owner: repositoryOwner,
repo: repositoryName, repo: repositoryName,
tag: CODEQL_BUNDLE_VERSION, tag: CODEQL_BUNDLE_VERSION
}); });
// See if any of the bundles appears in the assets list for (let asset of release.data.assets) {
const assetMap = new Map(release.data.assets.map((x) => [x.name, x])); if (asset.name === CODEQL_BUNDLE_NAME) {
for (const bundleName of bundleNames) { core.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
logger.debug(`Looking for ${bundleName}`);
const asset = assetMap.get(bundleName);
if (asset) {
logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
return asset.url; return asset.url;
} }
} }
} }
catch (e) { catch (e) {
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`); core.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
} }
} }
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`; return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
} }
exports.getCodeQLBundleDownloadURL = getCodeQLBundleDownloadURL;
// We have to download CodeQL manually because the toolcache doesn't support Accept headers. // We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released. // This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url, headers, tempDir, logger) { async function toolcacheDownloadTool(url, headers) {
const client = new http.HttpClient("CodeQL Action"); const client = new http.HttpClient('CodeQL Action');
const dest = path.join(tempDir, v4_1.default()); const dest = path.join(util.getRequiredEnvParam('RUNNER_TEMP'), v4_1.default());
const response = await client.get(url, headers); const response = await client.get(url, headers);
if (response.message.statusCode !== 200) { if (response.message.statusCode !== 200) {
logger.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); const err = new toolcache.HTTPError(response.message.statusCode);
throw new Error(`Unexpected HTTP response: ${response.message.statusCode}`); core.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
throw err;
} }
const pipeline = globalutil.promisify(stream.pipeline); const pipeline = globalutil.promisify(stream.pipeline);
fs.mkdirSync(path.dirname(dest), { recursive: true }); await io.mkdirP(path.dirname(dest));
await pipeline(response.message, fs.createWriteStream(dest)); await pipeline(response.message, fs.createWriteStream(dest));
return dest; return dest;
} }
async function setupCodeQL(codeqlURL, languages, githubAuth, githubUrl, tempDir, toolsDir, mode, logger) { async function setupCodeQL() {
// Setting these two env vars makes the toolcache code safe to use outside
// of actions but this is obviously not a great thing we're doing and it would
// be better to write our own implementation to use outside of actions.
process.env["RUNNER_TEMP"] = tempDir;
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
// The URL identifies the release version. E.g., codeql-20200901.
// The plVersion identifies the platform-language combination of the package
// within the release. E.g., `linux64-cpp` in `codeql-linux64-cpp.tar.gz`.
// We expect the codeqlUrl (when given) to always point to the main bundle
// `codeql-bundle.tar.gz`
//
// The logic is as follows:
// - Always use the Toolcache if available.
// - If we would like a platform-language package, but have the
// full bundle in the cache, use that.
// - If codeqlURL is specified, use that.
// - If a single language is being analyzed, try to download the platform-language package.
// - If it is not available in the release assets, fall back to the full bundle
// - If multiple languages are being analyzed, use the full bundle
let plVersion = undefined;
let platform;
if (process.platform === "win32") {
platform = "win64";
}
else if (process.platform === "linux") {
platform = "linux64";
}
else if (process.platform === "darwin") {
platform = "osx64";
}
else {
throw new Error(`Unsupported platform: ${process.platform}`);
}
if (languages.length === 1) {
plVersion = `${platform}-${languages[0]}`;
}
try { try {
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`, logger); let codeqlURL = core.getInput('tools');
let codeqlFolder; const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
logger.debug(`PL Version ${plVersion}`); let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
if (plVersion) { if (codeqlFolder) {
codeqlFolder = toolcache.find("CodeQL", `${codeqlURLVersion}-${plVersion}`); core.debug(`CodeQL found in cache ${codeqlFolder}`);
if (codeqlFolder) {
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
}
} }
if (!codeqlFolder) { else {
codeqlFolder = toolcache.find("CodeQL", codeqlURLVersion);
if (codeqlFolder) {
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
}
}
if (!codeqlFolder) {
const codeqlToolcacheVersion = plVersion
? `${codeqlURLVersion}-${plVersion}`
: codeqlURLVersion;
logger.debug(`CodeQL not found in cache`);
if (!codeqlURL) { if (!codeqlURL) {
// Provide a few options, from smaller to bigger codeqlURL = await getCodeQLBundleDownloadURL();
const bundles = [];
if (plVersion) {
bundles.push(CODEQL_BUNDLE_NAME.replace("-bundle", `-bundle-${plVersion}`));
}
bundles.push(CODEQL_BUNDLE_NAME.replace("-bundle", `-bundle-${platform}`));
bundles.push(CODEQL_BUNDLE_NAME);
codeqlURL = await getCodeQLBundleDownloadURL(bundles, githubAuth, githubUrl, mode, logger);
} }
logger.debug(`Using CodeQL URL: ${codeqlURL}`); const headers = { accept: 'application/octet-stream' };
const headers = { accept: "application/octet-stream" };
// We only want to provide an authorization header if we are downloading // We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on. // from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom. // This avoids leaking Enterprise tokens to dotcom.
if (codeqlURL.startsWith(`${githubUrl}/`)) { if (codeqlURL.startsWith(getInstanceAPIURL() + "/")) {
logger.debug("Downloading CodeQL bundle with token."); core.debug('Downloading CodeQL bundle with token.');
headers.authorization = `token ${githubAuth}`; let token = core.getInput('token', { required: true });
headers.authorization = `token ${token}`;
} }
else { else {
logger.debug("Downloading CodeQL bundle without token."); core.debug('Downloading CodeQL bundle without token.');
} }
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`); let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers);
const codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger); core.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
const codeqlExtracted = await toolcache.extractTar(codeqlPath); const codeqlExtracted = await toolcache.extractTar(codeqlPath);
logger.debug(`Caching ${codeqlToolcacheVersion}`); codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlToolcacheVersion);
} }
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql"); let codeqlCmd = path.join(codeqlFolder, 'codeql', 'codeql');
if (process.platform === "win32") { if (process.platform === 'win32') {
codeqlCmd += ".exe"; codeqlCmd += ".exe";
} }
else if (process.platform !== "linux" && process.platform !== "darwin") { else if (process.platform !== 'linux' && process.platform !== 'darwin') {
throw new Error(`Unsupported platform: ${process.platform}`); throw new Error("Unsupported platform: " + process.platform);
} }
cachedCodeQL = getCodeQLForCmd(codeqlCmd); cachedCodeQL = getCodeQLForCmd(codeqlCmd);
core.exportVariable(CODEQL_ACTION_CMD, codeqlCmd);
return cachedCodeQL; return cachedCodeQL;
} }
catch (e) { catch (e) {
logger.error(e); core.error(e);
throw new Error("Unable to download and extract CodeQL CLI"); throw new Error("Unable to download and extract CodeQL CLI");
} }
} }
exports.setupCodeQL = setupCodeQL; exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url, logger) { function getCodeQLURLVersion(url) {
const match = url.match(/\/codeql-bundle-(.*)\//); const match = url.match(/\/codeql-bundle-(.*)\//);
if (match === null || match.length < 2) { if (match === null || match.length < 2) {
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`); throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
} }
let version = match[1]; let version = match[1];
if (!semver.valid(version)) { if (!semver.valid(version)) {
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`); core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = `0.0.0-${version}`; version = '0.0.0-' + version;
} }
const s = semver.clean(version); const s = semver.clean(version);
if (!s) { if (!s) {
@@ -234,23 +173,18 @@ function getCodeQLURLVersion(url, logger) {
return s; return s;
} }
exports.getCodeQLURLVersion = getCodeQLURLVersion; exports.getCodeQLURLVersion = getCodeQLURLVersion;
/** function getCodeQL() {
* Use the CodeQL executable located at the given path.
*/
function getCodeQL(cmd) {
if (cachedCodeQL === undefined) { if (cachedCodeQL === undefined) {
cachedCodeQL = getCodeQLForCmd(cmd); const codeqlCmd = util.getRequiredEnvParam(CODEQL_ACTION_CMD);
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
} }
return cachedCodeQL; return cachedCodeQL;
} }
exports.getCodeQL = getCodeQL; exports.getCodeQL = getCodeQL;
function resolveFunction(partialCodeql, methodName, defaultImplementation) { function resolveFunction(partialCodeql, methodName) {
if (typeof partialCodeql[methodName] !== "function") { if (typeof partialCodeql[methodName] !== 'function') {
if (defaultImplementation !== undefined) {
return defaultImplementation;
}
const dummyMethod = () => { const dummyMethod = () => {
throw new Error(`CodeQL ${methodName} method not correctly defined`); throw new Error('CodeQL ' + methodName + ' method not correctly defined');
}; };
return dummyMethod; return dummyMethod;
} }
@@ -264,218 +198,131 @@ function resolveFunction(partialCodeql, methodName, defaultImplementation) {
*/ */
function setCodeQL(partialCodeql) { function setCodeQL(partialCodeql) {
cachedCodeQL = { cachedCodeQL = {
getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"), getDir: resolveFunction(partialCodeql, 'getDir'),
printVersion: resolveFunction(partialCodeql, "printVersion"), printVersion: resolveFunction(partialCodeql, 'printVersion'),
getTracerEnv: resolveFunction(partialCodeql, "getTracerEnv"), getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
databaseInit: resolveFunction(partialCodeql, "databaseInit"), databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
runAutobuild: resolveFunction(partialCodeql, "runAutobuild"), runAutobuild: resolveFunction(partialCodeql, 'runAutobuild'),
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"), extractScannedLanguage: resolveFunction(partialCodeql, 'extractScannedLanguage'),
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"), finalizeDatabase: resolveFunction(partialCodeql, 'finalizeDatabase'),
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"), resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
databaseAnalyze: resolveFunction(partialCodeql, "databaseAnalyze"), databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
}; };
return cachedCodeQL;
} }
exports.setCodeQL = setCodeQL; exports.setCodeQL = setCodeQL;
/**
* Get the cached CodeQL object. Should only be used from tests.
*
* TODO: Work out a good way for tests to get this from the test context
* instead of having to have this method.
*/
function getCachedCodeQL() {
if (cachedCodeQL === undefined) {
// Should never happen as setCodeQL is called by testing-utils.setupTests
throw new Error("cachedCodeQL undefined");
}
return cachedCodeQL;
}
exports.getCachedCodeQL = getCachedCodeQL;
function getCodeQLForCmd(cmd) { function getCodeQLForCmd(cmd) {
return { return {
getPath() { getDir: function () {
return cmd; return path.dirname(cmd);
}, },
async printVersion() { printVersion: async function () {
await new toolrunnner.ToolRunner(cmd, [ await exec.exec(cmd, [
"version", 'version',
"--format=json", '--format=json'
]).exec(); ]);
}, },
async getTracerEnv(databasePath) { getTracerEnv: async function (databasePath, compilerSpec) {
// Write tracer-env.js to a temp location. let envFile = path.resolve(databasePath, 'working', 'env.tmp');
const tracerEnvJs = path.resolve(databasePath, "working", "tracer-env.js"); const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
fs.mkdirSync(path.dirname(tracerEnvJs), { recursive: true }); await exec.exec(cmd, [
fs.writeFileSync(tracerEnvJs, ` 'database',
const fs = require('fs'); 'trace-command',
const env = {};
for (let entry of Object.entries(process.env)) {
const key = entry[0];
const value = entry[1];
if (typeof value !== 'undefined' && key !== '_' && !key.startsWith('JAVA_MAIN_CLASS_')) {
env[key] = value;
}
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');`);
const envFile = path.resolve(databasePath, "working", "env.tmp");
await new toolrunnner.ToolRunner(cmd, [
"database",
"trace-command",
databasePath, databasePath,
...getExtraOptionsFromEnv(["database", "trace-command"]), ...compilerSpecArg,
process.execPath, process.execPath,
tracerEnvJs, path.resolve(__dirname, 'tracer-env.js'),
envFile, envFile
]).exec(); ]);
return JSON.parse(fs.readFileSync(envFile, "utf-8")); return JSON.parse(fs.readFileSync(envFile, 'utf-8'));
}, },
async databaseInit(databasePath, language, sourceRoot) { databaseInit: async function (databasePath, language, sourceRoot) {
await new toolrunnner.ToolRunner(cmd, [ await exec.exec(cmd, [
"database", 'database',
"init", 'init',
databasePath, databasePath,
`--language=${language}`, '--language=' + language,
`--source-root=${sourceRoot}`, '--source-root=' + sourceRoot,
...getExtraOptionsFromEnv(["database", "init"]), ]);
]).exec();
}, },
async runAutobuild(language) { runAutobuild: async function (language) {
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh"; const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
const autobuildCmd = path.join(path.dirname(cmd), language, "tools", cmdName); const autobuildCmd = path.join(path.dirname(cmd), language, 'tools', cmdName);
// Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false' // Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
// This is because of an issue with Azure pipelines timing out connections after 4 minutes // This is because of an issue with Azure pipelines timing out connections after 4 minutes
// and Maven not properly handling closed connections // and Maven not properly handling closed connections
// Otherwise long build processes will time out when pulling down Java packages // Otherwise long build processes will time out when pulling down Java packages
// https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html // https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
const javaToolOptions = process.env["JAVA_TOOL_OPTIONS"] || ""; let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
process.env["JAVA_TOOL_OPTIONS"] = [ process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
...javaToolOptions.split(/\s+/), await exec.exec(autobuildCmd);
"-Dhttp.keepAlive=false",
"-Dmaven.wagon.http.pool=false",
].join(" ");
await new toolrunnner.ToolRunner(autobuildCmd).exec();
}, },
async extractScannedLanguage(databasePath, language) { extractScannedLanguage: async function (databasePath, language) {
// Get extractor location // Get extractor location
let extractorPath = ""; let extractorPath = '';
await new toolrunnner.ToolRunner(cmd, [ await exec.exec(cmd, [
"resolve", 'resolve',
"extractor", 'extractor',
"--format=json", '--format=json',
`--language=${language}`, '--language=' + language
...getExtraOptionsFromEnv(["resolve", "extractor"]),
], { ], {
silent: true, silent: true,
listeners: { listeners: {
stdout: (data) => { stdout: (data) => { extractorPath += data.toString(); },
extractorPath += data.toString(); stderr: (data) => { process.stderr.write(data); }
}, }
stderr: (data) => { });
process.stderr.write(data);
},
},
}).exec();
// Set trace command // Set trace command
const ext = process.platform === "win32" ? ".cmd" : ".sh"; const ext = process.platform === 'win32' ? '.cmd' : '.sh';
const traceCommand = path.resolve(JSON.parse(extractorPath), "tools", `autobuild${ext}`); const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
// Run trace command // Run trace command
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [ await exec.exec(cmd, [
"database", 'database',
"trace-command", 'trace-command',
...getExtraOptionsFromEnv(["database", "trace-command"]),
databasePath, databasePath,
"--", '--',
traceCommand, traceCommand
], error_matcher_1.errorMatchers); ]);
}, },
async finalizeDatabase(databasePath) { finalizeDatabase: async function (databasePath) {
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [ await exec.exec(cmd, [
"database", 'database',
"finalize", 'finalize',
...getExtraOptionsFromEnv(["database", "finalize"]), databasePath
databasePath, ]);
], error_matcher_1.errorMatchers);
}, },
async resolveQueries(queries, extraSearchPath) { resolveQueries: async function (queries, extraSearchPath) {
const codeqlArgs = [ const codeqlArgs = [
"resolve", 'resolve',
"queries", 'queries',
...queries, ...queries,
"--format=bylanguage", '--format=bylanguage'
...getExtraOptionsFromEnv(["resolve", "queries"]),
]; ];
if (extraSearchPath !== undefined) { if (extraSearchPath !== undefined) {
codeqlArgs.push("--search-path", extraSearchPath); codeqlArgs.push('--search-path', extraSearchPath);
} }
let output = ""; let output = '';
await new toolrunnner.ToolRunner(cmd, codeqlArgs, { await exec.exec(cmd, codeqlArgs, {
listeners: { listeners: {
stdout: (data) => { stdout: (data) => {
output += data.toString(); output += data.toString();
}, }
}, }
}).exec(); });
return JSON.parse(output); return JSON.parse(output);
}, },
async databaseAnalyze(databasePath, sarifFile, querySuite, memoryFlag, addSnippetsFlag, threadsFlag) { databaseAnalyze: async function (databasePath, sarifFile, querySuite) {
await new toolrunnner.ToolRunner(cmd, [ await exec.exec(cmd, [
"database", 'database',
"analyze", 'analyze',
memoryFlag, util.getMemoryFlag(),
threadsFlag, util.getThreadsFlag(),
databasePath, databasePath,
"--format=sarif-latest", '--format=sarif-latest',
`--output=${sarifFile}`, '--output=' + sarifFile,
addSnippetsFlag, '--no-sarif-add-snippets',
...getExtraOptionsFromEnv(["database", "analyze"]), querySuite
querySuite, ]);
]).exec(); }
},
}; };
} }
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*/
function getExtraOptionsFromEnv(path) {
const options = util.getExtraOptionsEnvParam();
return getExtraOptions(options, path, []);
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*
* - the special terminal step name '*' in `options` matches all path steps
* - throws an exception if this conversion is impossible.
*/
function getExtraOptions(options, path, pathInfo) {
var _a, _b, _c;
/**
* Gets `options` as an array of extra option strings.
*
* - throws an exception mentioning `pathInfo` if this conversion is impossible.
*/
function asExtraOptions(options, pathInfo) {
if (options === undefined) {
return [];
}
if (!Array.isArray(options)) {
const msg = `The extra options for '${pathInfo.join(".")}' ('${JSON.stringify(options)}') are not in an array.`;
throw new Error(msg);
}
return options.map((o) => {
const t = typeof o;
if (t !== "string" && t !== "number" && t !== "boolean") {
const msg = `The extra option for '${pathInfo.join(".")}' ('${JSON.stringify(o)}') is not a primitive value.`;
throw new Error(msg);
}
return `${o}`;
});
}
const all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a["*"], pathInfo.concat("*"));
const specific = path.length === 0
? asExtraOptions(options, pathInfo)
: getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[path[0]], (_c = path) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(path[0]));
return all.concat(specific);
}
exports.getExtraOptions = getExtraOptions;
//# sourceMappingURL=codeql.js.map //# sourceMappingURL=codeql.js.map
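
The getExtraOptions helper above merges any '*' wildcard entries with the entries registered for a specific CodeQL command path, letting users inject extra CLI flags via util.getExtraOptionsEnvParam(). Below is a trimmed re-implementation of that lookup, with an invented options object and without the type validation of the original.

// Simplified version of the lookup above; type checking and error messages omitted.
function getExtraOptions(options, cmdPath) {
  if (options === undefined) {
    return [];
  }
  const wildcard = Array.isArray(options["*"]) ? options["*"].map(String) : [];
  const specific =
    cmdPath.length === 0
      ? (Array.isArray(options) ? options.map(String) : [])
      : getExtraOptions(options[cmdPath[0]], cmdPath.slice(1));
  return wildcard.concat(specific);
}

// Invented example: global flags, database-wide flags, and analyze-only flags.
const extraOptions = {
  "*": ["--verbose"],
  database: {
    "*": ["--ram=4000"],
    analyze: ["--threads=2"],
  },
};

console.log(getExtraOptions(extraOptions, ["database", "analyze"]));
// -> [ '--verbose', '--ram=4000', '--threads=2' ]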

File diff suppressed because one or more lines are too long

lib/codeql.test.js generated
View File

@@ -10,160 +10,46 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod }; return (mod && mod.__esModule) ? mod : { "default": mod };
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const github = __importStar(require("@actions/github"));
const toolcache = __importStar(require("@actions/tool-cache")); const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava")); const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock")); const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const sinon_1 = __importDefault(require("sinon"));
const api = __importStar(require("./api-client"));
const codeql = __importStar(require("./codeql")); const codeql = __importStar(require("./codeql"));
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils"); const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util")); const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default); testing_utils_1.setupTests(ava_1.default);
ava_1.default("download and populate codeql bundle cache", async (t) => { ava_1.default('download codeql bundle cache', async (t) => {
await util.withTmpDir(async (tmpDir) => { await util.withTmpDir(async (tmpDir) => {
const versions = ["20200601", "20200610"]; process.env['GITHUB_WORKSPACE'] = tmpDir;
const languages = [ process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
[languages_1.Language.cpp], process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
[languages_1.Language.cpp, languages_1.Language.python], const versions = ['20200601', '20200610'];
];
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: process.platform === "darwin"
? "osx64"
: undefined;
for (let i = 0; i < versions.length; i++) { for (let i = 0; i < versions.length; i++) {
for (let j = 0; j < languages.length; j++) { const version = versions[i];
const version = versions[i]; nock_1.default('https://example.com')
const plVersion = languages[j].length === 1 .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
? `${platform}-${languages[j][0]}`
: undefined;
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, languages[j], "token", "https://github.example.com", tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
const toolcacheVersion = plVersion
? `0.0.0-${version}-${plVersion}`
: `0.0.0-${version}`;
t.assert(toolcache.find("CodeQL", toolcacheVersion), `Looking for ${toolcacheVersion}`);
}
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
// We should now have 4 cached versions: e.g.,
// 20200601, 20200601-linux64-cpp, 20200610, 20200610-linux64-cpp
t.is(cachedVersions.length, 4);
});
});
ava_1.default("download small codeql bundle if analyzing only one language", async (t) => {
// Note: We do not specify a codeqlURL in this test, thus testing that
// the logic for constructing the URL takes into account the
// language being analyzed
await util.withTmpDir(async (tmpDir) => {
const languages = [
[languages_1.Language.cpp],
[languages_1.Language.cpp, languages_1.Language.python],
];
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: process.platform === "darwin"
? "osx64"
: undefined;
for (let i = 0; i < languages.length; i++) {
const plVersion = languages[i].length === 1
? `${platform}-${languages[i][0]}`
: undefined;
const pkg = plVersion
? `codeql-bundle-${plVersion}.tar.gz`
: "codeql-bundle.tar.gz";
// Mock the API client
const client = new github.GitHub("123");
const response = {
data: {
assets: [
{
name: `codeql-bundle-${platform}-cpp.tar.gz`,
url: `https://github.example.com/url/codeql-bundle-${platform}-cpp.tar.gz`,
},
{
name: "codeql-bundle.tar.gz",
url: "https://github.example.com/url/codeql-bundle.tar.gz",
},
],
},
};
sinon_1.default.stub(client.repos, "getReleaseByTag").resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client);
nock_1.default("https://github.example.com")
.get(`/url/${pkg}`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`)); .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(undefined, languages[i], "token", "https://github.example.com", tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true)); process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
const parsedVersion = codeql.getCodeQLURLVersion(`/${defaults.bundleVersion}/`, logging_1.getRunnerLogger(true)); await codeql.setupCodeQL();
const toolcacheVersion = plVersion t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
? `${parsedVersion}-${plVersion}`
: parsedVersion;
t.assert(toolcache.find("CodeQL", toolcacheVersion), `Looking for ${toolcacheVersion} - ${plVersion}`);
} }
const cachedVersions = toolcache.findAllVersions("CodeQL"); const cachedVersions = toolcache.findAllVersions('CodeQL');
t.is(cachedVersions.length, 2); t.is(cachedVersions.length, 2);
}); });
}); });
ava_1.default("use full codeql bundle cache if smaller bundle is not available", async (t) => { ava_1.default('parse codeql bundle url version', t => {
// If we look for a platform-language version but find the full bundle in the cache,
// we use the full bundle
await util.withTmpDir(async (tmpDir) => {
const version = "20200601";
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, [], "token", "https://github.example.com", tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
t.is(toolcache.findAllVersions("CodeQL").length, 1);
// Now try to request the cpp version, and see that we do not change the cache
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, [languages_1.Language.cpp], "token", "https://github.example.com", tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
t.is(toolcache.findAllVersions("CodeQL").length, 1);
});
});
ava_1.default("use larger bundles if smaller ones are not released", async (t) => {
// Mock the API client
const client = new github.GitHub("123");
const response = {
data: {
assets: [{ name: "full-bundle", url: "url/file.gz" }],
},
};
const getReleaseByTagMock = sinon_1.default
.stub(client.repos, "getReleaseByTag")
.resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client);
// Setting this env is required by a dependency of getCodeQLBundleDownloadURL
process.env["RUNNER_TEMP"] = "abc";
const codeqlURL = await codeql.getCodeQLBundleDownloadURL(["small-bundle", "full-bundle"], "", "", "actions", logging_1.getRunnerLogger(true));
t.deepEqual(codeqlURL, "url/file.gz");
t.assert(getReleaseByTagMock.called);
});
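The assertion above implies a simple fallback rule: candidate bundle names are tried in order against the release assets and the first one that exists wins, so the full bundle is used whenever a smaller platform-language bundle has not been published. A hedged sketch of that selection step (pickBundleAsset is an illustrative name, not the action's API):

// Try each candidate bundle name in order and return the URL of the first
// asset that the release actually contains.
function pickBundleAsset(candidateNames, releaseAssets) {
    for (const name of candidateNames) {
        const asset = releaseAssets.find((a) => a.name === name);
        if (asset !== undefined) {
            return asset.url;
        }
    }
    return undefined;
}
// With assets [{ name: "full-bundle", url: "url/file.gz" }] and candidates
// ["small-bundle", "full-bundle"], this returns "url/file.gz", matching the test.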
ava_1.default("parse codeql bundle url version", (t) => {
const tests = { const tests = {
"20200601": "0.0.0-20200601", '20200601': '0.0.0-20200601',
"20200601.0": "0.0.0-20200601.0", '20200601.0': '0.0.0-20200601.0',
"20200601.0.0": "20200601.0.0", '20200601.0.0': '20200601.0.0',
"1.2.3": "1.2.3", '1.2.3': '1.2.3',
"1.2.3-alpha": "1.2.3-alpha", '1.2.3-alpha': '1.2.3-alpha',
"1.2.3-beta.1": "1.2.3-beta.1", '1.2.3-beta.1': '1.2.3-beta.1',
"20200601-linux64-python": "0.0.0-20200601-linux64-python",
}; };
for (const [version, expectedVersion] of Object.entries(tests)) { for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`; const url = `https://github.com/.../codeql-bundle-${version}/...`;
try { try {
const parsedVersion = codeql.getCodeQLURLVersion(url, logging_1.getRunnerLogger(true)); const parsedVersion = codeql.getCodeQLURLVersion(url);
t.deepEqual(parsedVersion, expectedVersion); t.deepEqual(parsedVersion, expectedVersion);
} }
catch (e) { catch (e) {
@@ -171,26 +57,4 @@ ava_1.default("parse codeql bundle url version", (t) => {
} }
} }
}); });
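The table above encodes the normalisation rule for bundle tags: the segment after "codeql-bundle-" is kept as-is when it is already valid semver, and otherwise prefixed with "0.0.0-" so date-style tags become semver prereleases. A minimal sketch, assuming the semver package is available (the action's real implementation may differ in details):

const semver = require("semver");

function urlToToolcacheVersion(url) {
    // Pull out the segment between "codeql-bundle-" and the next "/".
    const match = url.match(/codeql-bundle-([^/]+)\//);
    if (match === null) {
        throw new Error(`Malformed bundle URL: ${url}`);
    }
    const version = match[1];
    return semver.valid(version) ? version : `0.0.0-${version}`;
}
// urlToToolcacheVersion("https://github.com/.../codeql-bundle-20200601/...") === "0.0.0-20200601"
// urlToToolcacheVersion("https://github.com/.../codeql-bundle-1.2.3/...")    === "1.2.3"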
ava_1.default("getExtraOptions works for explicit paths", (t) => {
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ["foo", "bar"], []), ["42"]);
});
ava_1.default("getExtraOptions works for wildcards", (t) => {
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
});
ava_1.default("getExtraOptions works for wildcards and explicit paths", (t) => {
const o1 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
const o2 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o2, ["foo", "bar"], []), ["42"]);
const o3 = { "*": [42], foo: { "*": [87], bar: [99] } };
const p = ["foo", "bar"];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
});
ava_1.default("getExtraOptions throws for bad content", (t) => {
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ "*": [42], foo: { "*": 87, bar: [99] } }, ["foo", "bar"], []));
});
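The getExtraOptions tests above fix the lookup semantics: the options tree is walked along the given path, every node on the way contributes its "*" wildcard entries, the node at the end of the path contributes its own array, every value is stringified, and anything that should be an array but is not causes an error. A sketch that reproduces the behaviour asserted above (illustrative reimplementation, not the action's code):

function getExtraOptionsSketch(options, pathInfo) {
    if (options === undefined) {
        return [];
    }
    const asStrings = (value) => {
        if (!Array.isArray(value)) {
            throw new Error(`Expected an array at ${pathInfo.join(".") || "<root>"}`);
        }
        return value.map((v) => `${v}`);
    };
    if (pathInfo.length === 0) {
        // End of the path: the node itself must be an array of options.
        return asStrings(options);
    }
    // Mid-path: only object nodes can be descended into; their "*" entry applies
    // to every child, so it is collected before recursing into the named child.
    if (typeof options !== "object" || Array.isArray(options)) {
        return [];
    }
    const wildcard = options["*"] === undefined ? [] : asStrings(options["*"]);
    return wildcard.concat(getExtraOptionsSketch(options[pathInfo[0]], pathInfo.slice(1)));
}
// getExtraOptionsSketch({ "*": [42], foo: { "*": [87], bar: [99] } }, ["foo", "bar"])
//   === ["42", "87", "99"], matching the wildcard test above.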
//# sourceMappingURL=codeql.test.js.map //# sourceMappingURL=codeql.test.js.map

File diff suppressed because one or more lines are too long

lib/config-utils.js (generated, 403 changed lines)

@@ -7,18 +7,22 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result; return result;
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const yaml = __importStar(require("js-yaml")); const yaml = __importStar(require("js-yaml"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const api = __importStar(require("./api-client")); const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries")); const externalQueries = __importStar(require("./external-queries"));
const util = __importStar(require("./util"));
// Property names from the user-supplied config file. // Property names from the user-supplied config file.
const NAME_PROPERTY = "name"; const NAME_PROPERTY = 'name';
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries"; const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
const QUERIES_PROPERTY = "queries"; const QUERIES_PROPERTY = 'queries';
const QUERIES_USES_PROPERTY = "uses"; const QUERIES_USES_PROPERTY = 'uses';
const PATHS_IGNORE_PROPERTY = "paths-ignore"; const PATHS_IGNORE_PROPERTY = 'paths-ignore';
const PATHS_PROPERTY = "paths"; const PATHS_PROPERTY = 'paths';
/** /**
* A list of queries from https://github.com/github/codeql that * A list of queries from https://github.com/github/codeql that
* we don't want to run. Disabling them here is a quicker alternative to * we don't want to run. Disabling them here is a quicker alternative to
@@ -29,13 +33,14 @@ const PATHS_PROPERTY = "paths";
* Format is a map from language to an array of path suffixes of .ql files. * Format is a map from language to an array of path suffixes of .ql files.
*/ */
const DISABLED_BUILTIN_QUERIES = { const DISABLED_BUILTIN_QUERIES = {
csharp: [ 'csharp': [
"ql/src/Security Features/CWE-937/VulnerablePackage.ql", 'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
"ql/src/Security Features/CWE-451/MissingXFrameOptions.ql", 'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
], ]
}; };
function queryIsDisabled(language, query) { function queryIsDisabled(language, query) {
return (DISABLED_BUILTIN_QUERIES[language] || []).some((disabledQuery) => query.endsWith(disabledQuery)); return (DISABLED_BUILTIN_QUERIES[language] || [])
.some(disabledQuery => query.endsWith(disabledQuery));
} }
/** /**
* Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are * Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are
@@ -45,73 +50,63 @@ function validateQueries(resolvedQueries) {
const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage; const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage); const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
if (noDeclaredLanguageQueries.length !== 0) { if (noDeclaredLanguageQueries.length !== 0) {
throw new Error(`${"The following queries do not declare a language. " + throw new Error('The following queries do not declare a language. ' +
"Their qlpack.yml files are either missing or is invalid.\n"}${noDeclaredLanguageQueries.join("\n")}`); 'Their qlpack.yml files are either missing or is invalid.\n' +
noDeclaredLanguageQueries.join('\n'));
} }
const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages; const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages); const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
if (multipleDeclaredLanguagesQueries.length !== 0) { if (multipleDeclaredLanguagesQueries.length !== 0) {
throw new Error(`${"The following queries declare multiple languages. " + throw new Error('The following queries declare multiple languages. ' +
"Their qlpack.yml files are either missing or is invalid.\n"}${multipleDeclaredLanguagesQueries.join("\n")}`); 'Their qlpack.yml files are either missing or is invalid.\n' +
multipleDeclaredLanguagesQueries.join('\n'));
} }
} }
/** /**
* Run 'codeql resolve queries' and add the results to resultMap * Run 'codeql resolve queries' and add the results to resultMap
*
* If a checkout path is given then the queries are assumed to be custom queries
* and an error will be thrown if there is anything invalid about the queries.
* If a checkout path is not given then the queries are assumed to be builtin
* queries, and error checking will be suppressed.
*/ */
async function runResolveQueries(codeQL, resultMap, toResolve, extraSearchPath) { async function runResolveQueries(resultMap, toResolve, extraSearchPath, errorOnInvalidQueries) {
const resolvedQueries = await codeQL.resolveQueries(toResolve, extraSearchPath); const codeQl = codeql_1.getCodeQL();
if (extraSearchPath !== undefined) { const resolvedQueries = await codeQl.resolveQueries(toResolve, extraSearchPath);
validateQueries(resolvedQueries); for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
}
for (const [language, queryPaths] of Object.entries(resolvedQueries.byLanguage)) {
if (resultMap[language] === undefined) { if (resultMap[language] === undefined) {
resultMap[language] = { resultMap[language] = [];
builtin: [],
custom: [],
};
}
const queries = Object.keys(queryPaths).filter((q) => !queryIsDisabled(language, q));
if (extraSearchPath !== undefined) {
resultMap[language].custom.push(...queries);
}
else {
resultMap[language].builtin.push(...queries);
} }
resultMap[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
}
if (errorOnInvalidQueries) {
validateQueries(resolvedQueries);
} }
} }
/** /**
* Get the set of queries included by default. * Get the set of queries included by default.
*/ */
async function addDefaultQueries(codeQL, languages, resultMap) { async function addDefaultQueries(languages, resultMap) {
const suites = languages.map((l) => `${l}-code-scanning.qls`); const suites = languages.map(l => l + '-code-scanning.qls');
await runResolveQueries(codeQL, resultMap, suites, undefined); await runResolveQueries(resultMap, suites, undefined, false);
} }
// The set of acceptable values for built-in suites from the codeql bundle // The set of acceptable values for built-in suites from the codeql bundle
const builtinSuites = ["security-extended", "security-and-quality"]; const builtinSuites = ['security-extended', 'security-and-quality'];
/** /**
* Determine the set of queries associated with suiteName's suites and add them to resultMap. * Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite. * Throws an error if suiteName is not a valid builtin suite.
*/ */
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) { async function addBuiltinSuiteQueries(configFile, languages, resultMap, suiteName) {
const suite = builtinSuites.find((suite) => suite === suiteName); const suite = builtinSuites.find((suite) => suite === suiteName);
if (!suite) { if (!suite) {
throw new Error(getQueryUsesInvalid(configFile, suiteName)); throw new Error(getQueryUsesInvalid(configFile, suiteName));
} }
const suites = languages.map((l) => `${l}-${suiteName}.qls`); const suites = languages.map(l => l + '-' + suiteName + '.qls');
await runResolveQueries(codeQL, resultMap, suites, undefined); await runResolveQueries(resultMap, suites, undefined, false);
} }
/** /**
* Retrieve the set of queries at localQueryPath and add them to resultMap. * Retrieve the set of queries at localQueryPath and add them to resultMap.
*/ */
async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath, configFile) { async function addLocalQueries(configFile, resultMap, localQueryPath) {
// Resolve the local path against the workspace so that when this is // Resolve the local path against the workspace so that when this is
// passed to codeql it resolves to exactly the path we expect it to resolve to. // passed to codeql it resolves to exactly the path we expect it to resolve to.
let absoluteQueryPath = path.join(checkoutPath, localQueryPath); const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
// Check the file exists // Check the file exists
if (!fs.existsSync(absoluteQueryPath)) { if (!fs.existsSync(absoluteQueryPath)) {
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath)); throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
@@ -119,21 +114,23 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath,
// Call this after checking file exists, because it'll fail if file doesn't exist // Call this after checking file exists, because it'll fail if file doesn't exist
absoluteQueryPath = fs.realpathSync(absoluteQueryPath); absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
// Check the local path doesn't jump outside the repo using '..' or symlinks // Check the local path doesn't jump outside the repo using '..' or symlinks
if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(checkoutPath) + path.sep)) { if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath)); throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
} }
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath); // Get the root of the current repo to use when resolving query dependencies
const rootOfRepo = util.getRequiredEnvParam('GITHUB_WORKSPACE');
await runResolveQueries(resultMap, [absoluteQueryPath], rootOfRepo, true);
} }
/** /**
* Retrieve the set of queries at the referenced remote repo and add them to resultMap. * Retrieve the set of queries at the referenced remote repo and add them to resultMap.
*/ */
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile) { async function addRemoteQueries(configFile, resultMap, queryUses) {
let tok = queryUses.split("@"); let tok = queryUses.split('@');
if (tok.length !== 2) { if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses)); throw new Error(getQueryUsesInvalid(configFile, queryUses));
} }
const ref = tok[1]; const ref = tok[1];
tok = tok[0].split("/"); tok = tok[0].split('/');
// The first token is the owner // The first token is the owner
// The second token is the repo // The second token is the repo
// The rest is a path, if there is more than one token combine them to form the full path // The rest is a path, if there is more than one token combine them to form the full path
@@ -141,16 +138,16 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
throw new Error(getQueryUsesInvalid(configFile, queryUses)); throw new Error(getQueryUsesInvalid(configFile, queryUses));
} }
// Check none of the parts of the repository name are empty // Check none of the parts of the repository name are empty
if (tok[0].trim() === "" || tok[1].trim() === "") { if (tok[0].trim() === '' || tok[1].trim() === '') {
throw new Error(getQueryUsesInvalid(configFile, queryUses)); throw new Error(getQueryUsesInvalid(configFile, queryUses));
} }
const nwo = `${tok[0]}/${tok[1]}`; const nwo = tok[0] + '/' + tok[1];
// Checkout the external repository // Checkout the external repository
const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, githubUrl, tempDir, logger); const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref);
const queryPath = tok.length > 2 const queryPath = tok.length > 2
? path.join(checkoutPath, tok.slice(2).join("/")) ? path.join(rootOfRepo, tok.slice(2).join('/'))
: checkoutPath; : rootOfRepo;
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath); await runResolveQueries(resultMap, [queryPath], rootOfRepo, true);
} }
/** /**
* Parse a query 'uses' field to a discrete set of query files and update resultMap. * Parse a query 'uses' field to a discrete set of query files and update resultMap.
@@ -160,23 +157,23 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
* local paths starting with './', or references to remote repos, or * local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites. * a finite set of hardcoded terms for builtin suites.
*/ */
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, githubUrl, logger, configFile) { async function parseQueryUses(configFile, languages, resultMap, queryUses) {
queryUses = queryUses.trim(); queryUses = queryUses.trim();
if (queryUses === "") { if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile)); throw new Error(getQueryUsesInvalid(configFile));
} }
// Check for the local path case before we start trying to parse the repository name // Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) { if (queryUses.startsWith("./")) {
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), checkoutPath, configFile); await addLocalQueries(configFile, resultMap, queryUses.slice(2));
return; return;
} }
// Check for one of the builtin suites // Check for one of the builtin suites
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) { if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile); await addBuiltinSuiteQueries(configFile, languages, resultMap, queryUses);
return; return;
} }
// Otherwise, must be a reference to another repo // Otherwise, must be a reference to another repo
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile); await addRemoteQueries(configFile, resultMap, queryUses);
} }
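parseQueryUses above accepts exactly three shapes of 'uses' value and dispatches on simple syntactic checks. A sketch of that dispatch with the handlers reduced to tags (classifyQueryUses is illustrative; the real handlers are the add*Queries functions above):

function classifyQueryUses(queryUses) {
    queryUses = queryUses.trim();
    if (queryUses === "") {
        throw new Error("the 'uses' value must be non-empty");
    }
    // Local paths are always written relative to the repository root.
    if (queryUses.startsWith("./")) {
        return { kind: "local", path: queryUses.slice(2) };
    }
    // A bare word with no "/" or "@" must be one of the built-in suites.
    if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
        return { kind: "builtin-suite", suite: queryUses };
    }
    // Anything else is treated as a reference to another repository.
    return { kind: "remote", reference: queryUses };
}
// classifyQueryUses("./queries/my.ql")                  -> local path "queries/my.ql"
// classifyQueryUses("security-and-quality")             -> built-in suite
// classifyQueryUses("octo-org/codeql-queries/cpp@main") -> remote repository reference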
// Regex validating stars in paths or paths-ignore entries. // Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately // The intention is to only allow ** to appear when immediately
@@ -187,158 +184,194 @@ const pathStarsRegex = /.*(?:\*\*[^/].*|\*\*$|[^/]\*\*.*)/;
const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/; const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/;
// Checks that a paths of paths-ignore entry is valid, possibly modifying it // Checks that a paths of paths-ignore entry is valid, possibly modifying it
// to make it valid, or if not possible then throws an error. // to make it valid, or if not possible then throws an error.
function validateAndSanitisePath(originalPath, propertyName, configFile, logger) { function validateAndSanitisePath(originalPath, propertyName, configFile) {
// Take a copy so we don't modify the original path, so we can still construct error messages // Take a copy so we don't modify the original path, so we can still construct error messages
let path = originalPath; let path = originalPath;
// All paths are relative to the src root, so strip off leading slashes. // All paths are relative to the src root, so strip off leading slashes.
while (path.charAt(0) === "/") { while (path.charAt(0) === '/') {
path = path.substring(1); path = path.substring(1);
} }
// Trailing ** are redundant, so strip them off // Trailing ** are redundant, so strip them off
if (path.endsWith("/**")) { if (path.endsWith('/**')) {
path = path.substring(0, path.length - 2); path = path.substring(0, path.length - 2);
} }
// An empty path is not allowed as it's meaningless // An empty path is not allowed as it's meaningless
if (path === "") { if (path === '') {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" is not an invalid path. ` + throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" is not an invalid path. ' +
`It is not necessary to include it, and it is not allowed to exclude it.`)); 'It is not necessary to include it, and it is not allowed to exclude it.'));
} }
// Check for illegal uses of ** // Check for illegal uses of **
if (path.match(pathStarsRegex)) { if (path.match(pathStarsRegex)) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an invalid "**" wildcard. ` + throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an invalid "**" wildcard. ' +
`They must be immediately preceeded and followed by a slash as in "/**/", or come at the start or end.`)); 'They must be immediately preceeded and followed by a slash as in "/**/", or come at the start or end.'));
} }
// Check for other regex characters that we don't support. // Check for other regex characters that we don't support.
// Output a warning so the user knows, but otherwise continue normally. // Output a warning so the user knows, but otherwise continue normally.
if (path.match(filterPatternCharactersRegex)) { if (path.match(filterPatternCharactersRegex)) {
logger.warning(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an unsupported character. ` + core.warning(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an unsupported character. ' +
`The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.`)); 'The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.'));
} }
// Ban any uses of backslash for now. // Ban any uses of backslash for now.
// This may not play nicely with project layouts. // This may not play nicely with project layouts.
// This restriction can be lifted later if we determine they are ok. // This restriction can be lifted later if we determine they are ok.
if (path.indexOf("\\") !== -1) { if (path.indexOf('\\') !== -1) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an "\\" character. These are not allowed in filters. ` + throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an "\\" character. These are not allowed in filters. ' +
`If running on windows we recommend using "/" instead for path filters.`)); 'If running on windows we recommend using "/" instead for path filters.'));
} }
return path; return path;
} }
exports.validateAndSanitisePath = validateAndSanitisePath; exports.validateAndSanitisePath = validateAndSanitisePath;
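A few illustrative calls against the sanitisation rules above, assuming the compiled module is loaded as configUtils and using the four-argument form from the new side of the diff; the config file name and the minimal logger stub are stand-ins, not values from the action:

const assert = require("assert");
const configUtils = require("./config-utils");
const logger = { warning: (msg) => console.warn(msg) };

// Leading slashes are stripped and a trailing "**" is redundant, leaving "src/".
assert.strictEqual(configUtils.validateAndSanitisePath("/src/**", "paths", "codeql-config.yml", logger), "src/");
// "**" must form its own path segment, so this entry is rejected.
assert.throws(() => configUtils.validateAndSanitisePath("src/**foo", "paths", "codeql-config.yml", logger));
// Backslashes are banned outright; "/" should be used even on Windows.
assert.throws(() => configUtils.validateAndSanitisePath("src\\main", "paths", "codeql-config.yml", logger));
// "?", "+", "[", "]" and "!" only produce a warning and are then matched literally.
assert.strictEqual(configUtils.validateAndSanitisePath("src/a?b.js", "paths", "codeql-config.yml", logger), "src/a?b.js");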
// An undefined configFile in some of these functions indicates that
// the property was in a workflow file, not a config file
function getNameInvalid(configFile) { function getNameInvalid(configFile) {
return getConfigFilePropertyError(configFile, NAME_PROPERTY, "must be a non-empty string"); return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
} }
exports.getNameInvalid = getNameInvalid; exports.getNameInvalid = getNameInvalid;
function getDisableDefaultQueriesInvalid(configFile) { function getDisableDefaultQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, "must be a boolean"); return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
} }
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid; exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
function getQueriesInvalid(configFile) { function getQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array"); return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
} }
exports.getQueriesInvalid = getQueriesInvalid; exports.getQueriesInvalid = getQueriesInvalid;
function getQueryUsesInvalid(configFile, queryUses) { function getQueryUsesInvalid(configFile, queryUses) {
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `must be a built-in suite (${builtinSuites.join(" or ")}), a relative path, or be of the form "owner/repo[/path]@ref"${queryUses !== undefined ? `\n Found: ${queryUses}` : ""}`); return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
'), a relative path, or be of the form "owner/repo[/path]@ref"' +
(queryUses !== undefined ? '\n Found: ' + queryUses : ''));
} }
exports.getQueryUsesInvalid = getQueryUsesInvalid; exports.getQueryUsesInvalid = getQueryUsesInvalid;
function getPathsIgnoreInvalid(configFile) { function getPathsIgnoreInvalid(configFile) {
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, "must be an array of non-empty strings"); return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
} }
exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid; exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
function getPathsInvalid(configFile) { function getPathsInvalid(configFile) {
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, "must be an array of non-empty strings"); return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
} }
exports.getPathsInvalid = getPathsInvalid; exports.getPathsInvalid = getPathsInvalid;
function getLocalPathOutsideOfRepository(configFile, localPath) { function getLocalPathOutsideOfRepository(configFile, localPath) {
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" is outside of the repository`); return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
} }
exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository; exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
function getLocalPathDoesNotExist(configFile, localPath) { function getLocalPathDoesNotExist(configFile, localPath) {
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" does not exist in the repository`); return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
} }
exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist; exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
function getConfigFileOutsideWorkspaceErrorMessage(configFile) { function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
return `The configuration file "${configFile}" is outside of the workspace`; return 'The configuration file "' + configFile + '" is outside of the workspace';
} }
exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage; exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
function getConfigFileDoesNotExistErrorMessage(configFile) { function getConfigFileDoesNotExistErrorMessage(configFile) {
return `The configuration file "${configFile}" does not exist`; return 'The configuration file "' + configFile + '" does not exist';
} }
exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage; exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
function getConfigFileRepoFormatInvalidMessage(configFile) { function getConfigFileRepoFormatInvalidMessage(configFile) {
let error = `The configuration file "${configFile}" is not a supported remote file reference.`; let error = 'The configuration file "' + configFile + '" is not a supported remote file reference.';
error += " Expected format <owner>/<repository>/<file-path>@<ref>"; error += ' Expected format <owner>/<repository>/<file-path>@<ref>';
return error; return error;
} }
exports.getConfigFileRepoFormatInvalidMessage = getConfigFileRepoFormatInvalidMessage; exports.getConfigFileRepoFormatInvalidMessage = getConfigFileRepoFormatInvalidMessage;
function getConfigFileFormatInvalidMessage(configFile) { function getConfigFileFormatInvalidMessage(configFile) {
return `The configuration file "${configFile}" could not be read`; return 'The configuration file "' + configFile + '" could not be read';
} }
exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage; exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage;
function getConfigFileDirectoryGivenMessage(configFile) { function getConfigFileDirectoryGivenMessage(configFile) {
return `The configuration file "${configFile}" looks like a directory, not a file`; return 'The configuration file "' + configFile + '" looks like a directory, not a file';
} }
exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage; exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
function getConfigFilePropertyError(configFile, property, error) { function getConfigFilePropertyError(configFile, property, error) {
if (configFile === undefined) { return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
return `The workflow property "${property}" is invalid: ${error}`; }
/**
* Gets the set of languages in the current repository
*/
async function getLanguagesInRepo() {
var _a;
// Translate between GitHub's API names for languages and ours
const codeqlLanguages = {
'C': 'cpp',
'C++': 'cpp',
'C#': 'csharp',
'Go': 'go',
'Java': 'java',
'JavaScript': 'javascript',
'TypeScript': 'javascript',
'Python': 'python',
};
let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
if (repo_nwo) {
let owner = repo_nwo[0];
let repo = repo_nwo[1];
core.debug(`GitHub repo ${owner} ${repo}`);
const response = await api.getApiClient().request("GET /repos/:owner/:repo/languages", ({
owner,
repo
}));
core.debug("Languages API response: " + JSON.stringify(response));
// The GitHub API is going to return languages in order of popularity,
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
let languages = new Set();
for (let lang in response.data) {
if (lang in codeqlLanguages) {
languages.add(codeqlLanguages[lang]);
}
}
return [...languages];
} }
else { else {
return `The configuration file "${configFile}" is invalid: property "${property}" ${error}`; return [];
} }
} }
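getLanguagesInRepo above leans on two facts worth calling out: the GitHub languages API lists languages by prevalence, and JavaScript Sets keep insertion order, so mapping linguist names to CodeQL names through a Set yields a popularity-ordered, de-duplicated list. A small sketch of just that mapping step (illustrative helper, with the API call omitted):

function mapRepoLanguages(apiLanguages) {
    const codeqlLanguages = {
        "C": "cpp",
        "C++": "cpp",
        "C#": "csharp",
        "Go": "go",
        "Java": "java",
        "JavaScript": "javascript",
        "TypeScript": "javascript",
        "Python": "python",
    };
    const languages = new Set();
    for (const lang of Object.keys(apiLanguages)) {
        if (lang in codeqlLanguages) {
            languages.add(codeqlLanguages[lang]);
        }
    }
    return [...languages];
}
// { TypeScript: 55632, JavaScript: 4212, Python: 310 } maps to
// ["javascript", "python"]: TypeScript and JavaScript collapse into one entry.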
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, githubUrl, logger) { /**
queriesInput = queriesInput.trim(); * Get the languages to analyse.
// "+" means "don't override config file" - see shouldAddConfigFileQueries *
queriesInput = queriesInput.replace(/^\+/, ""); * The result is obtained from the action input parameter 'languages' if that
for (const query of queriesInput.split(",")) { * has been set, otherwise it is deduced as all languages in the repo that
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, githubUrl, logger); * can be analysed.
*/
async function getLanguages() {
// Obtain from action input 'languages' if set
let languages = core.getInput('languages', { required: false })
.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
core.info("Languages from configuration: " + JSON.stringify(languages));
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo();
core.info("Automatically detected languages: " + JSON.stringify(languages));
} }
} return languages;
// Returns true if either no queries were provided in the workflow,
// or if the queries in the workflow were provided in "additive" mode,
// indicating that they shouldn't override the config queries but
// should instead be added in addition
function shouldAddConfigFileQueries(queriesInput) {
if (queriesInput) {
return queriesInput.trimStart().substr(0, 1) === "+";
}
return true;
} }
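addQueriesFromWorkflow and shouldAddConfigFileQueries above (new side of the diff) implement a small convention for the workflow-level queries input: the value normally replaces whatever the config file lists, but a leading "+" marks it as additive so both sets are run. A hedged sketch of that convention with illustrative helper names:

function splitQueriesInput(queriesInput) {
    const trimmed = (queriesInput || "").trim();
    const additive = trimmed.startsWith("+");
    const entries = trimmed
        .replace(/^\+/, "")
        .split(",")
        .map((q) => q.trim())
        .filter((q) => q.length > 0);
    // Config-file queries are kept when nothing was given or when the input is additive.
    return { entries, useConfigFileQueries: additive || entries.length === 0 };
}
// splitQueriesInput("security-extended")        -> workflow input overrides the config file
// splitQueriesInput("+security-extended,./foo") -> both sets of queries are run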
/** /**
* Get the default config for when the user has not supplied one. * Get the default config for when the user has not supplied one.
*/ */
async function getDefaultConfig(languages, queriesInput, tempDir, toolCacheDir, codeQL, checkoutPath, githubUrl, logger) { async function getDefaultConfig() {
const languages = await getLanguages();
const queries = {}; const queries = {};
await addDefaultQueries(codeQL, languages, queries); await addDefaultQueries(languages, queries);
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl, logger);
}
return { return {
languages, languages: languages,
queries, queries: queries,
pathsIgnore: [], pathsIgnore: [],
paths: [], paths: [],
originalUserInput: {}, originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
}; };
} }
exports.getDefaultConfig = getDefaultConfig; exports.getDefaultConfig = getDefaultConfig;
/** /**
* Load the config from the given file. * Load the config from the given file.
*/ */
async function loadConfig(languages, queriesInput, configFile, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) { async function loadConfig(configFile) {
let parsedYAML; let parsedYAML;
if (isLocal(configFile)) { if (isLocal(configFile)) {
// Treat the config file as relative to the workspace // Treat the config file as relative to the workspace
configFile = path.resolve(checkoutPath, configFile); const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
parsedYAML = getLocalConfig(configFile, checkoutPath); configFile = path.resolve(workspacePath, configFile);
parsedYAML = getLocalConfig(configFile, workspacePath);
} }
else { else {
parsedYAML = await getRemoteConfig(configFile, githubAuth, githubUrl); parsedYAML = await getRemoteConfig(configFile);
} }
// Validate that the 'name' property is syntactically correct, // Validate that the 'name' property is syntactically correct,
// even though we don't use the value yet. // even though we don't use the value yet.
@@ -350,6 +383,12 @@ async function loadConfig(languages, queriesInput, configFile, tempDir, toolCach
throw new Error(getNameInvalid(configFile)); throw new Error(getNameInvalid(configFile));
} }
} }
const languages = await getLanguages();
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
throw new Error("Did not detect any languages to analyze. Please update input in workflow.");
}
const queries = {}; const queries = {};
const pathsIgnore = []; const pathsIgnore = [];
const paths = []; const paths = [];
@@ -361,69 +400,47 @@ async function loadConfig(languages, queriesInput, configFile, tempDir, toolCach
disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY]; disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
} }
if (!disableDefaultQueries) { if (!disableDefaultQueries) {
await addDefaultQueries(codeQL, languages, queries); await addDefaultQueries(languages, queries);
} }
// If queries were provided using `with` in the action configuration, if (QUERIES_PROPERTY in parsedYAML) {
// they should take precedence over the queries in the config file
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl, logger);
}
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) { if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
throw new Error(getQueriesInvalid(configFile)); throw new Error(getQueriesInvalid(configFile));
} }
for (const query of parsedYAML[QUERIES_PROPERTY]) { for (const query of parsedYAML[QUERIES_PROPERTY]) {
if (!(QUERIES_USES_PROPERTY in query) || if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile)); throw new Error(getQueryUsesInvalid(configFile));
} }
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, githubUrl, logger, configFile); await parseQueryUses(configFile, languages, queries, query[QUERIES_USES_PROPERTY]);
} }
} }
if (PATHS_IGNORE_PROPERTY in parsedYAML) { if (PATHS_IGNORE_PROPERTY in parsedYAML) {
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) { if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
throw new Error(getPathsIgnoreInvalid(configFile)); throw new Error(getPathsIgnoreInvalid(configFile));
} }
parsedYAML[PATHS_IGNORE_PROPERTY].forEach((path) => { parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
if (typeof path !== "string" || path === "") { if (typeof path !== "string" || path === '') {
throw new Error(getPathsIgnoreInvalid(configFile)); throw new Error(getPathsIgnoreInvalid(configFile));
} }
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile, logger)); pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
}); });
} }
if (PATHS_PROPERTY in parsedYAML) { if (PATHS_PROPERTY in parsedYAML) {
if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) { if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
throw new Error(getPathsInvalid(configFile)); throw new Error(getPathsInvalid(configFile));
} }
parsedYAML[PATHS_PROPERTY].forEach((path) => { parsedYAML[PATHS_PROPERTY].forEach(path => {
if (typeof path !== "string" || path === "") { if (typeof path !== "string" || path === '') {
throw new Error(getPathsInvalid(configFile)); throw new Error(getPathsInvalid(configFile));
} }
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile, logger)); paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
}); });
} }
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
for (const language of languages) {
if (queries[language] === undefined ||
(queries[language].builtin.length === 0 &&
queries[language].custom.length === 0)) {
throw new Error(`Did not detect any queries to run for ${language}. ` +
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
}
}
return { return {
languages, languages,
queries, queries,
pathsIgnore, pathsIgnore,
paths, paths,
originalUserInput: parsedYAML, originalUserInput: parsedYAML
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
}; };
} }
/** /**
@@ -432,18 +449,19 @@ async function loadConfig(languages, queriesInput, configFile, tempDir, toolCach
* This will parse the config from the user input if present, or generate * This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location. * a default config. The parsed config is then stored to a known location.
*/ */
async function initConfig(languages, queriesInput, configFile, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) { async function initConfig() {
const configFile = core.getInput('config-file');
let config; let config;
// If no config file was provided create an empty one // If no config file was provided create an empty one
if (!configFile) { if (configFile === '') {
logger.debug("No configuration file was provided"); core.debug('No configuration file was provided');
config = await getDefaultConfig(languages, queriesInput, tempDir, toolCacheDir, codeQL, checkoutPath, githubUrl, logger); config = await getDefaultConfig();
} }
else { else {
config = await loadConfig(languages, queriesInput, configFile, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger); config = await loadConfig(configFile);
} }
// Save the config so we can easily access it again in the future // Save the config so we can easily access it again in the future
await saveConfig(config, logger); await saveConfig(config);
return config; return config;
} }
exports.initConfig = initConfig; exports.initConfig = initConfig;
@@ -452,30 +470,28 @@ function isLocal(configPath) {
if (configPath.indexOf("./") === 0) { if (configPath.indexOf("./") === 0) {
return true; return true;
} }
return configPath.indexOf("@") === -1; return (configPath.indexOf("@") === -1);
} }
function getLocalConfig(configFile, checkoutPath) { function getLocalConfig(configFile, workspacePath) {
// Error if the config file is now outside of the workspace // Error if the config file is now outside of the workspace
if (!(configFile + path.sep).startsWith(checkoutPath + path.sep)) { if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile)); throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
} }
// Error if the file does not exist // Error if the file does not exist
if (!fs.existsSync(configFile)) { if (!fs.existsSync(configFile)) {
throw new Error(getConfigFileDoesNotExistErrorMessage(configFile)); throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
} }
return yaml.safeLoad(fs.readFileSync(configFile, "utf8")); return yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
} }
async function getRemoteConfig(configFile, githubAuth, githubUrl) { async function getRemoteConfig(configFile) {
// retrieve the various parts of the config location, and ensure they're present // retrieve the various parts of the config location, and ensure they're present
const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)"); const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
const pieces = format.exec(configFile); const pieces = format.exec(configFile);
// 5 = 4 groups + the whole expression // 5 = 4 groups + the whole expression
if (pieces === null || pieces.groups === undefined || pieces.length < 5) { if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile)); throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
} }
const response = await api const response = await api.getApiClient().repos.getContents({
.getApiClient(githubAuth, githubUrl, true)
.repos.getContents({
owner: pieces.groups.owner, owner: pieces.groups.owner,
repo: pieces.groups.repo, repo: pieces.groups.repo,
path: pieces.groups.path, path: pieces.groups.path,
@@ -491,38 +507,47 @@ async function getRemoteConfig(configFile, githubAuth, githubUrl) {
else { else {
throw new Error(getConfigFileFormatInvalidMessage(configFile)); throw new Error(getConfigFileFormatInvalidMessage(configFile));
} }
return yaml.safeLoad(Buffer.from(fileContents, "base64").toString("binary")); return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
}
/**
* Get the directory where the parsed config will be stored.
*/
function getPathToParsedConfigFolder() {
return util.getRequiredEnvParam('RUNNER_TEMP');
} }
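getRemoteConfig above expects remote config references of the form <owner>/<repository>/<file-path>@<ref>. A worked example of the same named-group regex (the reference string here is made up for illustration):

const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)");
const pieces = format.exec("octo-org/codeql-config/configs/default.yml@main");
// pieces.groups is { owner: "octo-org", repo: "codeql-config",
//                    path: "configs/default.yml", ref: "main" }
// A reference without a file path, such as "octo-org/codeql-config@main",
// does not match and is reported as an invalid remote reference.
console.log(pieces.groups);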
/** /**
* Get the file path where the parsed config will be stored. * Get the file path where the parsed config will be stored.
*/ */
function getPathToParsedConfigFile(tempDir) { function getPathToParsedConfigFile() {
return path.join(tempDir, "config"); return path.join(getPathToParsedConfigFolder(), 'config');
} }
exports.getPathToParsedConfigFile = getPathToParsedConfigFile; exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
/** /**
* Store the given config to the path returned from getPathToParsedConfigFile. * Store the given config to the path returned from getPathToParsedConfigFile.
*/ */
async function saveConfig(config, logger) { async function saveConfig(config) {
const configString = JSON.stringify(config); const configString = JSON.stringify(config);
const configFile = getPathToParsedConfigFile(config.tempDir); await io.mkdirP(getPathToParsedConfigFolder());
fs.mkdirSync(path.dirname(configFile), { recursive: true }); fs.writeFileSync(getPathToParsedConfigFile(), configString, 'utf8');
fs.writeFileSync(configFile, configString, "utf8"); core.debug('Saved config:');
logger.debug("Saved config:"); core.debug(configString);
logger.debug(configString);
} }
/** /**
* Get the config that has been saved to the given temp dir. * Get the config.
* If the config could not be found then returns undefined. *
* If this is the first time in a workflow that this is being called then
* this will parse the config from the user input. The parsed config is then
* stored to a known location. On the second and further calls, this will
* return the contents of the parsed config from the known location.
*/ */
async function getConfig(tempDir, logger) { async function getConfig() {
const configFile = getPathToParsedConfigFile(tempDir); const configFile = getPathToParsedConfigFile();
if (!fs.existsSync(configFile)) { if (!fs.existsSync(configFile)) {
return undefined; throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
} }
const configString = fs.readFileSync(configFile, "utf8"); const configString = fs.readFileSync(configFile, 'utf8');
logger.debug("Loaded config:"); core.debug('Loaded config:');
logger.debug(configString); core.debug(configString);
return JSON.parse(configString); return JSON.parse(configString);
} }
exports.getConfig = getConfig; exports.getConfig = getConfig;
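The pair of functions above implements a simple hand-off between action steps: init parses the configuration once, serialises it to <tempDir>/config, and later steps read it back instead of re-parsing. A minimal sketch of that round trip following the new side of the diff (helper names are illustrative):

const fs = require("fs");
const path = require("path");

function saveConfigSketch(config, tempDir) {
    const configFile = path.join(tempDir, "config");
    fs.mkdirSync(path.dirname(configFile), { recursive: true });
    fs.writeFileSync(configFile, JSON.stringify(config), "utf8");
    return configFile;
}

function getConfigSketch(tempDir) {
    const configFile = path.join(tempDir, "config");
    // Later steps treat a missing file as "the init step has not run yet".
    if (!fs.existsSync(configFile)) {
        return undefined;
    }
    return JSON.parse(fs.readFileSync(configFile, "utf8"));
}

// saveConfigSketch({ languages: ["javascript"] }, process.env.RUNNER_TEMP || "/tmp");
// getConfigSketch(process.env.RUNNER_TEMP || "/tmp"); // -> { languages: ["javascript"] }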

File diff suppressed because one or more lines are too long

lib/config-utils.test.js (generated, 543 changed lines)

@@ -16,49 +16,40 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const sinon_1 = __importDefault(require("sinon")); const sinon_1 = __importDefault(require("sinon"));
const api = __importStar(require("./api-client")); const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql"); const CodeQL = __importStar(require("./codeql"));
const configUtils = __importStar(require("./config-utils")); const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils"); const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util")); const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default); testing_utils_1.setupTests(ava_1.default);
// Returns the filepath of the newly-created file function setInput(name, value) {
function createConfigFile(inputFileContents, tmpDir) { // Transformation copied from
const configFilePath = path.join(tmpDir, "input"); // https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
fs.writeFileSync(configFilePath, inputFileContents, "utf8"); const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
return configFilePath; if (value !== undefined) {
process.env[envVar] = value;
}
else {
delete process.env[envVar];
}
} }
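The setInput helper on the old side of the diff fakes workflow inputs by writing the environment variable that @actions/core reads: input names have spaces replaced with underscores and are upper-cased, then prefixed with INPUT_. A tiny illustration of that mapping:

function inputEnvVar(name) {
    return `INPUT_${name.replace(/ /g, "_").toUpperCase()}`;
}
// inputEnvVar("config-file") === "INPUT_CONFIG-FILE"
// inputEnvVar("languages")   === "INPUT_LANGUAGES"
// so setInput("languages", "javascript,python") makes core.getInput("languages")
// return "javascript,python" inside the test.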
function mockGetContents(content) { function mockGetContents(content) {
// Passing an auth token is required, so we just use a dummy value // Passing an auth token is required, so we just use a dummy value
const client = new github.GitHub("123"); let client = new github.GitHub('123');
const response = { const response = {
data: content, data: content
}; };
const spyGetContents = sinon_1.default const spyGetContents = sinon_1.default.stub(client.repos, "getContents").resolves(response);
.stub(client.repos, "getContents")
.resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client); sinon_1.default.stub(api, "getApiClient").value(() => client);
return spyGetContents; return spyGetContents;
} }
function mockListLanguages(languages) {
// Passing an auth token is required, so we just use a dummy value
const client = new github.GitHub("123");
const response = {
data: {},
};
for (const language of languages) {
response.data[language] = 123;
}
sinon_1.default.stub(client.repos, "listLanguages").resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client);
}
ava_1.default("load empty config", async (t) => { ava_1.default("load empty config", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
const logger = logging_1.getRunnerLogger(true); process.env['RUNNER_TEMP'] = tmpDir;
const languages = [languages_1.Language.javascript, languages_1.Language.python]; process.env['GITHUB_WORKSPACE'] = tmpDir;
const codeQL = codeql_1.setCodeQL({ setInput('config-file', undefined);
async resolveQueries() { setInput('languages', 'javascript,python');
CodeQL.setCodeQL({
resolveQueries: async function () {
return { return {
byLanguage: {}, byLanguage: {},
noDeclaredLanguage: {}, noDeclaredLanguage: {},
@@ -66,15 +57,18 @@ ava_1.default("load empty config", async (t) => {
}; };
}, },
}); });
const config = await configUtils.initConfig(languages, undefined, undefined, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger); const config = await configUtils.initConfig();
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, tmpDir, tmpDir, codeQL, tmpDir, "https://github.example.com", logger)); t.deepEqual(config, await configUtils.getDefaultConfig());
}); });
}); });
ava_1.default("loading config saves config", async (t) => { ava_1.default("loading config saves config", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
const logger = logging_1.getRunnerLogger(true); process.env['RUNNER_TEMP'] = tmpDir;
const codeQL = codeql_1.setCodeQL({ process.env['GITHUB_WORKSPACE'] = tmpDir;
async resolveQueries() { setInput('config-file', undefined);
setInput('languages', 'javascript,python');
CodeQL.setCodeQL({
resolveQueries: async function () {
return { return {
byLanguage: {}, byLanguage: {},
noDeclaredLanguage: {}, noDeclaredLanguage: {},
@@ -83,64 +77,73 @@ ava_1.default("loading config saves config", async (t) => {
}, },
}); });
// Sanity check the saved config file does not already exist // Sanity check the saved config file does not already exist
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir))); t.false(fs.existsSync(configUtils.getPathToParsedConfigFile()));
// Sanity check that getConfig returns undefined before we have called initConfig // Sanity check that getConfig throws before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined); await t.throwsAsync(configUtils.getConfig);
const config1 = await configUtils.initConfig([languages_1.Language.javascript, languages_1.Language.python], undefined, undefined, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger); const config1 = await configUtils.initConfig();
// The saved config file should now exist // The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir))); t.true(fs.existsSync(configUtils.getPathToParsedConfigFile()));
// And that same newly-initialised config should now be returned by getConfig // And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig(tmpDir, logger); const config2 = await configUtils.getConfig();
t.deepEqual(config1, config2); t.deepEqual(config1, config2);
}); });
}); });
ava_1.default("load input outside of workspace", async (t) => { ava_1.default("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', '../input');
try { try {
await configUtils.initConfig([], undefined, "../input", tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true)); await configUtils.initConfig();
throw new Error("initConfig did not throw error"); throw new Error('initConfig did not throw error');
} }
catch (err) { catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, "../input")))); t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
} }
}); });
}); });
ava_1.default("load non-local input with invalid repo syntax", async (t) => { ava_1.default("load non-local input with invalid repo syntax", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// no filename given, just a repo // no filename given, just a repo
const configFile = "octo-org/codeql-config@main"; setInput('config-file', 'octo-org/codeql-config@main');
try { try {
await configUtils.initConfig([], undefined, configFile, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true)); await configUtils.initConfig();
throw new Error("initConfig did not throw error"); throw new Error('initConfig did not throw error');
} }
catch (err) { catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage("octo-org/codeql-config@main"))); t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
} }
}); });
}); });
ava_1.default("load non-existent input", async (t) => { ava_1.default("load non-existent input", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
const languages = [languages_1.Language.javascript]; process.env['RUNNER_TEMP'] = tmpDir;
const configFile = "input"; process.env['GITHUB_WORKSPACE'] = tmpDir;
t.false(fs.existsSync(path.join(tmpDir, configFile))); t.false(fs.existsSync(path.join(tmpDir, 'input')));
setInput('config-file', 'input');
setInput('languages', 'javascript');
try { try {
await configUtils.initConfig(languages, undefined, configFile, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true)); await configUtils.initConfig();
throw new Error("initConfig did not throw error"); throw new Error('initConfig did not throw error');
} }
catch (err) { catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, "input")))); t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
} }
}); });
}); });
ava_1.default("load non-empty input", async (t) => { ava_1.default("load non-empty input", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
const codeQL = codeql_1.setCodeQL({ process.env['RUNNER_TEMP'] = tmpDir;
async resolveQueries() { process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
resolveQueries: async function () {
return { return {
byLanguage: { byLanguage: {
javascript: { 'javascript': {
"/foo/a.ql": {}, '/foo/a.ql': {},
"/bar/b.ql": {}, '/bar/b.ql': {},
}, },
}, },
noDeclaredLanguage: {}, noDeclaredLanguage: {},
@@ -159,52 +162,45 @@ ava_1.default("load non-empty input", async (t) => {
- b - b
paths: paths:
- c/d`; - c/d`;
fs.mkdirSync(path.join(tmpDir, "foo")); fs.mkdirSync(path.join(tmpDir, 'foo'));
// And the config we expect it to parse to // And the config we expect it to parse to
const expectedConfig = { const expectedConfig = {
languages: [languages_1.Language.javascript], languages: ['javascript'],
queries: { queries: { 'javascript': ['/foo/a.ql', '/bar/b.ql'] },
javascript: { pathsIgnore: ['a', 'b'],
builtin: [], paths: ['c/d'],
custom: ["/foo/a.ql", "/bar/b.ql"],
},
},
pathsIgnore: ["a", "b"],
paths: ["c/d"],
originalUserInput: { originalUserInput: {
name: "my config", name: 'my config',
"disable-default-queries": true, 'disable-default-queries': true,
queries: [{ uses: "./foo" }], queries: [{ uses: './foo' }],
"paths-ignore": ["a", "b"], 'paths-ignore': ['a', 'b'],
paths: ["c/d"], paths: ['c/d'],
}, },
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: codeQL.getPath(),
}; };
const languages = [languages_1.Language.javascript]; fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
const configFilePath = createConfigFile(inputFileContents, tmpDir); setInput('config-file', 'input');
const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true)); setInput('languages', 'javascript');
const actualConfig = await configUtils.initConfig();
// Should exactly equal the object we constructed earlier // Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig); t.deepEqual(actualConfig, expectedConfig);
}); });
}); });
ava_1.default("Default queries are used", async (t) => { ava_1.default("default queries are used", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Check that the default behaviour is to add the default queries. // Check that the default behaviour is to add the default queries.
// In this case if a config file is specified but does not include // In this case if a config file is specified but does not include
// the disable-default-queries field. // the disable-default-queries field.
// We determine this by whether CodeQL.resolveQueries is called // We determine this by whether CodeQL.resolveQueries is called
// with the correct arguments. // with the correct arguments.
const resolveQueriesArgs = []; const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({ CodeQL.setCodeQL({
async resolveQueries(queries, extraSearchPath) { resolveQueries: async function (queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath }); resolveQueriesArgs.push({ queries, extraSearchPath });
return { return {
byLanguage: { byLanguage: {
javascript: { 'javascript': {},
"foo.ql": {},
},
}, },
noDeclaredLanguage: {}, noDeclaredLanguage: {},
multipleDeclaredLanguages: {}, multipleDeclaredLanguages: {},
@@ -217,239 +213,25 @@ ava_1.default("Default queries are used", async (t) => {
const inputFileContents = ` const inputFileContents = `
paths: paths:
- foo`; - foo`;
fs.mkdirSync(path.join(tmpDir, "foo")); fs.mkdirSync(path.join(tmpDir, 'foo'));
const languages = [languages_1.Language.javascript]; fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
const configFilePath = createConfigFile(inputFileContents, tmpDir); setInput('config-file', 'input');
await configUtils.initConfig(languages, undefined, configFilePath, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true)); setInput('languages', 'javascript');
await configUtils.initConfig();
// Check resolve queries was called correctly // Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1); t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, [ t.deepEqual(resolveQueriesArgs[0].queries, ['javascript-code-scanning.qls']);
"javascript-code-scanning.qls",
]);
t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined); t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined);
}); });
}); });
/**
* Returns the provided queries, just in the right format for a resolved query
* This way we can test by seeing which returned items are in the final
* configuration.
*/
function queriesToResolvedQueryForm(queries) {
const dummyResolvedQueries = {};
queries.forEach((q) => {
dummyResolvedQueries[q] = {};
});
return {
byLanguage: {
javascript: dummyResolvedQueries,
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
}
ava_1.default("Queries can be specified in config file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
fs.mkdirSync(path.join(tmpDir, "foo"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = [languages_1.Language.javascript];
const config = await configUtils.initConfig(languages, undefined, configFilePath, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
t.deepEqual(resolveQueriesArgs.length, 2);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/foo$/);
// Now check that the end result contains the default queries and the query from config
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/foo$/);
});
});
ava_1.default("Queries from config file can be overridden in workflow file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
// This config item should take precedence over the config file but shouldn't affect the default queries.
const queries = "./override";
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, "override"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = [languages_1.Language.javascript];
const config = await configUtils.initConfig(languages, queries, configFilePath, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file.
t.deepEqual(resolveQueriesArgs.length, 2);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override$/);
// Now check that the end result contains only the default queries and the override query
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/override$/);
});
});
ava_1.default("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
name: my config
disable-default-queries: true`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const queries = "./workflow-query";
fs.mkdirSync(path.join(tmpDir, "workflow-query"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = [languages_1.Language.javascript];
const config = await configUtils.initConfig(languages, queries, configFilePath, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries.length, 1);
t.regex(resolveQueriesArgs[0].queries[0], /.*\/workflow-query$/);
// Now check that the end result contains only the workflow query, and not the default one
t.deepEqual(config.queries["javascript"].builtin.length, 0);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].custom[0], /.*\/workflow-query$/);
});
});
ava_1.default("Multiple queries can be specified in workflow file, no config file required", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
fs.mkdirSync(path.join(tmpDir, "override1"));
fs.mkdirSync(path.join(tmpDir, "override2"));
const queries = "./override1,./override2";
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = [languages_1.Language.javascript];
const config = await configUtils.initConfig(languages, queries, undefined, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
t.deepEqual(resolveQueriesArgs.length, 3);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override1$/);
t.regex(resolveQueriesArgs[2].queries[0], /.*\/override2$/);
// Now check that the end result contains both the queries from the workflow, as well as the defaults
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 2);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/override1$/);
t.regex(config.queries["javascript"].custom[1], /.*\/override2$/);
});
});
ava_1.default("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
// These queries shouldn't override anything, because the value is prefixed with "+"
const queries = "+./additional1,./additional2";
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, "additional1"));
fs.mkdirSync(path.join(tmpDir, "additional2"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = [languages_1.Language.javascript];
const config = await configUtils.initConfig(languages, queries, configFilePath, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries,
// once for each of additional1 and additional2,
// and once for './foo' from the config file
t.deepEqual(resolveQueriesArgs.length, 4);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/additional1$/);
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
t.regex(resolveQueriesArgs[2].queries[0], /.*\/additional2$/);
t.deepEqual(resolveQueriesArgs[3].queries.length, 1);
t.regex(resolveQueriesArgs[3].queries[0], /.*\/foo$/);
// Now check that the end result contains all the queries
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 3);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/additional1$/);
t.regex(config.queries["javascript"].custom[1], /.*\/additional2$/);
t.regex(config.queries["javascript"].custom[2], /.*\/foo$/);
});
});
ava_1.default("Invalid queries in workflow file handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const queries = "foo/bar@v1@v3";
const languages = [languages_1.Language.javascript];
// This function just needs to be type-correct; it doesn't need to do anything,
// since we're deliberately passing in invalid data
const codeQL = codeql_1.setCodeQL({
async resolveQueries(_queries, _extraSearchPath) {
return {
byLanguage: {
javascript: {},
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
try {
await configUtils.initConfig(languages, queries, undefined, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
t.fail("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getQueryUsesInvalid(undefined, "foo/bar@v1@v3")));
}
});
});
ava_1.default("API client used when reading remote config", async (t) => { ava_1.default("API client used when reading remote config", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
const codeQL = codeql_1.setCodeQL({ process.env['RUNNER_TEMP'] = tmpDir;
async resolveQueries() { process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
resolveQueries: async function () {
return { return {
byLanguage: { byLanguage: {},
javascript: {
"foo.ql": {},
},
},
noDeclaredLanguage: {}, noDeclaredLanguage: {},
multipleDeclaredLanguages: {}, multipleDeclaredLanguages: {},
}; };
@@ -472,21 +254,24 @@ ava_1.default("API client used when reading remote config", async (t) => {
}; };
const spyGetContents = mockGetContents(dummyResponse); const spyGetContents = mockGetContents(dummyResponse);
// Create checkout directory for remote queries repository // Create checkout directory for remote queries repository
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true }); fs.mkdirSync(path.join(tmpDir, 'foo/bar'), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main"; setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
const languages = [languages_1.Language.javascript]; setInput('languages', 'javascript');
await configUtils.initConfig(languages, undefined, configFile, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true)); await configUtils.initConfig();
t.assert(spyGetContents.called); t.assert(spyGetContents.called);
}); });
}); });
ava_1.default("Remote config handles the case where a directory is provided", async (t) => { ava_1.default("Remote config handles the case where a directory is provided", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const dummyResponse = []; // directories are returned as arrays const dummyResponse = []; // directories are returned as arrays
mockGetContents(dummyResponse); mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main"; const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
try { try {
await configUtils.initConfig([], undefined, repoReference, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true)); await configUtils.initConfig();
throw new Error("initConfig did not throw error"); throw new Error('initConfig did not throw error');
} }
catch (err) { catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference))); t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
@@ -495,45 +280,30 @@ ava_1.default("Remote config handles the case where a directory is provided", as
}); });
ava_1.default("Invalid format of remote config handled correctly", async (t) => { ava_1.default("Invalid format of remote config handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const dummyResponse = { const dummyResponse = {
// note no "content" property here // note no "content" property here
}; };
mockGetContents(dummyResponse); mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main"; const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
try { try {
await configUtils.initConfig([], undefined, repoReference, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true)); await configUtils.initConfig();
throw new Error("initConfig did not throw error"); throw new Error('initConfig did not throw error');
} }
catch (err) { catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference))); t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
} }
}); });
}); });
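// Sketch of the flow these remote-config tests cover, under assumptions: the
// "owner/repo/path@ref" reference is split, fetched through a contents-style API
// (stubbed above via mockGetContents), directories come back as arrays, and a
// well-formed file response carries a base64-encoded "content" property.
// `getContents` below is a stand-in for the real API client, not the action's code.
async function fetchRemoteConfigSketch(reference, getContents) {
    const [location, ref] = reference.split('@');
    const [owner, repo, ...pathParts] = location.split('/');
    const data = await getContents(owner, repo, pathParts.join('/'), ref);
    if (Array.isArray(data)) {
        throw new Error(`"${reference}" refers to a directory, not a file`);
    }
    if (typeof data.content !== 'string') {
        throw new Error(`"${reference}" did not return file contents`);
    }
    return Buffer.from(data.content, 'base64').toString('utf8');
}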
ava_1.default("No detected languages", async (t) => {
mockListLanguages([]);
try {
await languages_1.getLanguages(undefined, { owner: "github", repo: "example " }, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(languages_1.getNoLanguagesError()));
}
});
ava_1.default("Unknown languages", async (t) => {
const languages = "ruby,english";
try {
await languages_1.getLanguages(languages, { owner: "github", repo: "example " }, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(languages_1.getUnknownLanguagesError(["ruby", "english"])));
}
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) { function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
ava_1.default(`load invalid input - ${testName}`, async (t) => { ava_1.default("load invalid input - " + testName, async (t) => {
return await util.withTmpDir(async (tmpDir) => { return await util.withTmpDir(async (tmpDir) => {
const codeQL = codeql_1.setCodeQL({ process.env['RUNNER_TEMP'] = tmpDir;
async resolveQueries() { process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
resolveQueries: async function () {
return { return {
byLanguage: {}, byLanguage: {},
noDeclaredLanguage: {}, noDeclaredLanguage: {},
@@ -541,13 +311,13 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
}; };
}, },
}); });
const languages = [languages_1.Language.javascript]; const inputFile = path.join(tmpDir, 'input');
const configFile = "input"; fs.writeFileSync(inputFile, inputFileContents, 'utf8');
const inputFile = path.join(tmpDir, configFile); setInput('config-file', 'input');
fs.writeFileSync(inputFile, inputFileContents, "utf8"); setInput('languages', 'javascript');
try { try {
await configUtils.initConfig(languages, undefined, configFile, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true)); await configUtils.initConfig();
throw new Error("initConfig did not throw error"); throw new Error('initConfig did not throw error');
} }
catch (err) { catch (err) {
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile))); t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
@@ -555,14 +325,14 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
}); });
}); });
} }
doInvalidInputTest("name invalid type", ` doInvalidInputTest('name invalid type', `
name: name:
- foo: bar`, configUtils.getNameInvalid); - foo: bar`, configUtils.getNameInvalid);
doInvalidInputTest("disable-default-queries invalid type", `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid); doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest("queries invalid type", `queries: foo`, configUtils.getQueriesInvalid); doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest("paths-ignore invalid type", `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid); doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest("paths invalid type", `paths: 17`, configUtils.getPathsInvalid); doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest("queries uses invalid type", ` doInvalidInputTest('queries uses invalid type', `
queries: queries:
- uses: - uses:
- hello: world`, configUtils.getQueryUsesInvalid); - hello: world`, configUtils.getQueryUsesInvalid);
@@ -573,47 +343,52 @@ function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
name: my config name: my config
queries: queries:
- name: foo - name: foo
uses: ${input}`; uses: ` + input;
doInvalidInputTest(`queries uses "${input}"`, inputFileContents, expectedErrorMessageGenerator); doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
} }
// Various "uses" fields, and the errors they should produce // Various "uses" fields, and the errors they should produce
doInvalidQueryUsesTest("''", (c) => configUtils.getQueryUsesInvalid(c, undefined)); doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", (c) => configUtils.getQueryUsesInvalid(c, "foo/bar")); doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", (c) => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2")); doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", (c) => configUtils.getQueryUsesInvalid(c, "foo@master")); doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", (c) => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master")); doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", (c) => configUtils.getLocalPathDoesNotExist(c, "foo")); doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", (c) => configUtils.getLocalPathOutsideOfRepository(c, "..")); doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
const validPaths = [ const validPaths = [
"foo", 'foo',
"foo/", 'foo/',
"foo/**", 'foo/**',
"foo/**/", 'foo/**/',
"foo/**/**", 'foo/**/**',
"foo/**/bar/**/baz", 'foo/**/bar/**/baz',
"**/", '**/',
"**/foo", '**/foo',
"/foo", '/foo',
]; ];
const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"]; const invalidPaths = [
ava_1.default("path validations", (t) => { 'a/***/b',
'a/**b',
'a/b**',
'**',
];
ava_1.default('path validations', t => {
// Dummy values to pass to validateAndSanitisePath // Dummy values to pass to validateAndSanitisePath
const propertyName = "paths"; const propertyName = 'paths';
const configFile = "./.github/codeql/config.yml"; const configFile = './.github/codeql/config.yml';
for (const path of validPaths) { for (const path of validPaths) {
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile, logging_1.getRunnerLogger(true))); t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile));
} }
for (const path of invalidPaths) { for (const path of invalidPaths) {
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile, logging_1.getRunnerLogger(true))); t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile));
} }
}); });
ava_1.default("path sanitisation", (t) => { ava_1.default('path sanitisation', t => {
// Dummy values to pass to validateAndSanitisePath // Dummy values to pass to validateAndSanitisePath
const propertyName = "paths"; const propertyName = 'paths';
const configFile = "./.github/codeql/config.yml"; const configFile = './.github/codeql/config.yml';
// Valid paths are not modified // Valid paths are not modified
t.deepEqual(configUtils.validateAndSanitisePath("foo/bar", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/bar"); t.deepEqual(configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile), 'foo/bar');
// Trailing stars are stripped // Trailing stars are stripped
t.deepEqual(configUtils.validateAndSanitisePath("foo/**", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/"); t.deepEqual(configUtils.validateAndSanitisePath('foo/**', propertyName, configFile), 'foo/');
}); });
//# sourceMappingURL=config-utils.test.js.map //# sourceMappingURL=config-utils.test.js.map
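The path tests above encode three rules: "**" may only appear as a whole path segment, a bare "**" is rejected, and trailing stars are stripped ("foo/**" becomes "foo/"). A minimal sketch of just those rules, assuming nothing else is validated (the function name is hypothetical, to avoid suggesting this is the real validateAndSanitisePath):

function sanitisePathSketch(filterPath) {
    if (filterPath === '**') {
        throw new Error('"**" on its own is not a useful path filter');
    }
    // "a/***/b", "a/**b" and "a/b**" all embed "**" inside a path segment.
    if (/[^/]\*\*/.test(filterPath) || /\*\*[^/]/.test(filterPath)) {
        throw new Error(`"${filterPath}" uses "**" inside a path segment`);
    }
    // Trailing stars are stripped: "foo/**" -> "foo/".
    return filterPath.replace(/(\/\*\*)+$/, '/');
}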

File diff suppressed because one or more lines are too long


@@ -1,3 +0,0 @@
{
"bundleVersion": "codeql-bundle-20200826"
}

17 lib/error-matcher.js generated

@@ -1,17 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// exported only for testing purposes
exports.namedMatchersForTesting = {
/*
In due course it may be possible to remove the regex, if/when javascript also exits with code 32.
*/
noSourceCodeFound: {
exitCode: 32,
outputRegex: new RegExp("No JavaScript or TypeScript code found\\."),
message: "No code found during the build. Please see:\n" +
"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
},
};
// we collapse the matches into an array for use in execErrorCatcher
exports.errorMatchers = Object.values(exports.namedMatchersForTesting);
//# sourceMappingURL=error-matcher.js.map
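The matcher above pairs an exit code and an output regex with a friendlier message, and the comment notes the matchers are collapsed into an array for execErrorCatcher, which is not part of this diff. A hedged sketch of how such matchers are typically applied to a failed process (the shape is assumed from the object above):

function findMatcherMessage(matchers, exitCode, output) {
    for (const matcher of matchers) {
        const codeMatches = matcher.exitCode !== undefined && matcher.exitCode === exitCode;
        const outputMatches = matcher.outputRegex !== undefined && matcher.outputRegex.test(output);
        if (codeMatches || outputMatches) {
            // Surface the matcher's message instead of the raw process failure.
            return matcher.message;
        }
    }
    return undefined;
}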


@@ -1 +0,0 @@
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}


@@ -1,29 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const error_matcher_1 = require("./error-matcher");
/*
NB We test the regexes for all the matchers against example log output snippets.
*/
ava_1.default("noSourceCodeFound matches against example javascript output", async (t) => {
t.assert(testErrorMatcher("noSourceCodeFound", `
2020-09-07T17:39:53.9050522Z [2020-09-07 17:39:53] [build] Done extracting /opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/data/externs/web/ie_vml.js (3 ms)
2020-09-07T17:39:53.9051849Z [2020-09-07 17:39:53] [build-err] No JavaScript or TypeScript code found.
2020-09-07T17:39:53.9052444Z [2020-09-07 17:39:53] [build-err] No JavaScript or TypeScript code found.
2020-09-07T17:39:53.9251124Z [2020-09-07 17:39:53] [ERROR] Spawned process exited abnormally (code 255; tried to run: [/opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/autobuild.sh])
`));
});
function testErrorMatcher(matcherName, logSample) {
if (!(matcherName in error_matcher_1.namedMatchersForTesting)) {
throw new Error(`Unknown matcher ${matcherName}`);
}
const regex = error_matcher_1.namedMatchersForTesting[matcherName].outputRegex;
if (regex === undefined) {
throw new Error(`Cannot test matcher ${matcherName} with null regex`);
}
return regex.test(logSample);
}
//# sourceMappingURL=error-matcher.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,aAAI,CAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}


@@ -7,32 +7,26 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result; return result;
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner")); const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const util = __importStar(require("./util"));
/** /**
* Check out repository at the given ref, and return the directory of the checkout. * Check out repository at the given ref, and return the directory of the checkout.
*/ */
async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, logger) { async function checkoutExternalRepository(repository, ref) {
logger.info(`Checking out ${repository}`); const folder = util.getRequiredEnvParam('RUNNER_TEMP');
const checkoutLocation = path.join(tempDir, repository, ref); core.info('Checking out ' + repository);
if (!checkoutLocation.startsWith(tempDir)) { const checkoutLocation = path.join(folder, repository);
// this still permits locations that mess with sibling repositories in `tempDir`, but that is acceptable
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
}
if (!fs.existsSync(checkoutLocation)) { if (!fs.existsSync(checkoutLocation)) {
const repoURL = `${githubUrl}/${repository}`; const repoURL = 'https://github.com/' + repository + '.git';
await new toolrunnner.ToolRunner("git", [ await exec.exec('git', ['clone', repoURL, checkoutLocation]);
"clone", await exec.exec('git', [
repoURL, '--work-tree=' + checkoutLocation,
checkoutLocation, '--git-dir=' + checkoutLocation + '/.git',
]).exec(); 'checkout', ref,
await new toolrunnner.ToolRunner("git", [ ]);
`--work-tree=${checkoutLocation}`,
`--git-dir=${checkoutLocation}/.git`,
"checkout",
ref,
]).exec();
} }
return checkoutLocation; return checkoutLocation;
} }
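Both columns above share the same git mechanics: clone once into a location derived from the temp directory, then check out the requested ref against that clone using explicit --work-tree/--git-dir so the call does not depend on the process working directory. A standalone sketch of that pattern with @actions/exec (names and paths are illustrative):

const exec = require('@actions/exec');
const fs = require('fs');
// Clone-then-checkout sketch mirroring checkoutExternalRepository above.
async function cloneAtRef(repoURL, ref, checkoutLocation) {
    if (!fs.existsSync(checkoutLocation)) {
        await exec.exec('git', ['clone', repoURL, checkoutLocation]);
        await exec.exec('git', [
            `--work-tree=${checkoutLocation}`,
            `--git-dir=${checkoutLocation}/.git`,
            'checkout',
            ref,
        ]);
    }
}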


@@ -1 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAI7B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,SAAiB,EACjB,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,GAAG,SAAS,IAAI,UAAU,EAAE,CAAC;QAC7C,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE;YACtC,OAAO;YACP,OAAO;YACP,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE;YACtC,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAlCD,gEAkCC"} {"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA+B;AAE/B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAAC,UAAkB,EAAE,GAAW;IAC9E,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,UAAU,CAAC,CAAC;IAExC,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;IACvD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,UAAU,GAAG,MAAM,CAAC;QAC5D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;QAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACrB,cAAc,GAAG,gBAAgB;YACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;YACzC,UAAU,EAAE,GAAG;SAChB,CAAC,CAAC;KACJ;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAjBD,gEAiBC"}


@@ -1,4 +1,7 @@
"use strict"; "use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) { var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod; if (mod && mod.__esModule) return mod;
var result = {}; var result = {};
@@ -6,90 +9,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod; result["default"] = mod;
return result; return result;
}; };
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const ava_1 = __importDefault(require("ava")); const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const externalQueries = __importStar(require("./external-queries")); const externalQueries = __importStar(require("./external-queries"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils"); const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util")); const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default); testing_utils_1.setupTests(ava_1.default);
ava_1.default("checkoutExternalQueries", async (t) => { ava_1.default("checkoutExternalQueries", async (t) => {
await util.withTmpDir(async (tmpDir) => { await util.withTmpDir(async (tmpDir) => {
// Create a test repo in a subdir of the temp dir. process.env["RUNNER_TEMP"] = tmpDir;
// It should have a default branch with two commits after the initial commit, where await externalQueries.checkoutExternalRepository("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b");
// - the first commit contains files 'a' and 'b' // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch
// - the second commit contains only 'a' t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
// Place the repo in a subdir because we're going to checkout a copy in tmpDir
const testRepoBaseDir = path.join(tmpDir, "test-repo-dir");
const repoName = "some/repo";
const repoPath = path.join(testRepoBaseDir, repoName);
const repoGitDir = path.join(repoPath, ".git");
// Run the given git command, and return the output.
// Passes --git-dir and --work-tree.
// Any stderr output is suppressed until the command fails.
const runGit = async function (command) {
let stdout = "";
let stderr = "";
command = [
`--git-dir=${repoGitDir}`,
`--work-tree=${repoPath}`,
...command,
];
console.log(`Running: git ${command.join(" ")}`);
try {
await new toolrunnner.ToolRunner("git", command, {
silent: true,
listeners: {
stdout: (data) => {
stdout += data.toString();
},
stderr: (data) => {
stderr += data.toString();
},
},
}).exec();
}
catch (e) {
console.log(`Command failed: git ${command.join(" ")}`);
process.stderr.write(stderr);
throw e;
}
return stdout.trim();
};
fs.mkdirSync(repoPath, { recursive: true });
await runGit(["init", repoPath]);
await runGit(["config", "user.email", "test@github.com"]);
await runGit(["config", "user.name", "Test Test"]);
fs.writeFileSync(path.join(repoPath, "a"), "a content");
await runGit(["add", "a"]);
await runGit(["commit", "-m", "commit1"]);
fs.writeFileSync(path.join(repoPath, "b"), "b content");
await runGit(["add", "b"]);
await runGit(["commit", "-m", "commit1"]);
const commit1Sha = await runGit(["rev-parse", "HEAD"]);
fs.unlinkSync(path.join(repoPath, "b"));
await runGit(["add", "b"]);
await runGit(["commit", "-m", "commit2"]);
const commit2Sha = await runGit(["rev-parse", "HEAD"]);
// Checkout the first commit, which should contain 'a' and 'b'
t.false(fs.existsSync(path.join(tmpDir, repoName)));
await externalQueries.checkoutExternalRepository(repoName, commit1Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
// Checkout the second commit as well, which should only contain 'a'
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
await externalQueries.checkoutExternalRepository(repoName, commit2Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
}); });
}); });
//# sourceMappingURL=external-queries.test.js.map //# sourceMappingURL=external-queries.test.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,0EAA4D;AAC5D,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE,OAAO,EAAE;oBAC/C,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CAAC,CAAC,IAAI,EAAE,CAAC;aACX;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QAEnD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAA
Q,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"} {"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,0BAA0B,CAAC,kBAAkB,EAAE,0CAA0C,CAAC,CAAC;QAEjH,mGAAmG;QACnG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

114 lib/finalize-db.js generated Normal file

@@ -0,0 +1,114 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, queriesStats, uploadStats, error) {
var _a, _b, _c;
const status = ((_a = queriesStats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined ? 'failure' : 'success';
const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const statusReport = {
...statusReportBase,
...(queriesStats || {}),
...(uploadStats || {}),
};
await util.sendStatusReport(statusReport);
}
async function createdDBForScannedLanguages(databaseFolder) {
const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
if (scannedLanguages) {
const codeql = codeql_1.getCodeQL();
for (const language of scannedLanguages.split(',')) {
core.startGroup('Extracting ' + language);
await codeql.extractScannedLanguage(path.join(databaseFolder, language), language);
core.endGroup();
}
}
}
async function finalizeDatabaseCreation(databaseFolder, config) {
await createdDBForScannedLanguages(databaseFolder);
const codeql = codeql_1.getCodeQL();
for (const language of config.languages) {
core.startGroup('Finalizing ' + language);
await codeql.finalizeDatabase(path.join(databaseFolder, language));
core.endGroup();
}
}
// Runs queries and creates sarif files in the given folder
async function runQueries(databaseFolder, sarifFolder, config) {
const codeql = codeql_1.getCodeQL();
for (let language of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + language);
const queries = config.queries[language] || [];
if (queries.length === 0) {
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
}
try {
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = path.join(databaseFolder, language + '-queries.qls');
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync(querySuite, querySuiteContents);
core.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
const sarifFile = path.join(sarifFolder, language + '.sarif');
await codeql.databaseAnalyze(path.join(databaseFolder, language), sarifFile, querySuite);
core.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
core.endGroup();
}
catch (e) {
// For now the fields about query performance are not populated
return {
analyze_failure_language: language,
};
}
}
return {};
}
async function run() {
const startedAt = new Date();
let queriesStats = undefined;
let uploadStats = undefined;
try {
if (util.should_abort('finish', true) ||
!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
return;
}
const config = await configUtils.getConfig();
core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
const databaseFolder = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_DATABASE_DIR);
const sarifFolder = core.getInput('output');
await io.mkdirP(sarifFolder);
core.info('Finalizing database creation');
await finalizeDatabaseCreation(databaseFolder, config);
core.info('Analyzing database');
queriesStats = await runQueries(databaseFolder, sarifFolder, config);
if ('true' === core.getInput('upload')) {
uploadStats = await upload_lib.upload(sarifFolder);
}
}
catch (error) {
core.setFailed(error.message);
await sendStatusReport(startedAt, queriesStats, uploadStats, error);
return;
}
await sendStatusReport(startedAt, queriesStats, uploadStats);
}
run().catch(e => {
core.setFailed("analyze action failed: " + e);
console.log(e);
});
//# sourceMappingURL=finalize-db.js.map
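One detail worth noting in runQueries above: the selected queries are written to a per-language .qls file and only that file path is passed to the CLI, which avoids command-line length limits (particularly on Windows). A standalone sketch of just that step (paths are illustrative):

const fs = require('fs');
const path = require('path');
// Returns the query suite path that would be handed to `codeql database analyze`.
function writeQuerySuite(databaseFolder, language, queries) {
    const querySuite = path.join(databaseFolder, language + '-queries.qls');
    const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
    fs.writeFileSync(querySuite, querySuiteContents);
    return querySuite;
}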

1 lib/finalize-db.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"finalize-db.js","sourceRoot":"","sources":["../src/finalize-db.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,gDAAkC;AAClC,uCAAyB;AACzB,2CAA6B;AAE7B,qCAAqC;AACrC,4DAA8C;AAC9C,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAiC/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,YAA6C,EAC7C,WAAsD,EACtD,KAAa;;IAEb,MAAM,MAAM,GAAG,OAAA,YAAY,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IACnH,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,QAAE,KAAK,0CAAE,OAAO,QAAE,KAAK,0CAAE,KAAK,CAAC,CAAC;IACtH,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,YAAY,IAAI,EAAE,CAAC;QACvB,GAAG,CAAC,WAAW,IAAI,EAAE,CAAC;KACvB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,4BAA4B,CAAC,cAAsB;IAChE,MAAM,gBAAgB,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,+BAA+B,CAAC,CAAC;IAChF,IAAI,gBAAgB,EAAE;QACpB,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,KAAK,MAAM,QAAQ,IAAI,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE;YAClD,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;YAC1C,MAAM,MAAM,CAAC,sBAAsB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;YACnF,IAAI,CAAC,QAAQ,EAAE,CAAC;SACjB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CAAC,cAAsB,EAAE,MAA0B;IACxF,MAAM,4BAA4B,CAAC,cAAc,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;QAC1C,MAAM,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC,CAAC;QACnE,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CACvB,cAAsB,EACtB,WAAmB,EACnB,MAA0B;IAE1B,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,IAAI,QAAQ,IAAI,EAAE,CAAC,WAAW,CAAC,cAAc,CAAC,EAAE;QACnD,IAAI,CAAC,UAAU,CAAC,YAAY,GAAG,QAAQ,CAAC,CAAC;QAEzC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,GAAG,QAAQ,GAAG,gDAAgD,CAAC,CAAC;SACrG;QAED,IAAI;YACF,uEAAuE;YACvE,2EAA2E;YAC3E,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,GAAG,cAAc,CAAC,CAAC;YACxE,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxE,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;YACjD,IAAI,CAAC,KAAK,CAAC,uBAAuB,GAAG,QAAQ,GAAG,OAAO,GAAG,kBAAkB,CAAC,CAAC;YAE9E,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;YAE9D,MAAM,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,CAAC;YAEzF,IAAI,CAAC,KAAK,CAAC,6BAA6B,GAAG,QAAQ,GAAG,eAAe,GAAG,SAAS,GAAG,GAAG,CAAC,CAAC;YACzF,IAAI,CAAC,QAAQ,EAAE,CAAC;SAEjB;QAAC,OAAO,CAAC,EAAE;YACV,+DAA+D;YAC/D,OAAO;gBACL,wBAAwB,EAAE,QAAQ;aACnC,CAAC;SACH;KACF;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAoC,SAAS,CAAC;IAC9D,IAAI,WAAW,GAA8C,SAAS,CAAC;IACvE,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,QAAQ,EAAE,IAAI,CAAC;YACnC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YACxG,OAAO;SACR;QACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,CAAC;QAE7C,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,0BAA0B,EAAE,EAAE,CAAC,CAAC;QAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEzD,MAAM,cAAc,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEtF,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAC5C,MAAM,EAAE,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;QAE7B,IAAI,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;QAC1C,MAAM,wBAAwB,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;QAEvD,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAChC,YAAY,GAAG,MAAM,UAAU,CAAC,cAAc,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;QAErE,IAAI,MAAM,KAAK,IAAI,CAAC,Q
AAQ,CAAC,QAAQ,CAAC,EAAE;YACtC,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;SACpD;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,gBAAgB,CAAC,SAAS,EAAE,YAAY,EAAE,WAAW,EAAE,KAAK,CAAC,CAAC;QACpE,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,YAAY,EAAE,WAAW,CAAC,CAAC;AAC/D,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,yBAAyB,GAAG,CAAC,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

2 lib/finalize-db.test.js generated Normal file

@@ -0,0 +1,2 @@
"use strict";
//# sourceMappingURL=finalize-db.test.js.map


@@ -0,0 +1 @@
{"version":3,"file":"finalize-db.test.js","sourceRoot":"","sources":["../src/finalize-db.test.ts"],"names":[],"mappings":""}

64 lib/fingerprints.js generated

@@ -10,12 +10,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod }; return (mod && mod.__esModule) ? mod : { "default": mod };
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const long_1 = __importDefault(require("long")); const long_1 = __importDefault(require("long"));
const tab = "\t".charCodeAt(0); const tab = '\t'.charCodeAt(0);
const space = " ".charCodeAt(0); const space = ' '.charCodeAt(0);
const lf = "\n".charCodeAt(0); const lf = '\n'.charCodeAt(0);
const cr = "\r".charCodeAt(0); const cr = '\r'.charCodeAt(0);
const BLOCK_SIZE = 100; const BLOCK_SIZE = 100;
const MOD = long_1.default.fromInt(37); // L const MOD = long_1.default.fromInt(37); // L
// Compute the starting point for the hash mod // Compute the starting point for the hash mod
@@ -47,7 +48,7 @@ function hash(callback, input) {
const lineNumbers = Array(BLOCK_SIZE).fill(-1); const lineNumbers = Array(BLOCK_SIZE).fill(-1);
// The current hash value, updated as we read each character // The current hash value, updated as we read each character
let hash = long_1.default.ZERO; let hash = long_1.default.ZERO;
const firstMod = computeFirstMod(); let firstMod = computeFirstMod();
// The current index in the window, will wrap around to zero when we reach BLOCK_SIZE // The current index in the window, will wrap around to zero when we reach BLOCK_SIZE
let index = 0; let index = 0;
// The line number of the character we are currently processing from the input // The line number of the character we are currently processing from the input
@@ -61,12 +62,12 @@ function hash(callback, input) {
const hashCounts = {}; const hashCounts = {};
// Output the current hash and line number to the callback function // Output the current hash and line number to the callback function
const outputHash = function () { const outputHash = function () {
const hashValue = hash.toUnsigned().toString(16); let hashValue = hash.toUnsigned().toString(16);
if (!hashCounts[hashValue]) { if (!hashCounts[hashValue]) {
hashCounts[hashValue] = 0; hashCounts[hashValue] = 0;
} }
hashCounts[hashValue]++; hashCounts[hashValue]++;
callback(lineNumbers[index], `${hashValue}:${hashCounts[hashValue]}`); callback(lineNumbers[index], hashValue + ":" + hashCounts[hashValue]);
lineNumbers[index] = -1; lineNumbers[index] = -1;
}; };
// Update the current hash value and increment the index in the window // Update the current hash value and increment the index in the window
@@ -121,7 +122,7 @@ function hash(callback, input) {
exports.hash = hash; exports.hash = hash;
// Generate a hash callback function that updates the given result in-place // Generate a hash callback function that updates the given result in-place
// when it receives a hash for the correct line number. Ignores hashes for other lines. // when it receives a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(result, location, logger) { function locationUpdateCallback(result, location) {
var _a, _b; var _a, _b;
let locationStartLine = (_b = (_a = location.physicalLocation) === null || _a === void 0 ? void 0 : _a.region) === null || _b === void 0 ? void 0 : _b.startLine; let locationStartLine = (_b = (_a = location.physicalLocation) === null || _a === void 0 ? void 0 : _a.region) === null || _b === void 0 ? void 0 : _b.startLine;
if (locationStartLine === undefined) { if (locationStartLine === undefined) {
@@ -145,7 +146,10 @@ function locationUpdateCallback(result, location, logger) {
result.partialFingerprints.primaryLocationLineHash = hash; result.partialFingerprints.primaryLocationLineHash = hash;
} }
else if (existingFingerprint !== hash) { else if (existingFingerprint !== hash) {
logger.warning(`Calculated fingerprint of ${hash} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}`); core.warning('Calculated fingerprint of ' + hash +
' for file ' + location.physicalLocation.artifactLocation.uri +
' line ' + lineNumber +
', but found existing inconsistent fingerprint value ' + existingFingerprint);
} }
}; };
} }
@@ -153,48 +157,48 @@ function locationUpdateCallback(result, location, logger) {
// the source file so we can hash it. // the source file so we can hash it.
// If possible returns an absolute file path for the source file, // If possible returns an absolute file path for the source file,
// or if not possible then returns undefined. // or if not possible then returns undefined.
function resolveUriToFile(location, artifacts, checkoutPath, logger) { function resolveUriToFile(location, artifacts) {
// This may be referencing an artifact // This may be referencing an artifact
if (!location.uri && location.index !== undefined) { if (!location.uri && location.index !== undefined) {
if (typeof location.index !== "number" || if (typeof location.index !== 'number' ||
location.index < 0 || location.index < 0 ||
location.index >= artifacts.length || location.index >= artifacts.length ||
typeof artifacts[location.index].location !== "object") { typeof artifacts[location.index].location !== 'object') {
logger.debug(`Ignoring location as URI "${location.index}" is invalid`); core.debug(`Ignoring location as URI "${location.index}" is invalid`);
return undefined; return undefined;
} }
location = artifacts[location.index].location; location = artifacts[location.index].location;
} }
// Get the URI and decode // Get the URI and decode
if (typeof location.uri !== "string") { if (typeof location.uri !== 'string') {
logger.debug(`Ignoring location as index "${location.uri}" is invalid`); core.debug(`Ignoring location as index "${location.uri}" is invalid`);
return undefined; return undefined;
} }
let uri = decodeURIComponent(location.uri); let uri = decodeURIComponent(location.uri);
// Remove a file scheme, and abort if the scheme is anything else // Remove a file scheme, and abort if the scheme is anything else
const fileUriPrefix = "file://"; const fileUriPrefix = 'file://';
if (uri.startsWith(fileUriPrefix)) { if (uri.startsWith(fileUriPrefix)) {
uri = uri.substring(fileUriPrefix.length); uri = uri.substring(fileUriPrefix.length);
} }
if (uri.indexOf("://") !== -1) { if (uri.indexOf('://') !== -1) {
logger.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`); core.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
return undefined; return undefined;
} }
// Discard any absolute paths that aren't in the src root // Discard any absolute paths that aren't in the src root
const srcRootPrefix = `${checkoutPath}/`; const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) { if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`); core.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
return undefined; return undefined;
} }
// Just assume a relative path is relative to the src root. // Just assume a relative path is relative to the src root.
// This is not necessarily true but should be a good approximation // This is not necessarily true but should be a good approximation
// and here we likely want to err on the side of handling more cases. // and here we likely want to err on the side of handling more cases.
if (!uri.startsWith("/")) { if (!uri.startsWith('/')) {
uri = srcRootPrefix + uri; uri = srcRootPrefix + uri;
} }
// Check the file exists // Check the file exists
if (!fs.existsSync(uri)) { if (!fs.existsSync(uri)) {
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`); core.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
return undefined; return undefined;
} }
return uri; return uri;
@@ -202,37 +206,37 @@ function resolveUriToFile(location, artifacts, checkoutPath, logger) {
exports.resolveUriToFile = resolveUriToFile; exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file // Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents. // and return an updated sarif file contents.
function addFingerprints(sarifContents, checkoutPath, logger) { function addFingerprints(sarifContents) {
var _a, _b; var _a, _b;
const sarif = JSON.parse(sarifContents); let sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct // Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location // callbacks to accept hashes for that file and update the location
const callbacksByFile = {}; const callbacksByFile = {};
for (const run of sarif.runs || []) { for (const run of sarif.runs || []) {
// We may need the list of artifacts to resolve against // We may need the list of artifacts to resolve against
const artifacts = run.artifacts || []; let artifacts = run.artifacts || [];
for (const result of run.results || []) { for (const result of run.results || []) {
// Check the primary location is defined correctly and is in the src root // Check the primary location is defined correctly and is in the src root
const primaryLocation = (result.locations || [])[0]; const primaryLocation = (result.locations || [])[0];
if (!((_b = (_a = primaryLocation) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation)) { if (!((_b = (_a = primaryLocation) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation)) {
logger.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`); core.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
continue; continue;
} }
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, checkoutPath, logger); const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
if (!filepath) { if (!filepath) {
continue; continue;
} }
if (!callbacksByFile[filepath]) { if (!callbacksByFile[filepath]) {
callbacksByFile[filepath] = []; callbacksByFile[filepath] = [];
} }
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation, logger)); callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation));
} }
} }
// Now hash each file that was found // Now hash each file that was found
Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => { Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => {
// A callback that forwards the hash to all other callbacks for that file // A callback that forwards the hash to all other callbacks for that file
const teeCallback = function (lineNumber, hash) { const teeCallback = function (lineNumber, hash) {
Object.values(callbacks).forEach((c) => c(lineNumber, hash)); Object.values(callbacks).forEach(c => c(lineNumber, hash));
}; };
const fileContents = fs.readFileSync(filepath).toString(); const fileContents = fs.readFileSync(filepath).toString();
hash(teeCallback, fileContents); hash(teeCallback, fileContents);
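
The hunk above is the per-line fingerprinting used to match code scanning alerts across commits: a Rabin-Karp style rolling hash over a 100-character block, emitted once per line together with an occurrence counter. Below is a minimal TypeScript sketch of the same idea, not part of this diff, simplified to a per-line window and BigInt arithmetic (all names are illustrative, not the module's API):

// Simplified illustration of windowed line fingerprints: hash a small window of
// content starting at each line and append an occurrence counter so identical
// snippets in the same file still get distinct fingerprints.
const WINDOW = 5; // the real implementation rolls over a 100-character block
const MOD = 37n;

function lineFingerprints(contents: string): string[] {
  const lines = contents.split(/\r\n|\r|\n/);
  const counts: Record<string, number> = {};
  return lines.map((_, i) => {
    let h = 0n;
    for (const ch of lines.slice(i, i + WINDOW).join("\n")) {
      // polynomial hash update, truncated to 64 bits like the Long-based original
      h = (h * MOD + BigInt(ch.charCodeAt(0))) & 0xffffffffffffffffn;
    }
    const key = h.toString(16);
    counts[key] = (counts[key] || 0) + 1;
    return `${key}:${counts[key]}`;
  });
}

// lineFingerprints("x = 1\nprint(x)\n") returns one "<hash>:<count>" entry per line.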

File diff suppressed because one or more lines are too long

View File

@@ -14,12 +14,11 @@ const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const fingerprints = __importStar(require("./fingerprints")); const fingerprints = __importStar(require("./fingerprints"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils"); const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default); testing_utils_1.setupTests(ava_1.default);
function testHash(t, input, expectedHashes) { function testHash(t, input, expectedHashes) {
let index = 0; let index = 0;
const callback = function (lineNumber, hash) { let callback = function (lineNumber, hash) {
t.is(lineNumber, index + 1); t.is(lineNumber, index + 1);
t.is(hash, expectedHashes[index]); t.is(hash, expectedHashes[index]);
index++; index++;
@@ -27,7 +26,7 @@ function testHash(t, input, expectedHashes) {
fingerprints.hash(callback, input); fingerprints.hash(callback, input);
t.is(index, input.split(/\r\n|\r|\n/).length); t.is(index, input.split(/\r\n|\r|\n/).length);
} }
ava_1.default("hash", (t) => { ava_1.default('hash', (t) => {
// Try empty file // Try empty file
testHash(t, "", ["c129715d7a2bc9a3:1"]); testHash(t, "", ["c129715d7a2bc9a3:1"]);
// Try various combinations of newline characters // Try various combinations of newline characters
@@ -35,7 +34,7 @@ ava_1.default("hash", (t) => {
"271789c17abda88f:1", "271789c17abda88f:1",
"54703d4cd895b18:1", "54703d4cd895b18:1",
"180aee12dab6264:1", "180aee12dab6264:1",
"a23a3dc5e078b07b:1", "a23a3dc5e078b07b:1"
]); ]);
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [ testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
"8b7cf3e952e7aeb2:1", "8b7cf3e952e7aeb2:1",
@@ -93,85 +92,68 @@ ava_1.default("hash", (t) => {
"a9cf91f7bbf1862b:1", "a9cf91f7bbf1862b:1",
"55ec222b86bcae53:1", "55ec222b86bcae53:1",
"cc97dc7b1d7d8f7b:1", "cc97dc7b1d7d8f7b:1",
"c129715d7a2bc9a3:1", "c129715d7a2bc9a3:1"
]);
testHash(t, "x = 2\nx = 1\nprint(x)\nx = 3\nprint(x)\nx = 4\nprint(x)\n", [
"e54938cc54b302f1:1",
"bb609acbe9138d60:1",
"1131fd5871777f34:1",
"5c482a0f8b35ea28:1",
"54517377da7028d2:1",
"2c644846cb18d53e:1",
"f1b89f20de0d133:1",
"c129715d7a2bc9a3:1",
]); ]);
}); });
function testResolveUriToFile(uri, index, artifactsURIs) { function testResolveUriToFile(uri, index, artifactsURIs) {
const location = { uri, index }; const location = { "uri": uri, "index": index };
const artifacts = artifactsURIs.map((uri) => ({ location: { uri } })); const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), logging_1.getRunnerLogger(true)); return fingerprints.resolveUriToFile(location, artifacts);
} }
ava_1.default("resolveUriToFile", (t) => { ava_1.default('resolveUriToFile', t => {
// The resolveUriToFile method checks that the file exists and is in the right directory // The resolveUriToFile method checks that the file exists and is in the right directory
// so we need to give it real files to look at. We will use this file as an example. // so we need to give it real files to look at. We will use this file as an example.
// For this to work we require the current working directory to be a parent, but this // For this to work we require the current working directory to be a parent, but this
// should generally always be the case so this is fine. // should generally always be the case so this is fine.
const cwd = process.cwd(); const cwd = process.cwd();
const filepath = __filename; const filepath = __filename;
t.true(filepath.startsWith(`${cwd}/`)); t.true(filepath.startsWith(cwd + '/'));
const relativeFilepaht = filepath.substring(cwd.length + 1); const relativeFilepaht = filepath.substring(cwd.length + 1);
process.env['GITHUB_WORKSPACE'] = cwd;
// Absolute paths are unmodified // Absolute paths are unmodified
t.is(testResolveUriToFile(filepath, undefined, []), filepath); t.is(testResolveUriToFile(filepath, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${filepath}`, undefined, []), filepath); t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
// Relative paths are made absolute // Relative paths are made absolute
t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath); t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${relativeFilepaht}`, undefined, []), filepath); t.is(testResolveUriToFile('file://' + relativeFilepaht, undefined, []), filepath);
// Absolute paths outside the src root are discarded // Absolute paths outside the src root are discarded
t.is(testResolveUriToFile("/src/foo/bar.js", undefined, []), undefined); t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
t.is(testResolveUriToFile("file:///src/foo/bar.js", undefined, []), undefined); t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
// Other schemes are discarded // Other schemes are discarded
t.is(testResolveUriToFile(`https://${filepath}`, undefined, []), undefined); t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
t.is(testResolveUriToFile(`ftp://${filepath}`, undefined, []), undefined); t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
// Invalid URIs are discarded // Invalid URIs are discarded
t.is(testResolveUriToFile(1, undefined, []), undefined); t.is(testResolveUriToFile(1, undefined, []), undefined);
t.is(testResolveUriToFile(undefined, undefined, []), undefined); t.is(testResolveUriToFile(undefined, undefined, []), undefined);
// Non-existent files are discarded // Non-existent files are discarded
t.is(testResolveUriToFile(`${filepath}2`, undefined, []), undefined); t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
// Index is resolved // Index is resolved
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath); t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ["foo", filepath]), filepath); t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
// Invalid indexes are discarded // Invalid indexes are discarded
t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined); t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
t.is(testResolveUriToFile(undefined, "0", [filepath]), undefined); t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
}); });
ava_1.default("addFingerprints", (t) => { ava_1.default('addFingerprints', t => {
// Run an end-to-end test on a test file // Run an end-to-end test on a test file
let input = fs let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`) let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON // The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input)); input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected)); expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory // The URIs in the SARIF files resolve to files in the testdata directory
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`); process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected); t.deepEqual(fingerprints.addFingerprints(input), expected);
}); });
ava_1.default("missingRegions", (t) => { ava_1.default('missingRegions', t => {
// Run an end-to-end test on a test file // Run an end-to-end test on a test file
let input = fs let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`) let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON // The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input)); input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected)); expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory // The URIs in the SARIF files resolve to files in the testdata directory
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`); process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected); t.deepEqual(fingerprints.addFingerprints(input), expected);
}); });
//# sourceMappingURL=fingerprints.test.js.map //# sourceMappingURL=fingerprints.test.js.map
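
The tests above exercise resolveUriToFile, which maps SARIF location URIs onto files in the checkout before they are hashed. A compact TypeScript sketch of the same resolution rules (illustrative only; the real function also resolves artifact indices and logs why a location was skipped):

import * as fs from "fs";

// Resolve a SARIF location URI to an absolute path under the checkout, or
// return undefined when the location cannot safely be fingerprinted.
function resolveUri(uri: string, checkoutPath: string): string | undefined {
  let p = decodeURIComponent(uri);
  if (p.startsWith("file://")) p = p.substring("file://".length); // strip the file scheme
  if (p.includes("://")) return undefined;                        // reject any other scheme
  const srcRoot = `${checkoutPath}/`;
  if (p.startsWith("/") && !p.startsWith(srcRoot)) return undefined; // absolute but outside the src root
  if (!p.startsWith("/")) p = srcRoot + p;                        // treat relative paths as relative to the src root
  return fs.existsSync(p) ? p : undefined;                        // only fingerprint files that exist
}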

File diff suppressed because one or more lines are too long

99
lib/init-action.js generated
View File

@@ -1,99 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
async function sendSuccessStatusReport(startedAt, config) {
var _a;
const statusReportBase = await actionsUtil.createStatusReportBase("init", "success", startedAt);
const languages = config.languages.join(",");
const workflowLanguages = actionsUtil.getOptionalInput("languages");
const paths = (config.originalUserInput.paths || []).join(",");
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(",");
const disableDefaultQueries = config.originalUserInput["disable-default-queries"]
? languages
: "";
const queries = [];
let queriesInput = (_a = actionsUtil.getOptionalInput("queries")) === null || _a === void 0 ? void 0 : _a.trim();
if (queriesInput === undefined || queriesInput.startsWith("+")) {
queries.push(...(config.originalUserInput.queries || []).map((q) => q.uses));
}
if (queriesInput !== undefined) {
queriesInput = queriesInput.startsWith("+")
? queriesInput.substr(1)
: queriesInput;
queries.push(...queriesInput.split(","));
}
const statusReport = {
...statusReportBase,
languages,
workflow_languages: workflowLanguages || "",
paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries.join(","),
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
const logger = logging_1.getActionsLogger();
let config;
let codeql;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt), true))) {
return;
}
const repositoryNWO = repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY"));
const languages = await languages_1.getLanguages(actionsUtil.getOptionalInput("languages"), repositoryNWO, actionsUtil.getRequiredInput("token"), actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), logger);
codeql = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), languages, actionsUtil.getRequiredInput("token"), actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", logger);
config = await init_1.initConfig(languages, actionsUtil.getOptionalInput("queries"), actionsUtil.getOptionalInput("config-file"), actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, actionsUtil.getRequiredEnvParam("GITHUB_WORKSPACE"), actionsUtil.getRequiredInput("token"), actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), logger);
}
catch (e) {
core.setFailed(e.message);
console.log(e);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "aborted", startedAt, e.message));
return;
}
try {
// Forward Go flags
const goFlags = process.env["GOFLAGS"];
if (goFlags) {
core.exportVariable("GOFLAGS", goFlags);
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
}
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env["CODEQL_RAM"] || "6500";
core.exportVariable("CODEQL_RAM", codeqlRam);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig !== undefined) {
Object.entries(tracerConfig.env).forEach(([key, value]) => core.exportVariable(key, value));
if (process.platform === "win32") {
await init_1.injectWindowsTracer("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
}
}
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "failure", startedAt, error.message, error.stack));
return;
}
await sendSuccessStatusReport(startedAt, config);
}
run().catch((e) => {
core.setFailed(`init action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=init-action.js.map
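
The deleted init-action.js above also reports which queries were selected: a "queries" input starting with "+" adds to the queries from the config file instead of replacing them. A small TypeScript sketch of that merge rule (function and parameter names are illustrative):

// Combine the workflow-level "queries" input with the queries from the config
// file. A leading "+" means "append to the config's queries" rather than
// "replace them".
function mergeQueries(queriesInput: string | undefined, configQueries: string[]): string[] {
  const queries: string[] = [];
  const trimmed = queriesInput?.trim();
  if (trimmed === undefined || trimmed.startsWith("+")) {
    queries.push(...configQueries);
  }
  if (trimmed !== undefined) {
    const own = trimmed.startsWith("+") ? trimmed.substring(1) : trimmed;
    queries.push(...own.split(","));
  }
  return queries;
}

// mergeQueries("+security-extended", ["./custom.ql"]) -> ["./custom.ql", "security-extended"]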

View File

@@ -1 +0,0 @@
{"version":3,"file":"init-action.js","sourceRoot":"","sources":["../src/init-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAG9C,iCAA8E;AAC9E,2CAA2C;AAC3C,uCAA6C;AAC7C,6CAAkD;AAkBlD,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,MAA0B;;IAE1B,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,MAAM,EACN,SAAS,EACT,SAAS,CACV,CAAC;IAEF,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC7C,MAAM,iBAAiB,GAAG,WAAW,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;IACpE,MAAM,KAAK,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CACvE,GAAG,CACJ,CAAC;IACF,MAAM,qBAAqB,GAAG,MAAM,CAAC,iBAAiB,CACpD,yBAAyB,CAC1B;QACC,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,EAAE,CAAC;IAEP,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,SAAG,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,0CAAE,IAAI,EAAE,CAAC;IACnE,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;QAC9D,OAAO,CAAC,IAAI,CACV,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,OAAO,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAC/D,CAAC;KACH;IACD,IAAI,YAAY,KAAK,SAAS,EAAE;QAC9B,YAAY,GAAG,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC;YACzC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC;YACxB,CAAC,CAAC,YAAY,CAAC;QACjB,OAAO,CAAC,IAAI,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;KAC1C;IAED,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,SAAS;QACT,kBAAkB,EAAE,iBAAiB,IAAI,EAAE;QAC3C,KAAK;QACL,YAAY,EAAE,WAAW;QACzB,uBAAuB,EAAE,qBAAqB;QAC9C,OAAO,EAAE,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC;KAC3B,CAAC;IAEF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,MAAc,CAAC;IAEnB,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CAAC,MAAM,EAAE,UAAU,EAAE,SAAS,CAAC,EACvE,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,aAAa,GAAG,+BAAkB,CACtC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CACrD,CAAC;QAEF,MAAM,SAAS,GAAG,MAAM,wBAAY,CAClC,WAAW,CAAC,gBAAgB,CAAC,WAAW,CAAC,EACzC,aAAa,EACb,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,MAAM,CACP,CAAC;QAEF,MAAM,GAAG,MAAM,iBAAU,CACvB,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,SAAS,EACT,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,SAAS,EACT,MAAM,CACP,CAAC;QAEF,MAAM,GAAG,MAAM,iBAAU,CACvB,SAAS,EACT,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EACvC,WAAW,CAAC,gBAAgB,CAAC,aAAa,CAAC,EAC3C,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,MAAM,EACN,WAAW,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,EACnD,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,MAAM,CACP,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,SAAS,EACT,SAAS,EACT,CAAC,CAAC,OAAO,CACV,CACF,CAAC;QACF,OAAO;KACR;IAED,IAAI;QACF,mBAAmB;QACnB,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;QACvC,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YACxC,IAAI,CAAC,OAAO,CACV,6GAA6G,CAC9G,CAAC;SACH;QAED,mGAAmG;QACnG,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,MAAM,CAAC;QACtD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;QAE7C,MAAM,YAAY,GAAG,MAAM,cAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnD,IAAI,YAAY,KAAK,SAAS,EAAE;YAC9B,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CACxD,IAAI,CAAC,cAAc,CAAC,GAAG
,EAAE,KAAK,CAAC,CAChC,CAAC;YAEF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,0BAAmB,CACvB,mBAAmB,EACnB,SAAS,EACT,MAAM,EACN,MAAM,EACN,YAAY,CACb,CAAC;aACH;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;AACnD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,EAAE,CAAC,CAAC;IAC3C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

124
lib/init.js generated
View File

@@ -1,124 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
async function initCodeQL(codeqlURL, languages, githubAuth, githubUrl, tempDir, toolsDir, mode, logger) {
logger.startGroup("Setup CodeQL tools");
const codeql = await codeql_1.setupCodeQL(codeqlURL, languages, githubAuth, githubUrl, tempDir, toolsDir, mode, logger);
await codeql.printVersion();
logger.endGroup();
return codeql;
}
exports.initCodeQL = initCodeQL;
async function initConfig(languages, queriesInput, configFile, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languages, queriesInput, configFile, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;
}
exports.initConfig = initConfig;
async function runInit(codeql, config) {
const sourceRoot = path.resolve();
fs.mkdirSync(util.getCodeQLDatabasesDir(config.tempDir), { recursive: true });
// TODO: replace this code once CodeQL supports multi-language tracing
for (const language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config.tempDir, language), language, sourceRoot);
}
return await tracer_config_1.getCombinedTracerConfig(config, codeql);
}
exports.runInit = runInit;
// Runs a powershell script to inject the tracer into a parent process
// so it can tracer future processes, hopefully including the build process.
// If processName is given then injects into the nearest parent process with
// this name, otherwise uses the processLevel-th parent if defined, otherwise
// defaults to the 3rd parent as a rough guess.
async function injectWindowsTracer(processName, processLevel, config, codeql, tracerConfig) {
let script;
if (processName !== undefined) {
script = `
Param(
[Parameter(Position=0)]
[String]
$tracer
)
$id = $PID
while ($true) {
$p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
Write-Host "Found process: $p"
if ($p -eq $null) {
throw "Could not determine ${processName} process"
}
if ($p[0].Name -eq "${processName}") {
Break
} else {
$id = $p[0].ParentProcessId
}
}
Write-Host "Final process: $p"
Invoke-Expression "&$tracer --inject=$id"`;
}
else {
// If the level is not defined then guess at the 3rd parent process.
// This won't be correct in every setting but it should be enough in most settings,
// and overestimating is likely better in this situation so we definitely trace
// what we want, though this does run the risk of interfering with future CI jobs.
// Note that the default of 3 doesn't work on github actions, so we include a
// special case in the script that checks for Runner.Worker.exe so we can still work
// on actions if the runner is invoked there.
processLevel = processLevel || 3;
script = `
Param(
[Parameter(Position=0)]
[String]
$tracer
)
$id = $PID
for ($i = 0; $i -le ${processLevel}; $i++) {
$p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
Write-Host "Parent process \${i}: $p"
if ($p -eq $null) {
throw "Process tree ended before reaching required level"
}
# Special case just in case the runner is used on actions
if ($p[0].Name -eq "Runner.Worker.exe") {
Write-Host "Found Runner.Worker.exe process which means we are running on GitHub Actions"
Write-Host "Aborting search early and using process: $p"
Break
} else {
$id = $p[0].ParentProcessId
}
}
Write-Host "Final process: $p"
Invoke-Expression "&$tracer --inject=$id"`;
}
const injectTracerPath = path.join(config.tempDir, "inject-tracer.ps1");
fs.writeFileSync(injectTracerPath, script);
await new toolrunnner.ToolRunner("powershell", [
"-ExecutionPolicy",
"Bypass",
"-file",
injectTracerPath,
path.resolve(path.dirname(codeql.getPath()), "tools", "win64", "tracer.exe"),
], { env: { ODASA_TRACER_CONFIGURATION: tracerConfig.spec } }).exec();
}
exports.injectWindowsTracer = injectWindowsTracer;
//# sourceMappingURL=init.js.map
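
The functions above are composed in the same order by both callers in this diff (init-action.js and the runner CLI): set up the CodeQL tools, load the configuration, create one database per language, then inject the Windows tracer when needed. A rough TypeScript sketch of that wiring, with placeholder values for the token, repository and directories:

import { initCodeQL, initConfig, runInit, injectWindowsTracer } from "./init";
import { getLanguages } from "./languages";
import { getRunnerLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";

async function initPhase(githubAuth: string, githubUrl: string, tempDir: string, toolsDir: string) {
  const logger = getRunnerLogger(true);
  const repo = parseRepositoryNwo("octo-org/octo-repo"); // placeholder repository
  const languages = await getLanguages(undefined, repo, githubAuth, githubUrl, logger);
  const codeql = await initCodeQL(undefined, languages, githubAuth, githubUrl, tempDir, toolsDir, "runner", logger);
  const config = await initConfig(languages, undefined, undefined, tempDir, toolsDir, codeql, process.cwd(), githubAuth, githubUrl, logger);
  const tracerConfig = await runInit(codeql, config); // one CodeQL database per language
  if (tracerConfig !== undefined && process.platform === "win32") {
    await injectWindowsTracer(undefined, undefined, config, codeql, tracerConfig);
  }
  return { codeql, config, tracerConfig };
}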

View File

@@ -1 +0,0 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,SAAqB,EACrB,UAAkB,EAClB,SAAiB,EACjB,OAAe,EACf,QAAgB,EAChB,IAAe,EACf,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IAExC,MAAM,MAAM,GAAG,MAAM,oBAAW,CAC9B,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,EACT,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,MAAM,CACP,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAzBD,gCAyBC;AAEM,KAAK,UAAU,UAAU,CAC9B,SAAqB,EACrB,YAAgC,EAChC,UAA8B,EAC9B,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,UAAkB,EAClB,SAAiB,EACjB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,SAAS,EACT,YAAY,EACZ,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,UAAU,EACV,SAAS,EACT,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA5BD,gCA4BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,EACR,UAAU,CACX,CAAC;KACH;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnBD,0BAmBC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;gDAiBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,WAAW,CAAC,UAAU,CAC9B,YAAY,EACZ;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AAxFD,kDAwFC"}

130
lib/languages.js generated
View File

@@ -1,130 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const api = __importStar(require("./api-client"));
// All the languages supported by CodeQL
var Language;
(function (Language) {
Language["csharp"] = "csharp";
Language["cpp"] = "cpp";
Language["go"] = "go";
Language["java"] = "java";
Language["javascript"] = "javascript";
Language["python"] = "python";
})(Language = exports.Language || (exports.Language = {}));
// Additional names for languages
const LANGUAGE_ALIASES = {
c: Language.cpp,
"c++": Language.cpp,
"c#": Language.csharp,
typescript: Language.javascript,
};
// Translate from user input or GitHub's API names for languages to CodeQL's names for languages
function parseLanguage(language) {
// Normalise to lower case
language = language.toLowerCase();
// See if it's an exact match
if (language in Language) {
return language;
}
// Check language aliases
if (language in LANGUAGE_ALIASES) {
return LANGUAGE_ALIASES[language];
}
return undefined;
}
exports.parseLanguage = parseLanguage;
function isTracedLanguage(language) {
return ["cpp", "java", "csharp"].includes(language);
}
exports.isTracedLanguage = isTracedLanguage;
function isScannedLanguage(language) {
return !isTracedLanguage(language);
}
exports.isScannedLanguage = isScannedLanguage;
function getNoLanguagesError() {
return ("Did not detect any languages to analyze. " +
"Please update input in workflow or check that GitHub detects the correct languages in your repository.");
}
exports.getNoLanguagesError = getNoLanguagesError;
function getUnknownLanguagesError(languages) {
return `Did not recognise the following languages: ${languages.join(", ")}`;
}
exports.getUnknownLanguagesError = getUnknownLanguagesError;
/**
* Get the languages to analyse.
*
* The result is obtained from the action input parameter 'languages' if that
* has been set, otherwise it is deduced as all languages in the repo that
* can be analysed.
*
* If no languages could be detected from either the workflow or the repository
* then throw an error.
*/
async function getLanguages(languagesInput, repository, githubAuth, githubUrl, logger) {
// Obtain from action input 'languages' if set
let languages = (languagesInput || "")
.split(",")
.map((x) => x.trim())
.filter((x) => x.length > 0);
logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo(repository, githubAuth, githubUrl, logger);
logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
}
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
throw new Error(getNoLanguagesError());
}
// Make sure they are supported
const parsedLanguages = [];
const unknownLanguages = [];
for (const language of languages) {
const parsedLanguage = parseLanguage(language);
if (parsedLanguage === undefined) {
unknownLanguages.push(language);
}
else if (parsedLanguages.indexOf(parsedLanguage) === -1) {
parsedLanguages.push(parsedLanguage);
}
}
if (unknownLanguages.length > 0) {
throw new Error(getUnknownLanguagesError(unknownLanguages));
}
return parsedLanguages;
}
exports.getLanguages = getLanguages;
/**
* Gets the set of languages in the current repository
*/
async function getLanguagesInRepo(repository, githubAuth, githubUrl, logger) {
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
const response = await api
.getApiClient(githubAuth, githubUrl, true)
.repos.listLanguages({
owner: repository.owner,
repo: repository.repo,
});
logger.debug(`Languages API response: ${JSON.stringify(response)}`);
// The GitHub API is going to return languages in order of popularity,
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
const languages = new Set();
for (const lang of Object.keys(response.data)) {
const parsedLang = parseLanguage(lang);
if (parsedLang !== undefined) {
languages.add(parsedLang);
}
}
return [...languages];
}
//# sourceMappingURL=languages.js.map
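
getLanguagesInRepo above leans on JavaScript Sets preserving insertion order: the languages API lists languages by popularity, so after normalising through parseLanguage the first traced language in the resulting array is the one autobuild will prefer. A short TypeScript sketch of that step (the response shape shown is illustrative):

import { Language, parseLanguage } from "./languages";

// e.g. { TypeScript: 91011, Java: 5678, C: 1234 }, already ordered by popularity;
// parseLanguage collapses aliases (TypeScript -> javascript, C -> cpp) and the
// Set drops duplicates while keeping the original order.
function normalizeRepoLanguages(apiLanguages: Record<string, number>): Language[] {
  const languages = new Set<Language>();
  for (const lang of Object.keys(apiLanguages)) {
    const parsed = parseLanguage(lang);
    if (parsed !== undefined) {
      languages.add(parsed);
    }
  }
  return [...languages];
}

// normalizeRepoLanguages({ TypeScript: 91011, Java: 5678, C: 1234 })
//   -> [Language.javascript, Language.java, Language.cpp]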

View File

@@ -1 +0,0 @@
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;;;;;;;AAAA,kDAAoC;AAIpC,wCAAwC;AACxC,IAAY,QAOX;AAPD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;AACnB,CAAC,EAPW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAOnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACtD,CAAC;AAFD,4CAEC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC;AAED,SAAgB,mBAAmB;IACjC,OAAO,CACL,2CAA2C;QAC3C,wGAAwG,CACzG,CAAC;AACJ,CAAC;AALD,kDAKC;AAED,SAAgB,wBAAwB,CAAC,SAAmB;IAC1D,OAAO,8CAA8C,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;AAC9E,CAAC;AAFD,4DAEC;AAED;;;;;;;;;GASG;AACI,KAAK,UAAU,YAAY,CAChC,cAAkC,EAClC,UAAyB,EACzB,UAAkB,EAClB,SAAiB,EACjB,MAAc;IAEd,8CAA8C;IAC9C,IAAI,SAAS,GAAG,CAAC,cAAc,IAAI,EAAE,CAAC;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;SACpB,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IAC/B,MAAM,CAAC,IAAI,CAAC,iCAAiC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;IAE1E,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;QAC1B,qEAAqE;QACrE,SAAS,GAAG,MAAM,kBAAkB,CAClC,UAAU,EACV,UAAU,EACV,SAAS,EACT,MAAM,CACP,CAAC;QACF,MAAM,CAAC,IAAI,CACT,qCAAqC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE,CACjE,CAAC;KACH;IAED,iEAAiE;IACjE,qEAAqE;IACrE,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;QAC1B,MAAM,IAAI,KAAK,CAAC,mBAAmB,EAAE,CAAC,CAAC;KACxC;IAED,+BAA+B;IAC/B,MAAM,eAAe,GAAe,EAAE,CAAC;IACvC,MAAM,gBAAgB,GAAa,EAAE,CAAC;IACtC,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;QAChC,MAAM,cAAc,GAAG,aAAa,CAAC,QAAQ,CAAC,CAAC;QAC/C,IAAI,cAAc,KAAK,SAAS,EAAE;YAChC,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;SACjC;aAAM,IAAI,eAAe,CAAC,OAAO,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE;YACzD,eAAe,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;SACtC;KACF;IACD,IAAI,gBAAgB,CAAC,MAAM,GAAG,CAAC,EAAE;QAC/B,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,CAAC,CAAC;KAC7D;IAED,OAAO,eAAe,CAAC;AACzB,CAAC;AAjDD,oCAiDC;AAED;;GAEG;AACH,KAAK,UAAU,kBAAkB,CAC/B,UAAyB,EACzB,UAAkB,EAClB,SAAiB,EACjB,MAAc;IAEd,MAAM,CAAC,KAAK,CAAC,eAAe,UAAU,CAAC,KAAK,IAAI,UAAU,CAAC,IAAI,EAAE,CAAC,CAAC;IACnE,MAAM,QAAQ,GAAG,MAAM,GAAG;SACvB,YAAY,CAAC,UAAU,EAAE,SAAS,EAAE,IAAI,CAAC;SACzC,KAAK,CAAC,aAAa,CAAC;QACnB,KAAK,EAAE,UAAU,CAAC,KAAK;QACvB,IAAI,EAAE,UAAU,CAAC,IAAI;KACtB,CAAC,CAAC;IAEL,MAAM,CAAC,KAAK,CAAC,2BAA2B,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IAEpE,sEAAsE;IACtE,wFAAwF;IACxF,4FAA4F;IAC5F,qEAAqE;IACrE,MAAM,SAAS,GAAkB,IAAI,GAAG,EAAE,CAAC;IAC3C,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;QAC7C,MAAM,UAAU,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,UAAU,KAAK,SAAS,EAAE;YAC5B,SAAS,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;SAC3B;KACF;IACD,OAAO,CAAC,GAAG,SAAS,CAAC,CAAC;AACxB,CAAC"}

44
lib/languages.test.js generated
View File

@@ -1,44 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("parseLangauge", async (t) => {
// Exact matches
t.deepEqual(languages_1.parseLanguage("csharp"), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage("cpp"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("go"), languages_1.Language.go);
t.deepEqual(languages_1.parseLanguage("java"), languages_1.Language.java);
t.deepEqual(languages_1.parseLanguage("javascript"), languages_1.Language.javascript);
t.deepEqual(languages_1.parseLanguage("python"), languages_1.Language.python);
// Aliases
t.deepEqual(languages_1.parseLanguage("c"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("c++"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("c#"), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage("typescript"), languages_1.Language.javascript);
// Not matches
t.deepEqual(languages_1.parseLanguage("foo"), undefined);
t.deepEqual(languages_1.parseLanguage(" "), undefined);
t.deepEqual(languages_1.parseLanguage(""), undefined);
});
ava_1.default("isTracedLanguage", async (t) => {
t.true(languages_1.isTracedLanguage(languages_1.Language.cpp));
t.true(languages_1.isTracedLanguage(languages_1.Language.java));
t.true(languages_1.isTracedLanguage(languages_1.Language.csharp));
t.false(languages_1.isTracedLanguage(languages_1.Language.go));
t.false(languages_1.isTracedLanguage(languages_1.Language.javascript));
t.false(languages_1.isTracedLanguage(languages_1.Language.python));
});
ava_1.default("isScannedLanguage", async (t) => {
t.false(languages_1.isScannedLanguage(languages_1.Language.cpp));
t.false(languages_1.isScannedLanguage(languages_1.Language.java));
t.false(languages_1.isScannedLanguage(languages_1.Language.csharp));
t.true(languages_1.isScannedLanguage(languages_1.Language.go));
t.true(languages_1.isScannedLanguage(languages_1.Language.javascript));
t.true(languages_1.isScannedLanguage(languages_1.Language.python));
});
//# sourceMappingURL=languages.test.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"languages.test.js","sourceRoot":"","sources":["../src/languages.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,2CAKqB;AACrB,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,gBAAgB;IAChB,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IACtD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,MAAM,CAAC,EAAE,oBAAQ,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAC9D,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAEtD,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAE9D,cAAc;IACd,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC7C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;AAC5C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE1C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mBAAmB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACpC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACzC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE5C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC"}

26
lib/logging.js generated
View File

@@ -1,26 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
function getActionsLogger() {
return core;
}
exports.getActionsLogger = getActionsLogger;
function getRunnerLogger(debugMode) {
return {
debug: debugMode ? console.debug : () => undefined,
info: console.info,
warning: console.warn,
error: console.error,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
exports.getRunnerLogger = getRunnerLogger;
//# sourceMappingURL=logging.js.map
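
Both factories above return the same small surface, so the rest of the code can take a logger without caring whether it is running as an Action (where it forwards to @actions/core) or under the runner CLI (where it forwards to the console). A sketch of that shared shape plus a silent logger for tests (the interface name and exact signatures are illustrative; the real type lives in src/logging.ts):

// The six methods every logger in this codebase is expected to provide.
interface Logger {
  debug(message: string): void;
  info(message: string): void;
  warning(message: string): void;
  error(message: string): void;
  startGroup(name: string): void;
  endGroup(): void;
}

// A no-op logger, convenient in tests that do not care about output.
const silentLogger: Logger = {
  debug: () => undefined,
  info: () => undefined,
  warning: () => undefined,
  error: () => undefined,
  startGroup: () => undefined,
  endGroup: () => undefined,
};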

View File

@@ -1 +0,0 @@
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAYtC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAFD,4CAEC;AAED,SAAgB,eAAe,CAAC,SAAkB;IAChD,OAAO;QACL,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS;QAClD,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AATD,0CASC"}

14
lib/repository.js generated
View File

@@ -1,14 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function parseRepositoryNwo(input) {
const parts = input.split("/");
if (parts.length !== 2) {
throw new Error(`"${input}" is not a valid repository name`);
}
return {
owner: parts[0],
repo: parts[1],
};
}
exports.parseRepositoryNwo = parseRepositoryNwo;
//# sourceMappingURL=repository.js.map
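
A quick usage example for the helper above ("NWO" is the owner/name pair), not part of the change itself:

import { parseRepositoryNwo } from "./repository";

// "owner/repo" strings split into their two parts; anything else throws.
const nwo = parseRepositoryNwo("github/codeql-action");
console.log(nwo.owner); // "github"
console.log(nwo.repo); // "codeql-action"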

View File

@@ -1 +0,0 @@
{"version":3,"file":"repository.js","sourceRoot":"","sources":["../src/repository.ts"],"names":[],"mappings":";;AAMA,SAAgB,kBAAkB,CAAC,KAAa;IAC9C,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,MAAM,IAAI,KAAK,CAAC,IAAI,KAAK,kCAAkC,CAAC,CAAC;KAC9D;IACD,OAAO;QACL,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;QACf,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;KACf,CAAC;AACJ,CAAC;AATD,gDASC"}

272
lib/runner.js generated
View File

@@ -1,272 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = require("commander");
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const analyze_1 = require("./analyze");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
const program = new commander_1.Command();
program.version("0.0.1");
function parseGithubUrl(inputUrl) {
try {
const url = new URL(inputUrl);
// If we detect this is trying to refer to github.com
// then return with a fixed canonical URL.
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://github.com";
}
// Remove the API prefix if it's present
if (url.pathname.indexOf("/api/v3") !== -1) {
url.pathname = url.pathname.substring(0, url.pathname.indexOf("/api/v3"));
}
return url.toString();
}
catch (e) {
throw new Error(`"${inputUrl}" is not a valid URL`);
}
}
function getTempDir(userInput) {
const tempDir = path.join(userInput || process.cwd(), "codeql-runner");
if (!fs.existsSync(tempDir)) {
fs.mkdirSync(tempDir, { recursive: true });
}
return tempDir;
}
function getToolsDir(userInput) {
const toolsDir = userInput || path.join(os.homedir(), "codeql-runner-tools");
if (!fs.existsSync(toolsDir)) {
fs.mkdirSync(toolsDir, { recursive: true });
}
return toolsDir;
}
const codeqlEnvJsonFilename = "codeql-env.json";
// Imports the environment from codeqlEnvJsonFilename if not already present
function importTracerEnvironment(config) {
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
Object.keys(env).forEach((key) => (process.env[key] = env[key]));
}
}
// Allow the user to specify refs in full refs/heads/branch format
// or just the short branch name and prepend "refs/heads/" to it.
function parseRef(userInput) {
if (userInput.startsWith("refs/")) {
return userInput;
}
else {
return `refs/heads/${userInput}`;
}
}
// Parses the --trace-process-name arg from process.argv, or returns undefined
function parseTraceProcessName() {
for (let i = 0; i < process.argv.length - 1; i++) {
if (process.argv[i] === "--trace-process-name") {
return process.argv[i + 1];
}
}
return undefined;
}
// Parses the --trace-process-level arg from process.argv, or returns undefined
function parseTraceProcessLevel() {
for (let i = 0; i < process.argv.length - 1; i++) {
if (process.argv[i] === "--trace-process-level") {
const v = parseInt(process.argv[i + 1], 10);
return isNaN(v) ? undefined : v;
}
}
return undefined;
}
program
.command("init")
.description("Initializes CodeQL")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.")
.option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.")
.option("--config-file <file>", "Path to config file.")
.option("--codeql-path <path>", "Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--tools-dir <dir>", "Directory to use for CodeQL tools and other files to store between runs. Default is a subdirectory of the home directory.")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--debug", "Print more verbose output", false)
// This prevents a message like: error: unknown option '--trace-process-level'
// Remove this if commander.js starts supporting hidden options.
.allowUnknownOption()
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const tempDir = getTempDir(cmd.tempDir);
const toolsDir = getToolsDir(cmd.toolsDir);
// Wipe the temp dir
logger.info(`Cleaning temp directory ${tempDir}`);
fs.rmdirSync(tempDir, { recursive: true });
fs.mkdirSync(tempDir, { recursive: true });
const githubUrl = parseGithubUrl(cmd.githubUrl);
const repositoryNWO = repository_1.parseRepositoryNwo(cmd.repository);
const languages = await languages_1.getLanguages(cmd.languages, repositoryNWO, cmd.githubAuth, githubUrl, logger);
let codeql;
if (cmd.codeqlPath !== undefined) {
codeql = codeql_1.getCodeQL(cmd.codeqlPath);
}
else {
codeql = await init_1.initCodeQL(undefined, languages, cmd.githubAuth, githubUrl, tempDir, toolsDir, "runner", logger);
}
const config = await init_1.initConfig(languages, cmd.queries, cmd.configFile, tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), cmd.githubAuth, parseGithubUrl(cmd.githubUrl), logger);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig === undefined) {
return;
}
if (process.platform === "win32") {
await init_1.injectWindowsTracer(parseTraceProcessName(), parseTraceProcessLevel(), config, codeql, tracerConfig);
}
// Always output a json file of the env that can be consumed programatically
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
fs.writeFileSync(jsonEnvFile, JSON.stringify(tracerConfig.env));
if (process.platform === "win32") {
const batEnvFile = path.join(config.tempDir, "codeql-env.bat");
const batEnvFileContents = Object.entries(tracerConfig.env)
.map(([key, value]) => `Set ${key}=${value}`)
.join("\n");
fs.writeFileSync(batEnvFile, batEnvFileContents);
const powershellEnvFile = path.join(config.tempDir, "codeql-env.sh");
const powershellEnvFileContents = Object.entries(tracerConfig.env)
.map(([key, value]) => `$env:${key}="${value}"`)
.join("\n");
fs.writeFileSync(powershellEnvFile, powershellEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}", "${batEnvFile}" and "${powershellEnvFile}". ` +
`Please export these variables to future processes so the build can be traced. ` +
`If using cmd/batch run "call ${batEnvFile}" ` +
`or if using PowerShell run "cat ${powershellEnvFile} | Invoke-Expression".`);
}
else {
// Assume that anything that's not windows is using a unix-style shell
const shEnvFile = path.join(config.tempDir, "codeql-env.sh");
const shEnvFileContents = Object.entries(tracerConfig.env)
// Some vars contain ${LIB} that we do not want to be expanded when executing this script
.map(([key, value]) => `export ${key}="${value.replace(/\$/g, "\\$")}"`)
.join("\n");
fs.writeFileSync(shEnvFile, shEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}" and "${shEnvFile}". ` +
`Please export these variables to future processes so the build can be traced, ` +
`for example by running ". ${shEnvFile}".`);
}
}
catch (e) {
logger.error("Init failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command("autobuild")
.description("Attempts to automatically build code")
.option("--language <language>", "The language to build. Otherwise will detect the dominant compiled language.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
}
importTracerEnvironment(config);
let language = undefined;
if (cmd.language !== undefined) {
language = languages_1.parseLanguage(cmd.language);
if (language === undefined || !config.languages.includes(language)) {
throw new Error(`"${cmd.language}" is not a recognised language. ` +
`Known languages in this project are ${config.languages.join(", ")}.`);
}
}
else {
language = autobuild_1.determineAutobuildLanguage(config, logger);
}
if (language !== undefined) {
await autobuild_1.runAutobuild(language, config, logger);
}
}
catch (e) {
logger.error("Autobuild failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command("analyze")
.description("Finishes extracting code and runs CodeQL queries")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--no-upload", "Do not upload results after analysis.")
.option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
.option("--ram <ram>", "Amount of memory to use when running queries. Default is to use all available memory.")
.option("--no-add-snippets", "Specify whether to include code snippets in the sarif output.")
.option("--threads <threads>", "Number of threads to use when running queries. " +
"Default is to use all available cores.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const tempDir = getTempDir(cmd.tempDir);
const outputDir = cmd.outputDir || path.join(tempDir, "codeql-sarif");
const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
}
await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), cmd.upload, "runner", outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
}
catch (e) {
logger.error("Analyze failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command("upload")
.description("Uploads a SARIF file, or all SARIF files from a directory, to code scanning")
.requiredOption("--sarif-file <file>", "SARIF file to upload, or a directory containing multiple SARIF files. (Required)")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), "runner", logger);
}
catch (e) {
logger.error("Upload failed");
logger.error(e);
process.exitCode = 1;
}
});
program.parse(process.argv);
//# sourceMappingURL=runner.js.map

File diff suppressed because one or more lines are too long

76
lib/setup-tools.js generated Normal file
View File

@@ -0,0 +1,76 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const toolcache = __importStar(require("@actions/tool-cache"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
class CodeQLSetup {
constructor(codeqlDist) {
this.dist = codeqlDist;
this.tools = path.join(this.dist, 'tools');
this.cmd = path.join(codeqlDist, 'codeql');
// TODO check process.arch ?
if (process.platform === 'win32') {
this.platform = 'win64';
if (this.cmd.endsWith('codeql')) {
this.cmd += ".exe";
}
}
else if (process.platform === 'linux') {
this.platform = 'linux64';
}
else if (process.platform === 'darwin') {
this.platform = 'osx64';
}
else {
throw new Error("Unsupported plaform: " + process.platform);
}
}
}
exports.CodeQLSetup = CodeQLSetup;
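// Rough illustration (the tool-cache path is hypothetical): on Linux,
//   new CodeQLSetup("/opt/hostedtoolcache/CodeQL/0.0.0-20200601/x64/codeql")
// yields dist = ".../codeql", tools = ".../codeql/tools", cmd = ".../codeql/codeql"
// and platform = "linux64".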
async function setupCodeQL() {
try {
const codeqlURL = core.getInput('tools', { required: true });
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
}
else {
const codeqlPath = await toolcache.downloadTool(codeqlURL);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
}
return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
}
catch (e) {
core.error(e);
throw new Error("Unable to download and extract CodeQL CLI");
}
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url) {
const match = url.match(/\/codeql-bundle-(.*)\//);
if (match === null || match.length < 2) {
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
}
let version = match[1];
if (!semver.valid(version)) {
core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = '0.0.0-' + version;
}
const s = semver.clean(version);
if (!s) {
throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but got ${version} instead`);
}
return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
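// For illustration, mirroring the expectations in setup-tools.test.js below:
//   getCodeQLURLVersion("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz")
//     returns "0.0.0-20200601" (date-style bundle tags become a 0.0.0 pre-release),
//   while a bundle tagged codeql-bundle-1.2.3 simply yields "1.2.3".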
//# sourceMappingURL=setup-tools.js.map

1
lib/setup-tools.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAC7B,+CAAiC;AAEjC,MAAa,WAAW;IAMtB,YAAY,UAAkB;QAC5B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC/B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACpB;SACF;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACvC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC3B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACxC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SACzB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC7D;IACH,CAAC;CACF;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC/B,IAAI;QACF,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAExD,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAC9D,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACrD;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,gBAAgB,CAAC,CAAC;SACtF;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE3D;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAC9D;AACH,CAAC;AAnBD,kCAmBC;AAED,SAAgB,mBAAmB,CAAC,GAAW;IAE7C,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;IAClD,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,wBAAwB,GAAG,iCAAiC,CAAC,CAAC;KAC/E;IAED,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAEvB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;QAC1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,OAAO,gEAAgE,OAAO,GAAG,CAAC,CAAC;QAChH,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC;KAC9B;IAED,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,iDAAiD,OAAO,UAAU,CAAC,CAAC;KAC/G;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AApBD,kDAoBC"}

60
lib/setup-tools.test.js generated Normal file
View File

@@ -0,0 +1,60 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const setupTools = __importStar(require("./setup-tools"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env['GITHUB_WORKSPACE'] = tmpDir;
process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
const versions = ['20200601', '20200610'];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
nock_1.default('https://example.com')
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
await setupTools.setupCodeQL();
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
}
const cachedVersions = toolcache.findAllVersions('CodeQL');
t.is(cachedVersions.length, 2);
});
});
ava_1.default('parse codeql bundle url version', t => {
const tests = {
'20200601': '0.0.0-20200601',
'20200601.0': '0.0.0-20200601.0',
'20200601.0.0': '20200601.0.0',
'1.2.3': '1.2.3',
'1.2.3-alpha': '1.2.3-alpha',
'1.2.3-beta.1': '1.2.3-beta.1',
};
for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`;
try {
const parsedVersion = setupTools.getCodeQLURLVersion(url);
t.deepEqual(parsedVersion, expectedVersion);
}
catch (e) {
t.fail(e.message);
}
}
});
//# sourceMappingURL=setup-tools.test.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.test.js","sourceRoot":"","sources":["../src/setup-tools.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,0DAA4C;AAC5C,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,UAAU,CAAC,WAAW,EAAE,CAAC;YAE/B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YAC1D,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}

234
lib/setup-tracer.js generated Normal file
View File

@@ -0,0 +1,234 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
'SEMMLE_RUNNER',
'SEMMLE_COPY_EXECUTABLES_ROOT',
'SEMMLE_DEPTRACE_SOCKET',
'SEMMLE_JAVA_TOOL_OPTIONS'
]);
async function tracerConfig(codeql, database, compilerSpec) {
const env = await codeql.getTracerEnv(database, compilerSpec);
const config = env['ODASA_TRACER_CONFIGURATION'];
const info = { spec: config, env: {} };
// Extract critical tracer variables from the environment
for (let entry of Object.entries(env)) {
const key = entry[0];
const value = entry[1];
// skip ODASA_TRACER_CONFIGURATION as it is handled separately
if (key === 'ODASA_TRACER_CONFIGURATION') {
continue;
}
// skip undefined values
if (typeof value === 'undefined') {
continue;
}
// Keep variables that do not exist in current environment. In addition always keep
// critical and CODEQL_ variables
if (typeof process.env[key] === 'undefined' || CRITICAL_TRACER_VARS.has(key) || key.startsWith('CODEQL_')) {
info.env[key] = value;
}
}
return info;
}
function concatTracerConfigs(configs) {
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
// A tracer 'spec' file has the following format [log_file, number_of_blocks, blocks_text]
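// For example (matching the fixtures used in the tracer-config tests), a spec file may look like:
//   foo.log
//   2
//   abc
//   def
// i.e. a log file path, a block count, then the block text itself.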
// Merge the environments
const env = {};
let copyExecutables = false;
let envSize = 0;
for (let v of Object.values(configs)) {
for (let e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
// skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
if (name === 'SEMMLE_COPY_EXECUTABLES_ROOT') {
copyExecutables = true;
}
else if (name in env) {
if (env[name] !== value) {
throw Error('Incompatible values in environment parameter ' +
name + ': ' + env[name] + ' and ' + value);
}
}
else {
env[name] = value;
envSize += 1;
}
}
}
// Concatenate spec files into a new spec file
let languages = Object.keys(configs);
const cppIndex = languages.indexOf('cpp');
// Make sure cpp is the last language, if it's present, since it must be concatenated last
if (cppIndex !== -1) {
let lastLang = languages[languages.length - 1];
languages[languages.length - 1] = languages[cppIndex];
languages[cppIndex] = lastLang;
}
let totalLines = [];
let totalCount = 0;
for (let lang of languages) {
const lines = fs.readFileSync(configs[lang].spec, 'utf8').split(/\r?\n/);
const count = parseInt(lines[1], 10);
totalCount += count;
totalLines.push(...lines.slice(2));
}
const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
const spec = path.resolve(tempFolder, 'compound-spec');
const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
if (copyExecutables) {
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
envSize += 1;
}
fs.writeFileSync(spec, newSpecContent.join('\n'));
// Prepare the content of the compound environment file
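// (Descriptive note.) The resulting file is a small binary structure:
//   int32 LE  number of entries
//   then, per entry: int32 LE byte length of "key=value\0", followed by those UTF-8 bytes.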
let buffer = Buffer.alloc(4);
buffer.writeInt32LE(envSize, 0);
for (let e of Object.entries(env)) {
const key = e[0];
const value = e[1];
const lineBuffer = Buffer.from(key + '=' + value + '\0', 'utf8');
const sizeBuffer = Buffer.alloc(4);
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
}
// Write the compound environment
const envPath = spec + '.environment';
fs.writeFileSync(envPath, buffer);
return { env, spec };
}
async function sendSuccessStatusReport(startedAt, config) {
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);
const languages = config.languages.join(',');
const workflowLanguages = core.getInput('languages', { required: false });
const paths = (config.originalUserInput.paths || []).join(',');
const pathsIgnore = (config.originalUserInput['paths-ignore'] || []).join(',');
const disableDefaultQueries = config.originalUserInput['disable-default-queries'] ? languages : '';
const queries = (config.originalUserInput.queries || []).map(q => q.uses).join(',');
const statusReport = {
...statusReportBase,
languages: languages,
workflow_languages: workflowLanguages,
paths: paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries,
};
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
let config;
let codeql;
try {
if (util.should_abort('init', false) ||
!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
return;
}
core.startGroup('Setup CodeQL tools');
codeql = await codeql_1.setupCodeQL();
await codeql.printVersion();
core.endGroup();
core.startGroup('Load language configuration');
config = await configUtils.initConfig();
analysisPaths.includeAndExcludeAnalysisPaths(config);
core.endGroup();
}
catch (e) {
core.setFailed(e.message);
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
return;
}
try {
const sourceRoot = path.resolve();
// Forward Go flags
const goFlags = process.env['GOFLAGS'];
if (goFlags) {
core.exportVariable('GOFLAGS', goFlags);
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
}
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
core.exportVariable('CODEQL_RAM', codeqlRam);
const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
await io.mkdirP(databaseFolder);
let tracedLanguages = {};
let scannedLanguages = [];
// TODO: replace this code once CodeQL supports multi-language tracing
for (let language of config.languages) {
const languageDatabase = path.join(databaseFolder, language);
// Init language database
await codeql.databaseInit(languageDatabase, language, sourceRoot);
// TODO: add better detection of 'traced languages' instead of using a hard coded list
if (['cpp', 'java', 'csharp'].includes(language)) {
const config = await tracerConfig(codeql, languageDatabase);
tracedLanguages[language] = config;
}
else {
scannedLanguages.push(language);
}
}
const tracedLanguageKeys = Object.keys(tracedLanguages);
if (tracedLanguageKeys.length > 0) {
const mainTracerConfig = concatTracerConfigs(tracedLanguages);
if (mainTracerConfig.spec) {
for (let entry of Object.entries(mainTracerConfig.env)) {
core.exportVariable(entry[0], entry[1]);
}
core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
if (process.platform === 'darwin') {
core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeql.getDir(), 'tools', 'osx64', 'libtrace.dylib'));
}
else if (process.platform === 'win32') {
await exec.exec('powershell', [
path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
path.resolve(codeql.getDir(), 'tools', 'win64', 'tracer.exe'),
], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
}
else {
core.exportVariable('LD_PRELOAD', path.join(codeql.getDir(), 'tools', 'linux64', '${LIB}trace.so'));
}
}
}
core.exportVariable(sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES, scannedLanguages.join(','));
core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));
// TODO: make this a "private" environment variable of the action
core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
}
catch (error) {
core.setFailed(error.message);
await util.sendStatusReport(await util.createStatusReportBase('init', 'failure', startedAt, error.message, error.stack));
return;
}
await sendSuccessStatusReport(startedAt, config);
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
}
run().catch(e => {
core.setFailed("init action failed: " + e);
console.log(e);
});
//# sourceMappingURL=setup-tracer.js.map

1
lib/setup-tracer.js.map Normal file

File diff suppressed because one or more lines are too long

View File

@@ -1,10 +1,16 @@
"use strict"; "use strict";
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION"; exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
// The time at which the first action (normally init) started executing. // The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init // If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator) // action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked // then this variable will be assigned the start time of the action invoked
// rather that the init action. // rather that the init action.
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT"; exports.CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
// Populated when the init action completes successfully
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
//# sourceMappingURL=shared-environment.js.map //# sourceMappingURL=shared-environment.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"} {"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}

22
lib/test-utils.js generated Normal file
View File

@@ -0,0 +1,22 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function silenceDebugOutput(test) {
const typedTest = test;
typedTest.beforeEach(t => {
const processStdoutWrite = process.stdout.write.bind(process.stdout);
t.context.write = processStdoutWrite;
process.stdout.write = (str, encoding, cb) => {
// Core library will directly call process.stdout.write for commands
// We don't want :: commands to be executed by the runner during tests
if (!str.match(/^::/)) {
processStdoutWrite(str, encoding, cb);
}
return true;
};
});
typedTest.afterEach(t => {
process.stdout.write = t.context.write;
});
}
exports.silenceDebugOutput = silenceDebugOutput;
//# sourceMappingURL=test-utils.js.map

1
lib/test-utils.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"test-utils.js","sourceRoot":"","sources":["../src/test-utils.ts"],"names":[],"mappings":";;AAEA,SAAgB,kBAAkB,CAAC,IAAwB;IACzD,MAAM,SAAS,GAAG,IAAmC,CAAC;IAEtD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACrB,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG,kBAAkB,CAAC;QACrC,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,GAAQ,EAAE,QAAc,EAAE,EAA0B,EAAE,EAAE;YAC5E,oEAAoE;YACpE,sEAAsE;YACtE,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACnB,kBAAkB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC;aACzC;YACD,OAAO,IAAI,CAAC;QAChB,CAAC,CAAC;IACN,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;QACpB,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3C,CAAC,CAAC,CAAC;AACL,CAAC;AAnBD,gDAmBC"}

12
lib/testing-utils.js generated
View File

@@ -19,19 +19,19 @@ function wrapOutput(context) {
// write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean; // write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean;
return (chunk, encoding, cb) => { return (chunk, encoding, cb) => {
// Work out which method overload we are in // Work out which method overload we are in
if (cb === undefined && typeof encoding === "function") { if (cb === undefined && typeof encoding === 'function') {
cb = encoding; cb = encoding;
encoding = undefined; encoding = undefined;
} }
// Record the output // Record the output
if (typeof chunk === "string") { if (typeof chunk === 'string') {
context.testOutput += chunk; context.testOutput += chunk;
} }
else { else {
context.testOutput += new TextDecoder(encoding || "utf-8").decode(chunk); context.testOutput += new TextDecoder(encoding || 'utf-8').decode(chunk);
} }
// Satisfy contract by calling callback when done // Satisfy contract by calling callback when done
if (cb !== undefined && typeof cb === "function") { if (cb !== undefined && typeof cb === 'function') {
cb(); cb();
} }
return true; return true;
@@ -39,7 +39,7 @@ function wrapOutput(context) {
} }
function setupTests(test) { function setupTests(test) {
const typedTest = test; const typedTest = test;
typedTest.beforeEach((t) => { typedTest.beforeEach(t => {
// Set an empty CodeQL object so that all method calls will fail // Set an empty CodeQL object so that all method calls will fail
// unless the test explicitly sets one up. // unless the test explicitly sets one up.
CodeQL.setCodeQL({}); CodeQL.setCodeQL({});
@@ -57,7 +57,7 @@ function setupTests(test) {
t.context.env = {}; t.context.env = {};
Object.assign(t.context.env, process.env); Object.assign(t.context.env, process.env);
}); });
typedTest.afterEach.always((t) => { typedTest.afterEach.always(t => {
// Restore stdout and stderr // Restore stdout and stderr
// The captured output is only replayed if the test failed // The captured output is only replayed if the test failed
process.stdout.write = t.context.stdoutWrite; process.stdout.write = t.context.stdoutWrite;

View File

@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"} 
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AAInC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"}

View File

@@ -1,86 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
/**
* Wrapper for toolrunner.Toolrunner which checks for specific return code and/or regex matches in console output.
* Output will be streamed to the live console as well as captured for subsequent processing.
* Returns promise with return code
*
* @param commandLine command to execute
* @param args optional arguments for tool. Escaping is handled by the lib.
* @param matchers defines specific codes and/or regexes that should lead to return of a custom error
* @param options optional exec options. See ExecOptions
* @returns Promise<number> exit code
*/
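// Sketch of intended usage (mirrors toolrunner-error-catcher.test.js; the matcher values are
// illustrative only):
//   const matchers = [{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" }];
//   const exitCode = await toolrunnerErrorCatcher("node", ["-e", "..."], matchers);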
async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
var _a, _b, _c;
let stdout = "";
let stderr = "";
const listeners = {
stdout: (data) => {
var _a, _b;
stdout += data.toString();
if (((_b = (_a = options) === null || _a === void 0 ? void 0 : _a.listeners) === null || _b === void 0 ? void 0 : _b.stdout) !== undefined) {
options.listeners.stdout(data);
}
else {
// if no stdout listener was originally defined then we match default behavior of Toolrunner
process.stdout.write(data);
}
},
stderr: (data) => {
var _a, _b;
stderr += data.toString();
if (((_b = (_a = options) === null || _a === void 0 ? void 0 : _a.listeners) === null || _b === void 0 ? void 0 : _b.stderr) !== undefined) {
options.listeners.stderr(data);
}
else {
// if no stderr listener was originally defined then we match default behavior of Toolrunner
process.stderr.write(data);
}
},
};
// we capture the original return code or error so that if no match is found we can duplicate the behavior
let returnState;
try {
returnState = await new toolrunnner.ToolRunner(commandLine, args, {
...options,
listeners,
ignoreReturnCode: true,
}).exec();
}
catch (e) {
returnState = e;
}
// if there is a zero return code then we do not apply the matchers
if (returnState === 0)
return returnState;
if (matchers) {
for (const matcher of matchers) {
if (matcher.exitCode === returnState || ((_a = matcher.outputRegex) === null || _a === void 0 ? void 0 : _a.test(stderr)) || ((_b = matcher.outputRegex) === null || _b === void 0 ? void 0 : _b.test(stdout))) {
throw new Error(matcher.message);
}
}
}
if (typeof returnState === "number") {
// only if we were instructed to ignore the return code do we ever return it non-zero
if ((_c = options) === null || _c === void 0 ? void 0 : _c.ignoreReturnCode) {
return returnState;
}
else {
throw new Error(`The process \'${commandLine}\' failed with exit code ${returnState}`);
}
}
else {
throw returnState;
}
}
exports.toolrunnerErrorCatcher = toolrunnerErrorCatcher;
//# sourceMappingURL=toolrunner-error-catcher.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;AACA,0EAA4D;AAI5D;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,WAAW,EAAE,IAAI,EAAE;YAChE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI;SACvB,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,CAAC;KACjB;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW,WAChC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,WACjC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,UAAI,OAAO,0CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,iBAAiB,WAAW,4BAA4B,WAAW,EAAE,CACtE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AArED,wDAqEC"}

View File

@@ -1,145 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const exec = __importStar(require("@actions/exec"));
const ava_1 = __importDefault(require("ava"));
const testing_utils_1 = require("./testing-utils");
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("matchers are never applied if non-error exit", async (t) => {
const testArgs = buildDummyArgs("foo bar\\nblort qux", "foo bar\\nblort qux", "", 0);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" },
];
t.deepEqual(await exec.exec("node", testArgs), 0);
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), 0);
});
ava_1.default("regex matchers are applied to stdout for non-zero exit code", async (t) => {
const testArgs = buildDummyArgs("foo bar\\nblort qux", "", "", 1);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("regex matchers are applied to stderr for non-zero exit code", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("matcher returns correct error message when multiple matchers defined", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
const matchers = [
{ exitCode: 456, outputRegex: new RegExp("lorem ipsum"), message: "😩" },
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
{ exitCode: 789, outputRegex: new RegExp("blah blah"), message: "🤦‍♂️" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("matcher returns first match to regex when multiple matches", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
{ exitCode: 789, outputRegex: new RegExp("blah blah"), message: "🤦‍♂️" },
{ exitCode: 987, outputRegex: new RegExp("foo bar"), message: "🚫" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("exit code matchers are applied", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 123);
const matchers = [
{
exitCode: 123,
outputRegex: new RegExp("this will not match"),
message: "🦄",
},
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 123",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
const testArgs = buildDummyArgs("standard output", "error output", "", 199);
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], { ignoreReturnCode: false }), { instanceOf: Error });
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], {
ignoreReturnCode: true,
}), 199);
});
ava_1.default("execErrorCatcher preserves behavior of provided listeners", async (t) => {
const stdoutExpected = "standard output";
const stderrExpected = "error output";
let stdoutActual = "";
let stderrActual = "";
const listeners = {
stdout: (data) => {
stdoutActual += data.toString();
},
stderr: (data) => {
stderrActual += data.toString();
},
};
const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], {
listeners,
}), 0);
t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
t.deepEqual(stderrActual, `${stderrExpected}\n`);
});
function buildDummyArgs(stdoutContents, stderrContents, desiredErrorMessage, desiredExitCode) {
let command = "";
if (stdoutContents)
command += `console.log("${stdoutContents}");`;
if (stderrContents)
command += `console.error("${stderrContents}");`;
if (command.length === 0)
throw new Error("Must provide contents for either stdout or stderr");
if (desiredErrorMessage)
command += `throw new Error("${desiredErrorMessage}");`;
if (desiredExitCode)
command += `process.exitCode = ${desiredExitCode};`;
return ["-e", command];
}
//# sourceMappingURL=toolrunner-error-catcher.test.js.map

File diff suppressed because one or more lines are too long

151
lib/tracer-config.js generated
View File

@@ -1,151 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const languages_1 = require("./languages");
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set([
"SEMMLE_PRELOAD_libtrace",
"SEMMLE_RUNNER",
"SEMMLE_COPY_EXECUTABLES_ROOT",
"SEMMLE_DEPTRACE_SOCKET",
"SEMMLE_JAVA_TOOL_OPTIONS",
]);
async function getTracerConfigForLanguage(codeql, config, language) {
const env = await codeql.getTracerEnv(util.getCodeQLDatabasePath(config.tempDir, language));
const spec = env["ODASA_TRACER_CONFIGURATION"];
const info = { spec, env: {} };
// Extract critical tracer variables from the environment
for (const entry of Object.entries(env)) {
const key = entry[0];
const value = entry[1];
// skip ODASA_TRACER_CONFIGURATION as it is handled separately
if (key === "ODASA_TRACER_CONFIGURATION") {
continue;
}
// skip undefined values
if (typeof value === "undefined") {
continue;
}
// Keep variables that do not exist in current environment. In addition always keep
// critical and CODEQL_ variables
if (typeof process.env[key] === "undefined" ||
CRITICAL_TRACER_VARS.has(key) ||
key.startsWith("CODEQL_")) {
info.env[key] = value;
}
}
return info;
}
exports.getTracerConfigForLanguage = getTracerConfigForLanguage;
function concatTracerConfigs(tracerConfigs, config) {
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
// A tracer 'spec' file has the following format [log_file, number_of_blocks, blocks_text]
// Merge the environments
const env = {};
let copyExecutables = false;
let envSize = 0;
for (const v of Object.values(tracerConfigs)) {
for (const e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
// skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
if (name === "SEMMLE_COPY_EXECUTABLES_ROOT") {
copyExecutables = true;
}
else if (name in env) {
if (env[name] !== value) {
throw Error(`Incompatible values in environment parameter ${name}: ${env[name]} and ${value}`);
}
}
else {
env[name] = value;
envSize += 1;
}
}
}
// Concatenate spec files into a new spec file
const languages = Object.keys(tracerConfigs);
const cppIndex = languages.indexOf("cpp");
// Make sure cpp is the last language, if it's present, since it must be concatenated last
if (cppIndex !== -1) {
const lastLang = languages[languages.length - 1];
languages[languages.length - 1] = languages[cppIndex];
languages[cppIndex] = lastLang;
}
const totalLines = [];
let totalCount = 0;
for (const lang of languages) {
const lines = fs
.readFileSync(tracerConfigs[lang].spec, "utf8")
.split(/\r?\n/);
const count = parseInt(lines[1], 10);
totalCount += count;
totalLines.push(...lines.slice(2));
}
const newLogFilePath = path.resolve(config.tempDir, "compound-build-tracer.log");
const spec = path.resolve(config.tempDir, "compound-spec");
const compoundTempFolder = path.resolve(config.tempDir, "compound-temp");
const newSpecContent = [
newLogFilePath,
totalCount.toString(10),
...totalLines,
];
if (copyExecutables) {
env["SEMMLE_COPY_EXECUTABLES_ROOT"] = compoundTempFolder;
envSize += 1;
}
fs.writeFileSync(spec, newSpecContent.join("\n"));
// Prepare the content of the compound environment file
let buffer = Buffer.alloc(4);
buffer.writeInt32LE(envSize, 0);
for (const e of Object.entries(env)) {
const key = e[0];
const value = e[1];
const lineBuffer = Buffer.from(`${key}=${value}\0`, "utf8");
const sizeBuffer = Buffer.alloc(4);
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
}
// Write the compound environment
const envPath = `${spec}.environment`;
fs.writeFileSync(envPath, buffer);
return { env, spec };
}
exports.concatTracerConfigs = concatTracerConfigs;
async function getCombinedTracerConfig(config, codeql) {
// Abort if there are no traced languages as there's nothing to do
const tracedLanguages = config.languages.filter(languages_1.isTracedLanguage);
if (tracedLanguages.length === 0) {
return undefined;
}
// Get all the tracer configs and combine them together
const tracedLanguageConfigs = {};
for (const language of tracedLanguages) {
tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
}
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
return mainTracerConfig;
}
exports.getCombinedTracerConfig = getCombinedTracerConfig;
//# sourceMappingURL=tracer-config.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"tracer-config.js","sourceRoot":"","sources":["../src/tracer-config.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAI7B,2CAAyD;AACzD,6CAA+B;AAO/B,MAAM,oBAAoB,GAAG,IAAI,GAAG,CAAC;IACnC,yBAAyB;IACzB,AAD0B;IAE1B,eAAe;IACf,AADgB;IAEhB,8BAA8B;IAC9B,AAD+B;IAE/B,wBAAwB;IACxB,AADyB;IAEzB,0BAA0B;CAC3B,CAAC,CAAC;AAEI,KAAK,UAAU,0BAA0B,CAC9C,MAAc,EACd,MAA0B,EAC1B,QAAkB;IAElB,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,YAAY,CACnC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CACrD,CAAC;IAEF,MAAM,IAAI,GAAG,GAAG,CAAC,4BAA4B,CAAC,CAAC;IAC/C,MAAM,IAAI,GAAiB,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,CAAC;IAE7C,yDAAyD;IACzD,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvC,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACvB,8DAA8D;QAC9D,IAAI,GAAG,KAAK,4BAA4B,EAAE;YACxC,SAAS;SACV;QACD,wBAAwB;QACxB,IAAI,OAAO,KAAK,KAAK,WAAW,EAAE;YAChC,SAAS;SACV;QACD,mFAAmF;QACnF,iCAAiC;QACjC,IACE,OAAO,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,WAAW;YACvC,oBAAoB,CAAC,GAAG,CAAC,GAAG,CAAC;YAC7B,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EACzB;YACA,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;SACvB;KACF;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AAnCD,gEAmCC;AAED,SAAgB,mBAAmB,CACjC,aAA+C,EAC/C,MAA0B;IAE1B,iGAAiG;IACjG,0FAA0F;IAE1F,yBAAyB;IACzB,MAAM,GAAG,GAA8B,EAAE,CAAC;IAC1C,IAAI,eAAe,GAAG,KAAK,CAAC;IAC5B,IAAI,OAAO,GAAG,CAAC,CAAC;IAChB,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE;QAC5C,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;YACrC,MAAM,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YAClB,MAAM,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YACnB,gEAAgE;YAChE,IAAI,IAAI,KAAK,8BAA8B,EAAE;gBAC3C,eAAe,GAAG,IAAI,CAAC;aACxB;iBAAM,IAAI,IAAI,IAAI,GAAG,EAAE;gBACtB,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,KAAK,EAAE;oBACvB,MAAM,KAAK,CACT,gDAAgD,IAAI,KAAK,GAAG,CAAC,IAAI,CAAC,QAAQ,KAAK,EAAE,CAClF,CAAC;iBACH;aACF;iBAAM;gBACL,GAAG,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC;gBAClB,OAAO,IAAI,CAAC,CAAC;aACd;SACF;KACF;IAED,8CAA8C;IAC9C,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;IAC7C,MAAM,QAAQ,GAAG,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC1C,yFAAyF;IACzF,IAAI,QAAQ,KAAK,CAAC,CAAC,EAAE;QACnB,MAAM,QAAQ,GAAG,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QACjD,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,CAAC;QACtD,SAAS,CAAC,QAAQ,CAAC,GAAG,QAAQ,CAAC;KAChC;IAED,MAAM,UAAU,GAAa,EAAE,CAAC;IAChC,IAAI,UAAU,GAAG,CAAC,CAAC;IACnB,KAAK,MAAM,IAAI,IAAI,SAAS,EAAE;QAC5B,MAAM,KAAK,GAAG,EAAE;aACb,YAAY,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC;aAC9C,KAAK,CAAC,OAAO,CAAC,CAAC;QAClB,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QACrC,UAAU,IAAI,KAAK,CAAC;QACpB,UAAU,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACpC;IAED,MAAM,cAAc,GAAG,IAAI,CAAC,OAAO,CACjC,MAAM,CAAC,OAAO,EACd,2BAA2B,CAC5B,CAAC;IACF,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;IAC3D,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;IACzE,MAAM,cAAc,GAAG;QACrB,cAAc;QACd,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QACvB,GAAG,UAAU;KACd,CAAC;IAEF,IAAI,eAAe,EAAE;QACnB,GAAG,CAAC,8BAA8B,CAAC,GAAG,kBAAkB,CAAC;QACzD,OAAO,IAAI,CAAC,CAAC;KACd;IAED,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IAElD,uDAAuD;IACvD,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAC7B,MAAM,CAAC,YAAY,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;IAChC,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACnC,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACjB,MAAM,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACnB,MAAM,UAAU,GAAG,IAAI,MAAM,CAAC,GAAG,GAAG,IAAI,KAAK,IAAI,EAAE,MAAM,CAAC,CAAC;QAC3D,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,C
AAC,CAAC,CAAC;QACnC,UAAU,CAAC,YAAY,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC9C,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,CAAC;KAC1D;IACD,iCAAiC;IACjC,MAAM,OAAO,GAAG,GAAG,IAAI,cAAc,CAAC;IACtC,EAAE,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAElC,OAAO,EAAE,GAAG,EAAE,IAAI,EAAE,CAAC;AACvB,CAAC;AAvFD,kDAuFC;AAEM,KAAK,UAAU,uBAAuB,CAC3C,MAA0B,EAC1B,MAAc;IAEd,kEAAkE;IAClE,MAAM,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IAClE,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;QAChC,OAAO,SAAS,CAAC;KAClB;IAED,uDAAuD;IACvD,MAAM,qBAAqB,GAAqC,EAAE,CAAC;IACnE,KAAK,MAAM,QAAQ,IAAI,eAAe,EAAE;QACtC,qBAAqB,CAAC,QAAQ,CAAC,GAAG,MAAM,0BAA0B,CAChE,MAAM,EACN,MAAM,EACN,QAAQ,CACT,CAAC;KACH;IACD,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,qBAAqB,EAAE,MAAM,CAAC,CAAC;IAE5E,8BAA8B;IAC9B,gBAAgB,CAAC,GAAG,CAAC,4BAA4B,CAAC,GAAG,gBAAgB,CAAC,IAAI,CAAC;IAC3E,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC;IACjD,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACjC,gBAAgB,CAAC,GAAG,CAAC,uBAAuB,CAAC,GAAG,IAAI,CAAC,IAAI,CACvD,SAAS,EACT,OAAO,EACP,OAAO,EACP,gBAAgB,CACjB,CAAC;KACH;SAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QACvC,gBAAgB,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,IAAI,CAC5C,SAAS,EACT,OAAO,EACP,SAAS,EACT,gBAAgB,CACjB,CAAC;KACH;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAzCD,0DAyCC"}

View File

@@ -1,278 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
function getTestConfig(tmpDir) {
return {
languages: [languages_1.Language.java],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
};
}
// A very minimal setup
ava_1.default("getTracerConfigForLanguage - minimal setup", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const codeQL = codeql_1.setCodeQL({
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
};
},
});
const result = await tracer_config_1.getTracerConfigForLanguage(codeQL, config, languages_1.Language.javascript);
t.deepEqual(result, { spec: "abc", env: { foo: "bar" } });
});
});
// Existing vars should not be overwritten, unless they are critical or prefixed with CODEQL_
ava_1.default("getTracerConfigForLanguage - existing / critical vars", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
// Set up some variables in the environment
process.env["foo"] = "abc";
process.env["SEMMLE_PRELOAD_libtrace"] = "abc";
process.env["SEMMLE_RUNNER"] = "abc";
process.env["SEMMLE_COPY_EXECUTABLES_ROOT"] = "abc";
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
process.env["SEMMLE_JAVA_TOOL_OPTIONS"] = "abc";
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
process.env["CODEQL_VAR"] = "abc";
// Now CodeQL returns all these variables, and one more, with different values
const codeQL = codeql_1.setCodeQL({
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
baz: "qux",
SEMMLE_PRELOAD_libtrace: "SEMMLE_PRELOAD_libtrace",
SEMMLE_RUNNER: "SEMMLE_RUNNER",
SEMMLE_COPY_EXECUTABLES_ROOT: "SEMMLE_COPY_EXECUTABLES_ROOT",
SEMMLE_DEPTRACE_SOCKET: "SEMMLE_DEPTRACE_SOCKET",
SEMMLE_JAVA_TOOL_OPTIONS: "SEMMLE_JAVA_TOOL_OPTIONS",
CODEQL_VAR: "CODEQL_VAR",
};
},
});
const result = await tracer_config_1.getTracerConfigForLanguage(codeQL, config, languages_1.Language.javascript);
t.deepEqual(result, {
spec: "abc",
env: {
// Should contain all variables except 'foo', because that already existed in the
// environment with a different value, and is not deemed a "critical" variable.
baz: "qux",
SEMMLE_PRELOAD_libtrace: "SEMMLE_PRELOAD_libtrace",
SEMMLE_RUNNER: "SEMMLE_RUNNER",
SEMMLE_COPY_EXECUTABLES_ROOT: "SEMMLE_COPY_EXECUTABLES_ROOT",
SEMMLE_DEPTRACE_SOCKET: "SEMMLE_DEPTRACE_SOCKET",
SEMMLE_JAVA_TOOL_OPTIONS: "SEMMLE_JAVA_TOOL_OPTIONS",
CODEQL_VAR: "CODEQL_VAR",
},
});
});
});
ava_1.default("concatTracerConfigs - minimal configs correctly combined", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
a: "a",
b: "b",
},
};
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
c: "c",
},
};
const result = tracer_config_1.concatTracerConfigs({ javascript: tc1, python: tc2 }, config);
t.deepEqual(result, {
spec: path.join(tmpDir, "compound-spec"),
env: {
a: "a",
b: "b",
c: "c",
},
});
t.true(fs.existsSync(result.spec));
t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nabc\ndef\nghi`);
});
});
ava_1.default("concatTracerConfigs - conflicting env vars", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n0");
// Ok if env vars have the same name and the same value
t.deepEqual(tracer_config_1.concatTracerConfigs({
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "b", c: "c" } },
}, config).env, {
a: "a",
b: "b",
c: "c",
});
// Throws if env vars have same name but different values
const e = t.throws(() => tracer_config_1.concatTracerConfigs({
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "c" } },
}, config));
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
});
});
ava_1.default("concatTracerConfigs - cpp spec lines come last if present", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
a: "a",
b: "b",
},
};
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
c: "c",
},
};
const result = tracer_config_1.concatTracerConfigs({ cpp: tc1, python: tc2 }, config);
t.deepEqual(result, {
spec: path.join(tmpDir, "compound-spec"),
env: {
a: "a",
b: "b",
c: "c",
},
});
t.true(fs.existsSync(result.spec));
t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nghi\nabc\ndef`);
});
});
ava_1.default("concatTracerConfigs - SEMMLE_COPY_EXECUTABLES_ROOT is updated to point to compound spec", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n0");
const result = tracer_config_1.concatTracerConfigs({
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { SEMMLE_COPY_EXECUTABLES_ROOT: "foo" } },
}, config);
t.deepEqual(result.env, {
a: "a",
b: "b",
SEMMLE_COPY_EXECUTABLES_ROOT: path.join(tmpDir, "compound-temp"),
});
});
});
ava_1.default("concatTracerConfigs - compound environment file is created correctly", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
a: "a",
},
};
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
foo: "bar_baz",
},
};
const result = tracer_config_1.concatTracerConfigs({ javascript: tc1, python: tc2 }, config);
const envPath = `${result.spec}.environment`;
t.true(fs.existsSync(envPath));
const buffer = fs.readFileSync(envPath);
// Contents is binary data
t.deepEqual(buffer.length, 28);
t.deepEqual(buffer.readInt32LE(0), 2); // number of env vars
t.deepEqual(buffer.readInt32LE(4), 4); // length of env var definition
t.deepEqual(buffer.toString("utf8", 8, 12), "a=a\0"); // [key]=[value]\0
t.deepEqual(buffer.readInt32LE(12), 12); // length of env var definition
t.deepEqual(buffer.toString("utf8", 16, 28), "foo=bar_baz\0"); // [key]=[value]\0
});
});
ava_1.default("getCombinedTracerConfig - return undefined when no languages are traced languages", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
// No traced languages
config.languages = [languages_1.Language.javascript, languages_1.Language.python];
const codeQL = codeql_1.setCodeQL({
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
};
},
});
t.deepEqual(await tracer_config_1.getCombinedTracerConfig(config, codeQL), undefined);
});
});
ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n2\nabc\ndef");
const codeQL = codeql_1.setCodeQL({
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: spec,
foo: "bar",
};
},
});
const result = await tracer_config_1.getCombinedTracerConfig(config, codeQL);
const expectedEnv = {
foo: "bar",
ODASA_TRACER_CONFIGURATION: result.spec,
};
if (process.platform === "darwin") {
expectedEnv["DYLD_INSERT_LIBRARIES"] = path.join(path.dirname(codeQL.getPath()), "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
expectedEnv["LD_PRELOAD"] = path.join(path.dirname(codeQL.getPath()), "tools", "linux64", "${LIB}trace.so");
}
t.deepEqual(result, {
spec: path.join(tmpDir, "compound-spec"),
env: expectedEnv,
});
});
});
//# sourceMappingURL=tracer-config.test.js.map
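Aside: the "compound environment file is created correctly" test in the deleted file above pins down the binary layout written next to the compound spec: a little-endian 32-bit count of variables, then for each variable a little-endian 32-bit byte length followed by the UTF-8 bytes of "key=value\0". A minimal sketch of an encoder for that layout; the function name is illustrative and not part of the action's API.

// Sketch only: reproduces the layout asserted by the test above, not the action's own code.
function encodeTracerEnvironment(env) {
    const entries = Object.entries(env).map(([key, value]) => Buffer.from(`${key}=${value}\0`, "utf8"));
    const chunks = [];
    const count = Buffer.alloc(4);
    count.writeInt32LE(entries.length, 0); // number of env vars
    chunks.push(count);
    for (const entry of entries) {
        const length = Buffer.alloc(4);
        length.writeInt32LE(entry.length, 0); // byte length of "key=value\0"
        chunks.push(length, entry);
    }
    return Buffer.concat(chunks);
}

// encodeTracerEnvironment({ a: "a", foo: "bar_baz" }).length === 28, matching the assertions above.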

File diff suppressed because one or more lines are too long
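Aside: the concatTracerConfigs tests above also fix the shape of a tracer spec file: line one is the path of the build tracer log, line two the number of entries, and the remaining lines the entries themselves. Concatenation sums the counts, appends the bodies, and writes a compound spec pointing at compound-build-tracer.log. A rough sketch under those assumptions, ignoring the cpp-entries-last reordering exercised by one of the tests; the helper name is illustrative.

const fs = require("fs");
const path = require("path");

// Sketch only: mirrors the file contents asserted by the tests above.
function concatSpecFiles(specPaths, tempDir) {
    let totalEntries = 0;
    const entries = [];
    for (const specPath of specPaths) {
        const [, count, ...rest] = fs.readFileSync(specPath, "utf8").split("\n");
        totalEntries += parseInt(count, 10);
        entries.push(...rest);
    }
    const compoundSpec = path.join(tempDir, "compound-spec");
    const logFile = path.join(tempDir, "compound-build-tracer.log");
    fs.writeFileSync(compoundSpec, [logFile, totalEntries, ...entries].join("\n"));
    return compoundSpec;
}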

21
lib/tracer-env.js generated Normal file

@@ -0,0 +1,21 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const env = {};
for (let entry of Object.entries(process.env)) {
const key = entry[0];
const value = entry[1];
if (typeof value !== 'undefined' && key !== '_' && !key.startsWith('JAVA_MAIN_CLASS_')) {
env[key] = value;
}
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
//# sourceMappingURL=tracer-env.js.map
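Aside: lib/tracer-env.js above is a small helper that snapshots the process environment, dropping "_" and any JAVA_MAIN_CLASS_* entries, writes the result as JSON to the path given as its first argument, and echoes that path to stdout. A hedged example of reading such a snapshot back; the file name is just an example, not a path the action uses.

// Example only: the output path is whatever the caller passed to tracer-env.js.
const fs = require("fs");
const snapshotPath = process.argv[2] || "env-snapshot.json";
const capturedEnv = JSON.parse(fs.readFileSync(snapshotPath, "utf-8"));
console.log(`captured ${Object.keys(capturedEnv).length} environment variables`);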

1
lib/tracer-env.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC7C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACtF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KAClB;CACF;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}

166
lib/upload-lib.js generated

@@ -23,18 +23,18 @@ const util = __importStar(require("./util"));
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
- const combinedSarif = {
+ let combinedSarif = {
version: null,
- runs: [],
+ runs: []
};
- for (const sarifFile of sarifFiles) {
- const sarifObject = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
+ for (let sarifFile of sarifFiles) {
+ let sarifObject = JSON.parse(fs.readFileSync(sarifFile, 'utf8'));
// Check SARIF version
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
}
else if (combinedSarif.version !== sarifObject.version) {
- throw `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`;
+ throw "Different SARIF versions encountered: " + combinedSarif.version + " and " + sarifObject.version;
}
combinedSarif.runs.push(...sarifObject.runs);
}
@@ -43,79 +43,73 @@ function combineSarifFiles(sarifFiles) {
exports.combineSarifFiles = combineSarifFiles;
// Upload the given payload.
// If the request fails then this will retry a small number of times.
- async function uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger) {
- logger.info("Uploading results");
+ async function uploadPayload(payload) {
+ core.info('Uploading results');
// If in test mode we don't want to upload the results
- const testMode = process.env["TEST_MODE"] === "true" || false;
+ const testMode = process.env['TEST_MODE'] === 'true' || false;
if (testMode) {
return;
}
+ const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
// Make up to 4 attempts to upload, and sleep for these
// number of seconds between each attempt.
// We don't want to backoff too much to avoid wasting action
// minutes, but just waiting a little bit could maybe help.
const backoffPeriods = [1, 5, 15];
- const client = api.getApiClient(githubAuth, githubUrl);
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
- const reqURL = mode === "actions"
- ? "PUT /repos/:owner/:repo/code-scanning/analysis"
- : "POST /repos/:owner/:repo/code-scanning/sarifs";
- const response = await client.request(reqURL, {
- owner: repositoryNwo.owner,
- repo: repositoryNwo.repo,
+ const response = await api.getApiClient().request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
+ owner: owner,
+ repo: repo,
data: payload,
- });
- logger.debug(`response status: ${response.status}`);
+ }));
+ core.debug('response status: ' + response.status);
const statusCode = response.status;
if (statusCode === 202) {
- logger.info("Successfully uploaded results");
+ core.info("Successfully uploaded results");
return;
}
const requestID = response.headers["x-github-request-id"];
// On any other status code that's not 5xx mark the upload as failed
if (!statusCode || statusCode < 500 || statusCode >= 600) {
- throw new Error(`Upload failed (${requestID}): (${statusCode}) ${JSON.stringify(response.data)}`);
+ throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
}
// On a 5xx status code we may retry the request
if (attempt < backoffPeriods.length) {
// Log the failure as a warning but don't mark the action as failed yet
- logger.warning(`Upload attempt (${attempt + 1} of ${backoffPeriods.length + 1}) failed (${requestID}). Retrying in ${backoffPeriods[attempt]} seconds: (${statusCode}) ${JSON.stringify(response.data)}`);
+ core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
+ ') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
+ ' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
// Sleep for the backoff period
- await new Promise((r) => setTimeout(r, backoffPeriods[attempt] * 1000));
+ await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
continue;
}
else {
// If the upload fails with 5xx then we assume it is a temporary problem
// and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
- throw new Error(`Upload failed (${requestID}): (${statusCode}) ${JSON.stringify(response.data)}`);
+ throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
}
}
// This case shouldn't ever happen as the final iteration of the loop
// will always throw an error instead of exiting to here.
- throw new Error("Upload failed");
+ throw new Error('Upload failed');
}
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
- async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger) {
- const sarifFiles = [];
- if (!fs.existsSync(sarifPath)) {
- throw new Error(`Path does not exist: ${sarifPath}`);
- }
- if (fs.lstatSync(sarifPath).isDirectory()) {
- fs.readdirSync(sarifPath)
- .filter((f) => f.endsWith(".sarif"))
- .map((f) => path.resolve(sarifPath, f))
- .forEach((f) => sarifFiles.push(f));
+ async function upload(input) {
+ if (fs.lstatSync(input).isDirectory()) {
+ const sarifFiles = fs.readdirSync(input)
+ .filter(f => f.endsWith(".sarif"))
+ .map(f => path.resolve(input, f));
if (sarifFiles.length === 0) {
- throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
+ throw new Error("No SARIF files found to upload in \"" + input + "\".");
}
+ return await uploadFiles(sarifFiles);
}
else {
- sarifFiles.push(sarifPath);
+ return await uploadFiles([input]);
}
- return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger);
}
exports.upload = upload;
// Counts the number of results in the given SARIF file
@@ -129,78 +123,80 @@ function countResultsInSarif(sarif) {
exports.countResultsInSarif = countResultsInSarif;
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
- function validateSarifFileSchema(sarifFilePath, logger) {
- const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
- const schema = require("../src/sarif_v2.1.0_schema.json");
+ function validateSarifFileSchema(sarifFilePath) {
+ const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
+ const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
- logger.startGroup(`Error details: ${error.stack}`);
- logger.info(JSON.stringify(error, null, 2));
- logger.endGroup();
+ core.startGroup("Error details: " + error.stack);
+ core.info(JSON.stringify(error, null, 2));
+ core.endGroup();
}
// Set the main error message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
- const sarifErrors = result.errors.map((e) => `- ${e.stack}`);
- throw new Error(`Unable to upload "${sarifFilePath}" as it is not valid SARIF:\n${sarifErrors.join("\n")}`);
+ const sarifErrors = result.errors.map(e => "- " + e.stack);
+ throw new Error("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + sarifErrors.join("\n"));
}
}
exports.validateSarifFileSchema = validateSarifFileSchema;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
- async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger) {
- logger.info(`Uploading sarif files: ${JSON.stringify(sarifFiles)}`);
- if (mode === "actions") {
- // This check only works on actions as env vars don't persist between calls to the runner
- const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
- if (process.env[sentinelEnvVar]) {
- throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
- }
- core.exportVariable(sentinelEnvVar, sentinelEnvVar);
+ async function uploadFiles(sarifFiles) {
+ core.startGroup("Uploading results");
+ core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
+ const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
+ if (process.env[sentinelEnvVar]) {
+ throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
+ core.exportVariable(sentinelEnvVar, sentinelEnvVar);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
- validateSarifFileSchema(file, logger);
+ validateSarifFileSchema(file);
}
+ const commitOid = await util.getCommitOid();
+ const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
+ const ref = util.getRef();
+ const analysisKey = await util.getAnalysisKey();
+ const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
+ const startedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
let sarifPayload = combineSarifFiles(sarifFiles);
- sarifPayload = fingerprints.addFingerprints(sarifPayload, checkoutPath, logger);
- const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
- const checkoutURI = file_url_1.default(checkoutPath);
+ sarifPayload = fingerprints.addFingerprints(sarifPayload);
+ const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
+ let checkoutPath = core.getInput('checkout_path');
+ let checkoutURI = file_url_1.default(checkoutPath);
+ const workflowRunID = parseInt(workflowRunIDStr, 10);
+ if (Number.isNaN(workflowRunID)) {
+ throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
+ }
+ let matrix = core.getInput('matrix');
+ if (matrix === "null" || matrix === "") {
+ matrix = undefined;
+ }
const toolNames = util.getToolNames(sarifPayload);
- let payload;
- if (mode === "actions") {
- payload = JSON.stringify({
- commit_oid: commitOid,
- ref,
- analysis_key: analysisKey,
- analysis_name: analysisName,
- sarif: zipped_sarif,
- workflow_run_id: workflowRunID,
- checkout_uri: checkoutURI,
- environment,
- started_at: process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
- tool_names: toolNames,
- });
- }
- else {
- payload = JSON.stringify({
- commit_sha: commitOid,
- ref,
- sarif: zipped_sarif,
- checkout_uri: checkoutURI,
- tool_name: toolNames[0],
- });
- }
+ const payload = JSON.stringify({
+ "commit_oid": commitOid,
+ "ref": ref,
+ "analysis_key": analysisKey,
+ "analysis_name": analysisName,
+ "sarif": zipped_sarif,
+ "workflow_run_id": workflowRunID,
+ "checkout_uri": checkoutURI,
+ "environment": matrix,
+ "started_at": startedAt,
+ "tool_names": toolNames,
+ });
// Log some useful debug info about the info
const rawUploadSizeBytes = sarifPayload.length;
- logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+ core.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
const zippedUploadSizeBytes = zipped_sarif.length;
- logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+ core.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
const numResultInSarif = countResultsInSarif(sarifPayload);
- logger.debug(`Number of results in upload: ${numResultInSarif}`);
+ core.debug("Number of results in upload: " + numResultInSarif);
// Make the upload
- await uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger);
+ await uploadPayload(payload);
+ core.endGroup();
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
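Aside: both sides of the uploadPayload diff above share the same retry policy: at most four attempts, sleeping 1, 5 and 15 seconds between them, treating 202 as success, retrying only on 5xx, and failing immediately on anything else. A simplified sketch of that policy; sendOnce is a stand-in for the real API request and is not part of the action.

// Sketch only: condensed from the retry loop shown in the diff above.
async function uploadWithRetry(sendOnce) {
    const backoffPeriods = [1, 5, 15]; // seconds between attempts
    for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
        const response = await sendOnce();
        if (response.status === 202) {
            return response; // accepted: upload succeeded
        }
        const retryable = response.status >= 500 && response.status < 600;
        if (!retryable || attempt === backoffPeriods.length) {
            throw new Error(`Upload failed: (${response.status}) ${JSON.stringify(response.data)}`);
        }
        await new Promise((resolve) => setTimeout(resolve, backoffPeriods[attempt] * 1000));
    }
}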

File diff suppressed because one or more lines are too long

13
lib/upload-lib.test.js generated

@@ -11,16 +11,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
- const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
testing_utils_1.setupTests(ava_1.default);
- ava_1.default("validateSarifFileSchema - valid", (t) => {
- const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
- t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getRunnerLogger(true)));
+ ava_1.default('validateSarifFileSchema - valid', t => {
+ const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
+ t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile));
});
- ava_1.default("validateSarifFileSchema - invalid", (t) => {
- const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`;
- t.throws(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getRunnerLogger(true)));
+ ava_1.default('validateSarifFileSchema - invalid', t => {
+ const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
+ t.throws(() => uploadLib.validateSarifFileSchema(inputFile));
});
//# sourceMappingURL=upload-lib.test.js.map
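Aside: the validateSarifFileSchema changes above only move where the logger and schema come from; the validation itself is the jsonschema package checking a parsed SARIF document against the bundled SARIF 2.1.0 schema. A standalone sketch of that check, with placeholder file paths.

const fs = require("fs");
const jsonschema = require("jsonschema");

// Sketch only: validates a SARIF file the same way upload-lib does; paths are placeholders.
const sarif = JSON.parse(fs.readFileSync("results.sarif", "utf8"));
const schema = JSON.parse(fs.readFileSync("sarif_v2.1.0_schema.json", "utf8"));
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
    for (const error of result.errors) {
        console.error(`- ${error.stack}`);
    }
    throw new Error("not valid SARIF");
}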


@@ -1 +1 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,uCAA4C;AAC5C,mDAA6C;AAC7C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC5C,MAAM,SAAS,GAAG,GAAG,SAAS,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CACf,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC9C,MAAM,SAAS,GAAG,GAAG,SAAS,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CACZ,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC"} {"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AAClE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/D,CAAC,CAAC,CAAC"}


@@ -1,43 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
async function sendSuccessStatusReport(startedAt, uploadStats) {
const statusReportBase = await actionsUtil.createStatusReportBase("upload-sarif", "success", startedAt);
const statusReport = {
...statusReportBase,
...uploadStats,
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "starting", startedAt), true))) {
return;
}
try {
const uploadStats = await upload_lib.upload(actionsUtil.getRequiredInput("sarif_file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), actionsUtil.getRequiredInput("token"), actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), "actions", logging_1.getActionsLogger());
await sendSuccessStatusReport(startedAt, uploadStats);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "failure", startedAt, error.message, error.stack));
return;
}
}
run().catch((e) => {
core.setFailed(`codeql/upload-sarif action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=upload-sarif-action.js.map


@@ -1 +0,0 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAM3C,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,EACD,IAAI,CACL,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,MAAM,WAAW,CAAC,YAAY,EAAE,EAChC,WAAW,CAAC,MAAM,EAAE,EACpB,MAAM,WAAW,CAAC,cAAc,EAAE,EAClC,WAAW,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAClD,WAAW,CAAC,gBAAgB,EAAE,EAC9B,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,SAAS,EACT,0BAAgB,EAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,sCAAsC,CAAC,EAAE,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

41
lib/upload-sarif.js generated Normal file

@@ -0,0 +1,41 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendSuccessStatusReport(startedAt, uploadStats) {
const statusReportBase = await util.createStatusReportBase('upload-sarif', 'success', startedAt);
const statusReport = {
...statusReportBase,
...uploadStats,
};
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
if (util.should_abort('upload-sarif', false) ||
!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
return;
}
try {
const uploadStats = await upload_lib.upload(core.getInput('sarif_file'));
await sendSuccessStatusReport(startedAt, uploadStats);
}
catch (error) {
core.setFailed(error.message);
await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'failure', startedAt, error.message, error.stack));
return;
}
}
run().catch(e => {
core.setFailed("codeql/upload-sarif action failed: " + e);
console.log(e);
});
//# sourceMappingURL=upload-sarif.js.map
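Aside: both upload-sarif entry points above (the deleted upload-sarif-action.js and the added upload-sarif.js) follow the same status-report protocol: send a "starting" report and bail out if it is rejected, run the upload, then send either a "success" report merged with the upload stats or a "failure" report carrying the error message and stack. A schematic of that flow; createStatusReportBase and sendStatusReport stand in for whichever util or actions-util module the entry point uses, and the failure handling is simplified.

// Sketch only: the surrounding reporting pattern, not a drop-in replacement for either file.
async function runWithStatusReports({ createStatusReportBase, sendStatusReport }, doUpload) {
    const startedAt = new Date();
    if (!(await sendStatusReport(await createStatusReportBase("upload-sarif", "starting", startedAt), true))) {
        return;
    }
    try {
        const uploadStats = await doUpload();
        const base = await createStatusReportBase("upload-sarif", "success", startedAt);
        await sendStatusReport({ ...base, ...uploadStats });
    }
    catch (error) {
        await sendStatusReport(await createStatusReportBase("upload-sarif", "failure", startedAt, error.message, error.stack));
        throw error; // simplified: the real entry points call core.setFailed instead of rethrowing
    }
}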

Some files were not shown because too many files have changed in this diff.