Compare commits


5 Commits

Author           SHA1        Message                                                                Date
David Verdeguer  f65b956f54  Catch auto_install_packages errors                                     2020-07-23 15:56:03 +02:00
David Verdeguer  42f07e2f87  Update python scripts download link                                    2020-07-23 13:12:47 +02:00
David Verdeguer  d190a018d6  Call core.endGroup() when doing an early return in installPythonDeps  2020-07-23 13:05:30 +02:00
David Verdeguer  4d4cc33d97  Setup python extractor for installed deps                              2020-07-23 12:57:35 +02:00
David Verdeguer  1c95faf847  Add autoinstaller for python deps                                      2020-07-23 12:57:12 +02:00
6179 changed files with 215941 additions and 583146 deletions


@@ -1,5 +0,0 @@
**/webpack.config.js
lib/**
runner/dist/**
src/testdata/**
tests/**


@@ -1,58 +0,0 @@
{
"parser": "@typescript-eslint/parser",
"parserOptions": {
"project": "./tsconfig.json"
},
"plugins": ["@typescript-eslint", "filenames", "github", "import", "no-async-foreach"],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-requiring-type-checking",
"plugin:github/recommended",
"plugin:github/typescript"
],
"rules": {
"filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],
"import/extensions": "error",
"import/no-amd": "error",
"import/no-commonjs": "error",
"import/no-dynamic-require": "error",
"import/no-extraneous-dependencies": ["error", {"devDependencies": false}],
"import/no-namespace": "off",
"import/no-unresolved": "error",
"import/no-webpack-loader-syntax": "error",
"no-async-foreach/no-async-foreach": "error",
"no-console": "off",
"no-sequences": "error",
"one-var": ["error", "never"],
"sort-imports": ["error", { "allowSeparatedGroups": true }]
},
"overrides": [{
// "temporarily downgraded during transition to eslint
"files": "**",
"rules": {
"@typescript-eslint/ban-types": "off",
"@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-call": "off",
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unused-vars": "off",
"@typescript-eslint/no-var-requires": "off",
"@typescript-eslint/prefer-regexp-exec": "off",
"@typescript-eslint/require-await": "off",
"@typescript-eslint/restrict-template-expressions": "off",
"eslint-comments/no-use": "off",
"func-style": "off",
"github/no-then": "off",
"import/no-extraneous-dependencies": "off",
"no-shadow": "off",
"no-sparse-arrays": "off",
"no-throw-literal": "off",
"no-useless-escape": "off",
"sort-imports": "off"
}
}]
}


@@ -1,9 +1,6 @@
name: "CodeQL action"
on:
push:
branches: [main, v1]
pull_request:
on: [push, pull_request]
jobs:
build:


@@ -1,9 +1,6 @@
name: "Integration Testing"
on:
push:
branches: [main, v1]
pull_request:
on: [push, pull_request]
jobs:
multi-language-repo_test-autodetect-languages:
@@ -25,7 +22,7 @@ jobs:
env:
TEST_MODE: true
- run: |
cd "$RUNNER_TEMP/codeql_databases"
cd "$CODEQL_ACTION_DATABASE_DIR"
# List all directories as there will be precisely one directory per database
# but there may be other files in this directory such as query suites.
if [ "$(ls -d */ | wc -l)" != 6 ] || \
@@ -153,297 +150,3 @@ jobs:
- uses: ./../action/analyze
env:
TEST_MODE: true
runner-analyze-javascript-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
# Pass --config-file here, but not for other jobs in this workflow.
# This means we're testing the config file parsing in the runner
# but not slowing down all jobs unnecessarily, as there is little value in
# testing the parsing on different operating systems and languages.
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
. ./codeql-runner/codeql-env.sh
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
../action/runner/dist/codeql-runner-linux autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
../action/runner/dist/codeql-runner-win.exe autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
../action/runner/dist/codeql-runner-macos autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-upload-sarif:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Upload with runner
run: |
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}


@@ -1,24 +1,21 @@
name: "PR checks"
on:
push:
branches: [main, v1]
pull_request:
on: [push, pull_request]
jobs:
lint-js:
tslint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Run Lint
- uses: actions/checkout@v1
- name: tslint
run: npm run-script lint
check-js:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v1
- name: Check generated JavaScript
run: |
# Sanity check that repo is clean to start with
@@ -27,8 +24,6 @@ jobs:
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Wipe the lib directory in case there are extra unnecessary files in there
rm -rf lib
# Generate the JavaScript files
npm run-script build
# Check that repo is still clean
@@ -44,7 +39,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v1
- name: Check node modules up to date
run: |
# Sanity check that repo is clean to start with
@@ -53,6 +48,7 @@ jobs:
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
@@ -67,12 +63,9 @@ jobs:
echo "Success: node_modules are up to date"
npm-test:
strategy:
matrix:
os: [ubuntu-latest,macos-latest]
runs-on: ${{ matrix.os }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v1
- name: npm run-script test
run: npm run-script test
run: npm run-script test


@@ -1,73 +0,0 @@
#
# Split the CodeQL Bundle into platform bundles
#
# Instructions:
# 1. Upload the new codeql-bundle (codeql-bundle.tar.gz) as an asset of the
# release (codeql-bundle-20200826)
# 2. Take note of the CLI Release used by the bundle (e.g., v2.2.5)
# 3. Manually launch this workflow file (via the Actions UI) specifying
# - The CLI Release (e.g., v2.2.5)
# - The release tag (e.g., codeql-bundle-20200826)
# 4. If everything succeeds you should see 3 new assets.
#
name: Split Bundle
on:
workflow_dispatch:
inputs:
cli-release:
description: 'CodeQL CLI Release (e.g., "v2.2.5")'
required: true
bundle-tag:
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
required: true
jobs:
build:
runs-on: ubuntu-latest
env:
CLI_RELEASE: "${{ github.event.inputs.cli-release }}"
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
strategy:
fail-fast: false
matrix:
platform: ["linux64", "osx64", "win64"]
steps:
- name: Resolve Upload URL for the release
id: save_url
run: |
UPLOAD_URL=$(curl -sS \
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
-H "Accept: application/json" \
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
echo ${UPLOAD_URL}
echo "::set-output name=upload_url::${UPLOAD_URL}"
- name: Download CodeQL CLI and Bundle
run: |
wget --no-verbose "https://github.com/${GITHUB_REPOSITORY}/releases/download/${RELEASE_TAG}/codeql-bundle.tar.gz"
wget --no-verbose "https://github.com/github/codeql-cli-binaries/releases/download/${CLI_RELEASE}/codeql-${{matrix.platform}}.zip"
- name: Create Platform Package
# Replace the codeql-binaries with the platform specific ones
run: |
gunzip codeql-bundle.tar.gz
tar -f codeql-bundle.tar --delete codeql
unzip -q codeql-${{matrix.platform}}.zip
tar -f codeql-bundle.tar --append codeql
gzip codeql-bundle.tar
mv codeql-bundle.tar.gz codeql-bundle-${{matrix.platform}}.tar.gz
du -sh codeql-bundle-${{matrix.platform}}.tar.gz
- name: Upload Platform Package
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.save_url.outputs.upload_url }}
asset_path: ./codeql-bundle-${{matrix.platform}}.tar.gz
asset_name: codeql-bundle-${{matrix.platform}}.tar.gz
asset_content_type: application/tar+gzip

.gitignore (vendored, 2 changed lines)

@@ -1,2 +0,0 @@
/runner/dist/
/runner/node_modules/


@@ -1,4 +1,4 @@
# Contributing
## Contributing
[fork]: https://github.com/github/codeql-action/fork
[pr]: https://github.com/github/codeql-action/compare
@@ -26,33 +26,10 @@ This project also includes configuration to run tests from VSCode (with support
To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests.
### Running the action locally
It is possible to run this action locally using [act](https://github.com/nektos/act) by following these steps:
1. Create a GitHub [Personal Access Token](https://github.com/settings/tokens) (PAT).
1. Install [act](https://github.com/nektos/act) v0.2.10 or greater.
1. Add a `.env` file in the root of the project you are running:
```bash
CODEQL_LOCAL_RUN=true
# Optional, for better logging
GITHUB_JOB=<ANY_JOB_NAME>
```
1. Run `act -j codeql -s GITHUB_TOKEN=<PAT>`
Running locally will generate the CodeQL database and run all the queries, but it will avoid uploading and reporting results to GitHub. Note that this must be done on a repository that _consumes_ this action, not this repository. The use case is to debug failures of this action on specific repositories.
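Putting those steps together, a minimal local-run sketch could look like this (illustrative only: the job name `codeql` and the `MY_PAT` shell variable are placeholders, not values required by this repository):
```bash
# Illustrative sketch: create the .env file described above, then run the job with act.
# "codeql" is just an example job name; GITHUB_JOB is optional and only improves logging.
cat > .env <<'EOF'
CODEQL_LOCAL_RUN=true
GITHUB_JOB=codeql
EOF

# Pass a Personal Access Token as the GITHUB_TOKEN secret.
act -j codeql -s GITHUB_TOKEN="$MY_PAT"
```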
### Integration tests
As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you're making, you may want to add a test to this file or extend an existing one.
### Building the CodeQL runner
Navigate to the `runner` directory and run `npm install` to install dependencies needed only for compiling the CodeQL runner. Run `npm run build-runner` to output files to the `runner/dist` directory.
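For illustration, the build boils down to something like the following (a sketch assuming a standard checkout of this repository; the integration-testing workflow above runs the same commands):
```bash
# Sketch: build the CodeQL runner from a checkout of this repository.
cd runner
npm install           # installs the compile-time dependencies for the runner only
npm run build-runner  # emits the codeql-runner-* binaries
ls dist/              # the compiled output is expected under runner/dist
```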
## Submitting a pull request
1. [Fork][fork] and clone the repository
@@ -60,7 +37,6 @@ Navigate to the `runner` directory and run `npm install` to install dependencies
3. Make your change, add tests, and make sure the tests still pass
4. Push to your fork and [submit a pull request][pr]
5. Pat yourself on the back and wait for your pull request to be reviewed and merged.
If you're a GitHub staff member, you can merge your own PR once it's approved; for external contributors, GitHub staff will merge your PR once it's approved.
Here are a few things you can do that will increase the likelihood of your pull request being accepted:


@@ -98,23 +98,7 @@ Use the `config-file` parameter of the `init` action to enable the configuration
config-file: ./.github/codeql/codeql-config.yml
```
The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
```yaml
- uses: github/codeql-action/init@v1
with:
queries: <local-or-remote-query>,<another-query>
```
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:
```yaml
- uses: github/codeql-action/init@v1
with:
queries: +<local-or-remote-query>,<another-query>
```
The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
## Troubleshooting


@@ -16,13 +16,10 @@ inputs:
ram:
description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
required: false
add-snippets:
description: Specify whether or not to add code snippets to the output sarif file.
required: false
default: "false"
threads:
description: The number of threads to be used by CodeQL.
required: false
default: "1"
checkout_path:
description: "The path at which the analyzed repository was checked out. Used to relativeize any absolute paths in the uploaded SARIF file."
required: false
@@ -33,4 +30,4 @@ inputs:
default: ${{ toJson(matrix) }}
runs:
using: 'node12'
main: '../lib/analyze-action.js'
main: '../lib/finalize-db.js'


@@ -8,4 +8,4 @@ inputs:
default: ${{ toJson(matrix) }}
runs:
using: 'node12'
main: '../lib/autobuild-action.js'
main: '../lib/autobuild.js'


@@ -5,7 +5,7 @@ inputs:
tools:
description: URL of CodeQL tools
required: false
# If not specified the Action will check in several places until it finds the CodeQL tools.
default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200630/codeql-bundle.tar.gz
languages:
description: The languages to be analysed
required: false
@@ -16,13 +16,10 @@ inputs:
config-file:
description: Path of the config file to use
required: false
queries:
description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
required: false
setup-python-dependencies:
description: Try to auto-install your python dependencies
required: true
default: 'true'
runs:
using: 'node12'
main: '../lib/init-action.js'
main: '../lib/setup-tracer.js'

lib/actions-util.js (generated, 266 changed lines)

@@ -1,266 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
const util_1 = require("./util");
/**
* Wrapper around core.getInput for inputs that always have a value.
* Also see getOptionalInput.
*
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
function getRequiredInput(name) {
return core.getInput(name, { required: true });
}
exports.getRequiredInput = getRequiredInput;
/**
* Wrapper around core.getInput that converts empty inputs to undefined.
* Also see getRequiredInput.
*
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
function getOptionalInput(name) {
const value = core.getInput(name);
return value.length > 0 ? value : undefined;
}
exports.getOptionalInput = getOptionalInput;
/**
* Get an environment parameter, but throw an error if it is not set.
*/
function getRequiredEnvParam(paramName) {
const value = process.env[paramName];
if (value === undefined || value.length === 0) {
throw new Error(`${paramName} environment variable must be set`);
}
core.debug(`${paramName}=${value}`);
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
/**
* Ensures all required environment variables are set in the context of a local run.
*/
function prepareLocalRunEnvironment() {
if (!util_1.isLocalRun()) {
return;
}
core.debug("Action is running locally.");
if (!process.env.GITHUB_JOB) {
core.exportVariable("GITHUB_JOB", "UNKNOWN-JOB");
}
}
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
/**
* Gets the SHA of the commit that is currently checked out.
*/
async function getCommitOid() {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
// the workflow has changed the current commit to the head commit instead of
// the merge commit, which must mean that git is available.
// Even if this does go wrong, it's not a huge problem for the alerts to
// be reported on the merge commit.
try {
let commitOid = "";
await new toolrunnner.ToolRunner("git", ["rev-parse", "HEAD"], {
silent: true,
listeners: {
stdout: (data) => {
commitOid += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
return commitOid.trim();
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
return getRequiredEnvParam("GITHUB_SHA");
}
}
exports.getCommitOid = getCommitOid;
/**
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath() {
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
owner,
repo,
run_id,
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
return workflowResponse.data.path;
}
/**
* Get the workflow run ID.
*/
function getWorkflowRunID() {
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
if (Number.isNaN(workflowRunID)) {
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
}
return workflowRunID;
}
exports.getWorkflowRunID = getWorkflowRunID;
/**
* Get the analysis key parameter for the current job.
*
* This will combine the workflow path and current job name.
* Computing this the first time requires making requests to
* the github API, but after that the result will be cached.
*/
async function getAnalysisKey() {
const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
let analysisKey = process.env[analysisKeyEnvVar];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam("GITHUB_JOB");
analysisKey = `${workflowPath}:${jobName}`;
core.exportVariable(analysisKeyEnvVar, analysisKey);
return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
/**
* Get the ref currently being analyzed.
*/
function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam("GITHUB_REF");
// For pull request refs we want to convert from the 'merge' ref
// to the 'head' ref, as that is what we want to analyse.
// There should have been some code earlier in the workflow to do
// the checkout, but we have no way of verifying that here.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
if (pull_ref_regex.test(ref)) {
return ref.replace(pull_ref_regex, "refs/pull/$1/head");
}
else {
return ref;
}
}
exports.getRef = getRef;
/**
* Compose a StatusReport.
*
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
* @param status The status. Must be 'success', 'failure', or 'starting'
* @param startedAt The time this action started executing.
* @param cause Cause of failure (only supply if status is 'failure')
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = process.env["GITHUB_SHA"] || "";
const ref = getRef();
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
let workflowRunID = -1;
if (workflowRunIDStr) {
workflowRunID = parseInt(workflowRunIDStr, 10);
}
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
const jobName = process.env["GITHUB_JOB"] || "";
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
const statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
job_name: jobName,
analysis_key,
commit_oid: commitOid,
ref,
action_name: actionName,
action_oid: "unknown",
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status,
};
// Add optional parameters
if (cause) {
statusReport.cause = cause;
}
if (exception) {
statusReport.exception = exception;
}
if (status === "success" || status === "failure" || status === "aborted") {
statusReport.completed_at = new Date().toISOString();
}
const matrix = getRequiredInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
/**
* Send a status report to the code_scanning/analysis/status endpoint.
*
* Optionally checks the response from the API endpoint and sets the action
* as failed if the status report failed. This is only expected to be used
* when sending a 'starting' report.
*
* Returns whether sending the status report was successful or not.
*/
async function sendStatusReport(statusReport, ignoreFailures) {
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
core.debug("Not sending status report to GitHub Enterprise");
return true;
}
if (util_1.isLocalRun()) {
core.debug("Not sending status report because this is a local run");
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
const statusResponse = await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
owner,
repo,
data: statusReportJSON,
});
if (!ignoreFailures) {
// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusResponse.status === 403) {
core.setFailed("The repo on which this action is running is not opted-in to CodeQL code scanning.");
return false;
}
if (statusResponse.status === 404) {
core.setFailed("Not authorized to used the CodeQL code scanning feature on this repo.");
return false;
}
}
return true;
}
exports.sendStatusReport = sendStatusReport;
//# sourceMappingURL=actions-util.js.map

File diff suppressed because one or more lines are too long


@@ -1,31 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const actions_util_1 = require("./actions-util");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("getRef() throws on the empty string", (t) => {
process.env["GITHUB_REF"] = "";
t.throws(actions_util_1.getRef);
});
ava_1.default("prepareEnvironment() when a local run", (t) => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = "false";
process.env.GITHUB_JOB = "YYY";
actions_util_1.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, "YYY");
process.env.CODEQL_LOCAL_RUN = "true";
actions_util_1.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, "YYY");
process.env.GITHUB_JOB = "";
actions_util_1.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
//# sourceMappingURL=actions-util.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"actions-util.test.js","sourceRoot":"","sources":["../src/actions-util.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,iDAAoE;AACpE,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE,EAAE;IAChD,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;IAC/B,CAAC,CAAC,MAAM,CAAC,qBAAM,CAAC,CAAC;AACnB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,uCAAuC,EAAE,CAAC,CAAC,EAAE,EAAE;IAClD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;IAElD,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,OAAO,CAAC;IACvC,OAAO,CAAC,GAAG,CAAC,UAAU,GAAG,KAAK,CAAC;IAE/B,yCAA0B,EAAE,CAAC;IAE7B,YAAY;IACZ,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;IAE3C,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,MAAM,CAAC;IAEtC,yCAA0B,EAAE,CAAC;IAE7B,YAAY;IACZ,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;IAE3C,OAAO,CAAC,GAAG,CAAC,UAAU,GAAG,EAAE,CAAC;IAE5B,yCAA0B,EAAE,CAAC;IAE7B,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAEnD,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,YAAY,CAAC;AAC9C,CAAC,CAAC,CAAC"}

lib/analysis-paths.js (generated, 45 changed lines)

@@ -1,29 +1,28 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
function isInterpretedLanguage(language) {
return language === "javascript" || language === "python";
return language === 'javascript' || language === 'python';
}
// Matches a string containing only characters that are legal to include in paths on windows.
exports.legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
function buildIncludeExcludeEnvVar(paths) {
// Ignore anything containing a *
paths = paths.filter((p) => p.indexOf("*") === -1);
paths = paths.filter(p => p.indexOf('*') === -1);
// Some characters are illegal in path names in windows
if (process.platform === "win32") {
paths = paths.filter((p) => p.match(exports.legalWindowsPathCharactersRegex));
if (process.platform === 'win32') {
paths = paths.filter(p => p.match(exports.legalWindowsPathCharactersRegex));
}
return paths.join("\n");
return paths.join('\n');
}
function printPathFiltersWarning(config, logger) {
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
}
}
exports.printPathFiltersWarning = printPathFiltersWarning;
function includeAndExcludeAnalysisPaths(config) {
// The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
// control which files/directories are traversed when scanning.
@@ -33,19 +32,27 @@ function includeAndExcludeAnalysisPaths(config) {
// traverse the entire file tree to determine which files are matched.
// Any paths containing "*" are not included in these.
if (config.paths.length !== 0) {
process.env["LGTM_INDEX_INCLUDE"] = buildIncludeExcludeEnvVar(config.paths);
core.exportVariable('LGTM_INDEX_INCLUDE', buildIncludeExcludeEnvVar(config.paths));
}
if (config.pathsIgnore.length !== 0) {
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(config.pathsIgnore);
core.exportVariable('LGTM_INDEX_EXCLUDE', buildIncludeExcludeEnvVar(config.pathsIgnore));
}
// The 'LGTM_INDEX_FILTERS' environment variable controls which files are
// extracted or ignored. It does not control which directories are traversed.
// This does understand the glob and double-glob syntax.
const filters = [];
filters.push(...config.paths.map((p) => `include:${p}`));
filters.push(...config.pathsIgnore.map((p) => `exclude:${p}`));
filters.push(...config.paths.map(p => 'include:' + p));
filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
if (filters.length !== 0) {
process.env["LGTM_INDEX_FILTERS"] = filters.join("\n");
core.exportVariable('LGTM_INDEX_FILTERS', filters.join('\n'));
}
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 ||
config.pathsIgnore.length !== 0 ||
filters.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
}
}
exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;


@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;AAGA,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAC3D,MAAM,CAAC,WAAW,CACnB,CAAC;KACH;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AA1BD,wEA0BC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAClD,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}


@@ -13,42 +13,29 @@ Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const analysisPaths = __importStar(require("./analysis-paths"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("emptyPaths", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
});
ava_1.default("nonEmptyPaths", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const config = {
languages: [],
queries: {},
paths: ["path1", "path2", "**/path3"],
pathsIgnore: ["path4", "path5", "path6/**"],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
t.is(process.env["LGTM_INDEX_EXCLUDE"], "path4\npath5");
t.is(process.env["LGTM_INDEX_FILTERS"], "include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**");
});
const config = {
languages: [],
queries: {},
paths: ['path1', 'path2', '**/path3'],
pathsIgnore: ['path4', 'path5', 'path6/**'],
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
});
//# sourceMappingURL=analysis-paths.test.js.map


@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;KACV,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;KAC5C,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}

lib/analyze-action.js (generated, 56 changed lines)

@@ -1,56 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, stats, error) {
var _a, _b, _c;
const status = ((_a = stats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const statusReport = {
...statusReportBase,
...(stats || {}),
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
let stats = undefined;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt), true))) {
return;
}
const logger = logging_1.getActionsLogger();
const config = await config_utils_1.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
stats = await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), actionsUtil.getRequiredInput("token"), actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), actionsUtil.getRequiredInput("upload") === "true", "actions", actionsUtil.getRequiredInput("output"), util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await sendStatusReport(startedAt, stats, error);
return;
}
await sendStatusReport(startedAt, stats);
}
run().catch((e) => {
core.setFailed(`analyze action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=analyze-action.js.map


@@ -1 +0,0 @@
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6D;AAC7D,iDAA2C;AAC3C,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAM/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,EACD,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,MAAM,GAAG,MAAM,wBAAS,CAC5B,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,KAAK,GAAG,MAAM,oBAAU,CACtB,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,MAAM,WAAW,CAAC,YAAY,EAAE,EAChC,WAAW,CAAC,MAAM,EAAE,EACpB,MAAM,WAAW,CAAC,cAAc,EAAE,EAClC,WAAW,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAClD,WAAW,CAAC,gBAAgB,EAAE,EAC9B,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EACjD,SAAS,EACT,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACpE,MAAM,EACN,MAAM,CACP,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,0BAA0B,CAAC,EAAE,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

lib/analyze.js (generated, 129 changed lines)

@@ -1,129 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function setupPythonExtractor(logger) {
const codeqlPython = process.env["CODEQL_PYTHON"];
if (codeqlPython === undefined || codeqlPython.length === 0) {
// If CODEQL_PYTHON is not set, no dependencies were installed, so we don't need to do anything
return;
}
let output = "";
const options = {
listeners: {
stdout: (data) => {
output += data.toString();
},
},
};
await new toolrunnner.ToolRunner(codeqlPython, [
"-c",
"import os; import pip; print(os.path.dirname(os.path.dirname(pip.__file__)))",
], options).exec();
logger.info(`Setting LGTM_INDEX_IMPORT_PATH=${output}`);
process.env["LGTM_INDEX_IMPORT_PATH"] = output;
output = "";
await new toolrunnner.ToolRunner(codeqlPython, ["-c", "import sys; print(sys.version_info[0])"], options).exec();
logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`);
process.env["LGTM_PYTHON_SETUP_VERSION"] = output;
}
async function createdDBForScannedLanguages(config, logger) {
// Insert the LGTM_INDEX_X env vars at this point so they are set when
// we extract any scanned languages.
analysisPaths.includeAndExcludeAnalysisPaths(config);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
if (languages_1.isScannedLanguage(language)) {
logger.startGroup(`Extracting ${language}`);
if (language === languages_1.Language.python) {
await setupPythonExtractor(logger);
}
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
logger.endGroup();
}
}
}
async function finalizeDatabaseCreation(config, logger) {
await createdDBForScannedLanguages(config, logger);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
logger.startGroup(`Finalizing ${language}`);
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language));
logger.endGroup();
}
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
const statusReport = {};
for (const language of config.languages) {
logger.startGroup(`Analyzing ${language}`);
const queries = config.queries[language];
if (queries.builtin.length === 0 && queries.custom.length === 0) {
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
try {
for (const type of ["builtin", "custom"]) {
if (queries[type].length > 0) {
const startTime = new Date().getTime();
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuitePath = `${databasePath}-queries-${type}.qls`;
const querySuiteContents = queries[type]
.map((q) => `- query: ${q}`)
.join("\n");
fs.writeFileSync(querySuitePath, querySuiteContents);
logger.debug(`Query suite file for ${language}...\n${querySuiteContents}`);
const sarifFile = path.join(sarifFolder, `${language}-${type}.sarif`);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
await codeql.databaseAnalyze(databasePath, sarifFile, querySuitePath, memoryFlag, addSnippetsFlag, threadsFlag);
logger.debug(`SARIF results for database ${language} created at "${sarifFile}"`);
logger.endGroup();
// Record the performance
const endTime = new Date().getTime();
statusReport[`analyze_${type}_queries_${language}_duration_ms`] =
endTime - startTime;
}
}
}
catch (e) {
logger.error(`Error running analysis for ${language}: ${e}`);
logger.info(e);
statusReport.analyze_failure_language = language;
return statusReport;
}
}
return statusReport;
}
exports.runQueries = runQueries;
async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, doUpload, mode, outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
fs.mkdirSync(outputDir, { recursive: true });
logger.info("Finalizing database creation");
await finalizeDatabaseCreation(config, logger);
logger.info("Analyzing database");
const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
if (!doUpload) {
logger.info("Not uploading results");
return { ...queriesStats };
}
const uploadStats = await upload_lib.upload(outputDir, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger);
return { ...queriesStats, ...uploadStats };
}
exports.runAnalyze = runAnalyze;
//# sourceMappingURL=analyze.js.map


@@ -1 +0,0 @@
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAAqC;AAErC,2CAA0D;AAG1D,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAmC/B,KAAK,UAAU,oBAAoB,CAAC,MAAc;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAClD,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3D,+FAA+F;QAC/F,OAAO;KACR;IAED,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,OAAO,GAAG;QACd,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC;IAEF,MAAM,IAAI,WAAW,CAAC,UAAU,CAC9B,YAAY,EACZ;QACE,IAAI;QACJ,8EAA8E;KAC/E,EACD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,kCAAkC,MAAM,EAAE,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAE/C,MAAM,GAAG,EAAE,CAAC;IACZ,MAAM,IAAI,WAAW,CAAC,UAAU,CAC9B,YAAY,EACZ,CAAC,IAAI,EAAE,wCAAwC,CAAC,EAChD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,qCAAqC,MAAM,EAAE,CAAC,CAAC;IAC3D,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,GAAG,MAAM,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;YAE5C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,MAAM,EAAE;gBAChC,MAAM,oBAAoB,CAAC,MAAM,CAAC,CAAC;aACpC;YAED,MAAM,MAAM,CAAC,sBAAsB,CACjC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,CACT,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAC3B,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CACrD,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AACpD,KAAK,UAAU,UAAU,CAC9B,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,YAAY,GAAwB,EAAE,CAAC;IAE7C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,QAAQ,EAAE,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QACzC,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;YAC/D,MAAM,IAAI,KAAK,CACb,qBAAqB,QAAQ,gDAAgD,CAC9E,CAAC;SACH;QAED,IAAI;YACF,KAAK,MAAM,IAAI,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,EAAE;gBACxC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBAEvC,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAC7C,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;oBACF,uEAAuE;oBACvE,2EAA2E;oBAC3E,MAAM,cAAc,GAAG,GAAG,YAAY,YAAY,IAAI,MAAM,CAAC;oBAC7D,MAAM,kBAAkB,GAAG,OAAO,CAAC,IAAI,CAAC;yBACrC,GAAG,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC;yBACnC,IAAI,CAAC,IAAI,CAAC,CAAC;oBACd,EAAE,CAAC,aAAa,CAAC,cAAc,EAAE,kBAAkB,CAAC,CAAC;oBACrD,MAAM,CAAC,KAAK,CACV,wBAAwB,QAAQ,QAAQ,kBAAkB,EAAE,CAC7D,CAAC;oBAEF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,QAAQ,IAAI,IAAI,QAAQ,CAAC,CAAC;oBAEtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;oBAC3C,MAAM,MAAM,CAAC,eAAe,CAC1B,YAAY,EACZ,SAAS,EACT,cAAc,EACd,UAAU,EACV,eAAe,EACf,WAAW,CACZ,CAAC;oBAEF,MAAM,CAAC,KAAK,CACV,8BAA8B,QAAQ,gBAAgB,SAAS,GAAG,CACnE,CAAC;oBACF,MAAM,CAAC,QAAQ,EAAE,CAAC;oBAElB,yBAAyB;oBACzB,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBACrC,YAAY,CAAC,WAAW,IAAI,YAAY,QAAQ,cAAc,CAAC;wBAC7D,OAAO,GAAG,SAAS,CAAC;iBACvB;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,KA
AK,CAAC,8BAA8B,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;YAC7D,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACf,YAAY,CAAC,wBAAwB,GAAG,QAAQ,CAAC;YACjD,OAAO,YAAY,CAAC;SACrB;KACF;IAED,OAAO,YAAY,CAAC;AACtB,CAAC;AAxED,gCAwEC;AAEM,KAAK,UAAU,UAAU,CAC9B,aAA4B,EAC5B,SAAiB,EACjB,GAAW,EACX,WAA+B,EAC/B,YAAgC,EAChC,aAAiC,EACjC,YAAoB,EACpB,WAA+B,EAC/B,UAAkB,EAClB,SAAiB,EACjB,QAAiB,EACjB,IAAe,EACf,SAAiB,EACjB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE/C,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CACnC,SAAS,EACT,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,MAAM,CACP,CAAC;IAEF,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACrC,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;KAC5B;IAED,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,SAAS,EACT,aAAa,EACb,SAAS,EACT,GAAG,EACH,WAAW,EACX,YAAY,EACZ,aAAa,EACb,YAAY,EACZ,WAAW,EACX,UAAU,EACV,SAAS,EACT,IAAI,EACJ,MAAM,CACP,CAAC;IAEF,OAAO,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;AAC7C,CAAC;AA5DD,gCA4DC"}

63
lib/analyze.test.js generated
View File

@@ -1,63 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
// Checks that the duration fields are populated for the correct language
// and correct case of builtin or custom.
ava_1.default("status report fields", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
codeql_1.setCodeQL({
databaseAnalyze: async () => undefined,
});
const memoryFlag = "";
const addSnippetsFlag = "";
const threadsFlag = "";
for (const language of Object.values(languages_1.Language)) {
const config = {
languages: [language],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
};
fs.mkdirSync(util.getCodeQLDatabasePath(config.tempDir, language), {
recursive: true,
});
config.queries[language] = {
builtin: ["foo.ql"],
custom: [],
};
const builtinStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logging_1.getRunnerLogger(true));
t.deepEqual(Object.keys(builtinStatusReport).length, 1);
t.true(`analyze_builtin_queries_${language}_duration_ms` in builtinStatusReport);
config.queries[language] = {
builtin: [],
custom: ["foo.ql"],
};
const customStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logging_1.getRunnerLogger(true));
t.deepEqual(Object.keys(customStatusReport).length, 1);
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
}
});
});
//# sourceMappingURL=analyze.test.js.map
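
The removed test above pins down the naming scheme for the timing fields returned by runQueries: one analyze_builtin_queries_<language>_duration_ms or analyze_custom_queries_<language>_duration_ms entry per analysed language. A minimal sketch of what one iteration of that test checks, with an illustrative value (the concrete number is made up):

// Inside the per-language loop above, after setting config.queries[language].builtin = ["foo.ql"]:
const builtinStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
// For JavaScript this yields a single-key report along the lines of:
// { analyze_builtin_queries_javascript_duration_ms: 1234 }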

lib/analyze.test.js.map generated
View File

@@ -1 +0,0 @@
{"version":3,"file":"analyze.test.js","sourceRoot":"","sources":["../src/analyze.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AAEzB,uCAAuC;AACvC,qCAAqC;AAErC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,yEAAyE;AACzE,yCAAyC;AACzC,aAAI,CAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,kBAAS,CAAC;YACR,eAAe,EAAE,KAAK,IAAI,EAAE,CAAC,SAAS;SACvC,CAAC,CAAC;QAEH,MAAM,UAAU,GAAG,EAAE,CAAC;QACtB,MAAM,eAAe,GAAG,EAAE,CAAC;QAC3B,MAAM,WAAW,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAQ,CAAC,EAAE;YAC9C,MAAM,MAAM,GAAW;gBACrB,SAAS,EAAE,CAAC,QAAQ,CAAC;gBACrB,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,EAAE;gBACf,KAAK,EAAE,EAAE;gBACT,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,MAAM;gBACf,YAAY,EAAE,MAAM;gBACpB,SAAS,EAAE,EAAE;aACd,CAAC;YACF,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE;gBACjE,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,CAAC,QAAQ,CAAC;gBACnB,MAAM,EAAE,EAAE;aACX,CAAC;YACF,MAAM,mBAAmB,GAAG,MAAM,oBAAU,CAC1C,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACxD,CAAC,CAAC,IAAI,CACJ,2BAA2B,QAAQ,cAAc,IAAI,mBAAmB,CACzE,CAAC;YAEF,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,EAAE;gBACX,MAAM,EAAE,CAAC,QAAQ,CAAC;aACnB,CAAC;YACF,MAAM,kBAAkB,GAAG,MAAM,oBAAU,CACzC,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACvD,CAAC,CAAC,IAAI,CACJ,0BAA0B,QAAQ,cAAc,IAAI,kBAAkB,CACvE,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

38
lib/api-client.js generated
View File

@@ -10,39 +10,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const retry = __importStar(require("@octokit/plugin-retry"));
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const path = __importStar(require("path"));
const actions_util_1 = require("./actions-util");
const util_1 = require("./util");
exports.getApiClient = function (githubAuth, githubUrl, allowLocalRun = false) {
if (util_1.isLocalRun() && !allowLocalRun) {
throw new Error("Invalid API call in local run");
}
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
return new retryingOctokit(githubUtils.getOctokitOptions(githubAuth, {
baseUrl: getApiUrl(githubUrl),
exports.getApiClient = function () {
return new github.GitHub(core.getInput('token'), {
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" }),
}));
log: console_log_level_1.default({ level: "debug" })
});
};
function getApiUrl(githubUrl) {
const url = new URL(githubUrl);
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://api.github.com";
}
// Add the /api/v3 API prefix
url.pathname = path.join(url.pathname, "api", "v3");
return url.toString();
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been coverted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient(allowLocalRun = false) {
return exports.getApiClient(actions_util_1.getRequiredInput("token"), actions_util_1.getRequiredEnvParam("GITHUB_SERVER_URL"), allowLocalRun);
}
exports.getActionsApiClient = getActionsApiClient;
//# sourceMappingURL=api-client.js.map
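
One side of this diff still routes API calls through a getApiUrl helper that maps the configured GitHub server URL to its REST endpoint, while the other side always talks to github.com using the token input. A self-contained sketch of that mapping, with a made-up Enterprise hostname in the usage line:

import * as path from "path";

function getApiUrl(githubUrl: string): string {
  const url = new URL(githubUrl);
  // github.com (and its API host) map to the canonical public API endpoint.
  if (url.hostname === "github.com" || url.hostname === "api.github.com") {
    return "https://api.github.com";
  }
  // GitHub Enterprise Server exposes its REST API under /api/v3.
  url.pathname = path.join(url.pathname, "api", "v3");
  return url.toString();
}

console.log(getApiUrl("https://github.example.com")); // https://github.example.com/api/v3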

lib/api-client.js.map generated
View File

@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAChD,2CAA6B;AAE7B,iDAAuE;AACvE,iCAAoC;AAEvB,QAAA,YAAY,GAAG,UAC1B,UAAkB,EAClB,SAAiB,EACjB,aAAa,GAAG,KAAK;IAErB,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,UAAU,EAAE;QACxC,OAAO,EAAE,SAAS,CAAC,SAAS,CAAC;QAC7B,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,kDAAkD;IAClD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,oFAAoF;AACpF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,OAAO,oBAAY,CACjB,+BAAgB,CAAC,OAAO,CAAC,EACzB,kCAAmB,CAAC,mBAAmB,CAAC,EACxC,aAAa,CACd,CAAC;AACJ,CAAC;AAND,kDAMC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEnC,QAAA,YAAY,GAAG;IAC1B,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB;QACE,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC"}

lib/autobuild-action.js generated
View File

@@ -1,58 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const autobuild_1 = require("./autobuild");
const config_utils = __importStar(require("./config-utils"));
const logging_1 = require("./logging");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
var _a, _b;
const status = failingLanguage !== undefined || cause !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("autobuild", status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
const statusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const logger = logging_1.getActionsLogger();
const startedAt = new Date();
let language = undefined;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("autobuild", "starting", startedAt), true))) {
return;
}
const config = await config_utils.getConfig(actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
language = autobuild_1.determineAutobuildLanguage(config, logger);
if (language !== undefined) {
await autobuild_1.runAutobuild(language, config, logger);
}
}
catch (error) {
core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error.message}`);
console.log(error);
await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error);
return;
}
await sendCompletedStatusReport(startedAt, language ? [language] : []);
}
run().catch((e) => {
core.setFailed(`autobuild action failed. ${e}`);
console.log(e);
});
//# sourceMappingURL=autobuild-action.js.map
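
The deleted entry point above wraps the autobuild step in the action's status-report protocol: a "starting" report before any work, then a completed report whose extra fields record which languages were attempted and which one, if any, failed. A sketch of the completed payload assembled by sendCompletedStatusReport (field values illustrative):

const statusReport = {
  ...statusReportBase,               // base fields from createStatusReportBase("autobuild", status, startedAt, ...)
  autobuild_languages: "cpp,java",   // every traced language the action tried to build
  autobuild_failure: "java",         // present only when a language's build failed
};
await actionsUtil.sendStatusReport(statusReport);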

lib/autobuild-action.js.map generated
View File

@@ -1 +0,0 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAS7C,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,WAAW,EACX,UAAU,EACV,SAAS,CACV,EACD,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,6BAA6B,CAAC,EAAE,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

68
lib/autobuild.js generated
View File

@@ -1,32 +1,50 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
function determineAutobuildLanguage(config, logger) {
// Attempt to find a language to autobuild
// We want pick the dominant language in the repo from the ones we're able to build
// The languages are sorted in order specified by user or by lines of code if we got
// them from the GitHub API, so try to build the first language on the list.
const autobuildLanguages = config.languages.filter(languages_1.isTracedLanguage);
const language = autobuildLanguages[0];
if (!language) {
logger.info("None of the languages in this project require extra build steps");
return undefined;
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
async function run() {
var _a;
try {
if (util.should_abort('autobuild', true) || !await util.reportActionStarting('autobuild')) {
return;
}
// Attempt to find a language to autobuild
// We want pick the dominant language in the repo from the ones we're able to build
// The languages are sorted in order specified by user or by lines of code if we got
// them from the GitHub API, so try to build the first language on the list.
const autobuildLanguages = ((_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];
const language = autobuildLanguages[0];
if (!language) {
core.info("None of the languages in this project require extra build steps");
return;
}
core.debug(`Detected dominant traced language: ${language}`);
if (autobuildLanguages.length > 1) {
core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
}
core.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = codeql_1.getCodeQL();
await codeQL.runAutobuild(language);
core.endGroup();
}
logger.debug(`Detected dominant traced language: ${language}`);
if (autobuildLanguages.length > 1) {
logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages
.slice(1)
.join(" and ")}, you must replace this call with custom build steps.`);
catch (error) {
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
await util.reportActionFailed('autobuild', error.message, error.stack);
return;
}
return language;
await util.reportActionSucceeded('autobuild');
}
exports.determineAutobuildLanguage = determineAutobuildLanguage;
async function runAutobuild(language, config, logger) {
logger.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = codeql_1.getCodeQL(config.codeQLCmd);
await codeQL.runAutobuild(language);
logger.endGroup();
}
exports.runAutobuild = runAutobuild;
run().catch(e => {
core.setFailed("autobuild action failed. " + e);
console.log(e);
});
//# sourceMappingURL=autobuild.js.map
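
On one side of this diff the autobuild logic is split into determineAutobuildLanguage and runAutobuild so that entry points can share it; on the other side the language list comes from the CODEQL_ACTION_TRACED_LANGUAGES environment variable and the whole flow lives in a single run function. A hedged sketch of a caller of the split API; the Config and Logger types, and where config and logger come from, are assumed to match config-utils and logging as used in autobuild-action.js above:

import { determineAutobuildLanguage, runAutobuild } from "./autobuild";

async function autobuild(config: Config, logger: Logger): Promise<void> {
  // Pick the dominant traced language, if the project has one that needs building.
  const language = determineAutobuildLanguage(config, logger);
  if (language !== undefined) {
    // Delegate to the CodeQL bundle's autobuild script for that language.
    await runAutobuild(language, config, logger);
  }
}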

lib/autobuild.js.map generated
View File

@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAqC;AACrC,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

428
lib/codeql.js generated
View File

@@ -6,167 +6,63 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const http = __importStar(require("@actions/http-client"));
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
const stream = __importStar(require("stream"));
const globalutil = __importStar(require("util"));
const v4_1 = __importDefault(require("uuid/v4"));
const actions_util_1 = require("./actions-util");
const api = __importStar(require("./api-client"));
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
const error_matcher_1 = require("./error-matcher");
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
const util = __importStar(require("./util"));
/**
* Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
* Can be overridden in tests using `setCodeQL`.
*/
let cachedCodeQL = undefined;
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLActionRepository(mode) {
if (mode !== "actions") {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
// Actions do not know their own repository name,
// so we currently use this hack to find the name based on where our files are.
// This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
const runnerTemp = actions_util_1.getRequiredEnvParam("RUNNER_TEMP");
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
const relativeScriptPath = path.relative(actionsDirectory, __filename);
// This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout.
if (relativeScriptPath.startsWith("..") ||
path.isAbsolute(relativeScriptPath)) {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
const relativeScriptPathParts = relativeScriptPath.split(path.sep);
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
}
async function getCodeQLBundleDownloadURL(githubAuth, githubUrl, mode, logger) {
const codeQLActionRepository = getCodeQLActionRepository(mode);
const potentialDownloadSources = [
// This GitHub instance, and this Action.
[githubUrl, codeQLActionRepository],
// This GitHub instance, and the canonical Action.
[githubUrl, CODEQL_DEFAULT_ACTION_REPOSITORY],
// GitHub.com, and the canonical Action.
[util.GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
];
// We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
for (const downloadSource of uniqueDownloadSources) {
const [apiURL, repository] = downloadSource;
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
if (apiURL === util.GITHUB_DOTCOM_URL &&
repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
break;
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release = await api
.getApiClient(githubAuth, githubUrl)
.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: CODEQL_BUNDLE_VERSION,
});
for (const asset of release.data.assets) {
if (asset.name === CODEQL_BUNDLE_NAME) {
logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
return asset.url;
}
}
}
catch (e) {
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
}
}
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
}
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url, headers, tempDir, logger) {
const client = new http.HttpClient("CodeQL Action");
const dest = path.join(tempDir, v4_1.default());
const response = await client.get(url, headers);
if (response.message.statusCode !== 200) {
logger.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
throw new Error(`Unexpected HTTP response: ${response.message.statusCode}`);
}
const pipeline = globalutil.promisify(stream.pipeline);
fs.mkdirSync(path.dirname(dest), { recursive: true });
await pipeline(response.message, fs.createWriteStream(dest));
return dest;
}
async function setupCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger) {
// Setting these two env vars makes the toolcache code safe to use outside,
// of actions but this is obviously not a great thing we're doing and it would
// be better to write our own implementation to use outside of actions.
process.env["RUNNER_TEMP"] = tempDir;
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
/**
* Environment variable used to store the location of the CodeQL CLI executable.
* Value is set by setupCodeQL and read by getCodeQL.
*/
const CODEQL_ACTION_CMD = "CODEQL_ACTION_CMD";
async function setupCodeQL() {
try {
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`, logger);
let codeqlFolder = toolcache.find("CodeQL", codeqlURLVersion);
const codeqlURL = core.getInput('tools', { required: true });
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
if (codeqlFolder) {
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
core.debug(`CodeQL found in cache ${codeqlFolder}`);
}
else {
if (!codeqlURL) {
codeqlURL = await getCodeQLBundleDownloadURL(githubAuth, githubUrl, mode, logger);
}
const headers = { accept: "application/octet-stream" };
// We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom.
if (codeqlURL.startsWith(`${githubUrl}/`)) {
logger.debug("Downloading CodeQL bundle with token.");
headers.authorization = `token ${githubAuth}`;
}
else {
logger.debug("Downloading CodeQL bundle without token.");
}
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
const codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger);
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
const codeqlPath = await toolcache.downloadTool(codeqlURL);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLVersion);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
}
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
if (process.platform === "win32") {
let codeqlCmd = path.join(codeqlFolder, 'codeql', 'codeql');
if (process.platform === 'win32') {
codeqlCmd += ".exe";
}
else if (process.platform !== "linux" && process.platform !== "darwin") {
throw new Error(`Unsupported platform: ${process.platform}`);
else if (process.platform !== 'linux' && process.platform !== 'darwin') {
throw new Error("Unsupported plaform: " + process.platform);
}
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
core.exportVariable(CODEQL_ACTION_CMD, codeqlCmd);
return cachedCodeQL;
}
catch (e) {
logger.error(e);
core.error(e);
throw new Error("Unable to download and extract CodeQL CLI");
}
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url, logger) {
function getCodeQLURLVersion(url) {
const match = url.match(/\/codeql-bundle-(.*)\//);
if (match === null || match.length < 2) {
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
}
let version = match[1];
if (!semver.valid(version)) {
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = `0.0.0-${version}`;
core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = '0.0.0-' + version;
}
const s = semver.clean(version);
if (!s) {
@@ -175,23 +71,18 @@ function getCodeQLURLVersion(url, logger) {
return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
/**
* Use the CodeQL executable located at the given path.
*/
function getCodeQL(cmd) {
function getCodeQL() {
if (cachedCodeQL === undefined) {
cachedCodeQL = getCodeQLForCmd(cmd);
const codeqlCmd = util.getRequiredEnvParam(CODEQL_ACTION_CMD);
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
}
return cachedCodeQL;
}
exports.getCodeQL = getCodeQL;
function resolveFunction(partialCodeql, methodName, defaultImplementation) {
if (typeof partialCodeql[methodName] !== "function") {
if (defaultImplementation !== undefined) {
return defaultImplementation;
}
function resolveFunction(partialCodeql, methodName) {
if (typeof partialCodeql[methodName] !== 'function') {
const dummyMethod = () => {
throw new Error(`CodeQL ${methodName} method not correctly defined`);
throw new Error('CodeQL ' + methodName + ' method not correctly defined');
};
return dummyMethod;
}
@@ -205,218 +96,131 @@ function resolveFunction(partialCodeql, methodName, defaultImplementation) {
*/
function setCodeQL(partialCodeql) {
cachedCodeQL = {
getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"),
printVersion: resolveFunction(partialCodeql, "printVersion"),
getTracerEnv: resolveFunction(partialCodeql, "getTracerEnv"),
databaseInit: resolveFunction(partialCodeql, "databaseInit"),
runAutobuild: resolveFunction(partialCodeql, "runAutobuild"),
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
databaseAnalyze: resolveFunction(partialCodeql, "databaseAnalyze"),
getDir: resolveFunction(partialCodeql, 'getDir'),
printVersion: resolveFunction(partialCodeql, 'printVersion'),
getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
runAutobuild: resolveFunction(partialCodeql, 'runAutobuild'),
extractScannedLanguage: resolveFunction(partialCodeql, 'extractScannedLanguage'),
finalizeDatabase: resolveFunction(partialCodeql, 'finalizeDatabase'),
resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
};
return cachedCodeQL;
}
exports.setCodeQL = setCodeQL;
/**
* Get the cached CodeQL object. Should only be used from tests.
*
* TODO: Work out a good way for tests to get this from the test context
* instead of having to have this method.
*/
function getCachedCodeQL() {
if (cachedCodeQL === undefined) {
// Should never happen as setCodeQL is called by testing-utils.setupTests
throw new Error("cachedCodeQL undefined");
}
return cachedCodeQL;
}
exports.getCachedCodeQL = getCachedCodeQL;
function getCodeQLForCmd(cmd) {
return {
getPath() {
return cmd;
getDir: function () {
return path.dirname(cmd);
},
async printVersion() {
await new toolrunnner.ToolRunner(cmd, [
"version",
"--format=json",
]).exec();
printVersion: async function () {
await exec.exec(cmd, [
'version',
'--format=json'
]);
},
async getTracerEnv(databasePath) {
// Write tracer-env.js to a temp location.
const tracerEnvJs = path.resolve(databasePath, "working", "tracer-env.js");
fs.mkdirSync(path.dirname(tracerEnvJs), { recursive: true });
fs.writeFileSync(tracerEnvJs, `
const fs = require('fs');
const env = {};
for (let entry of Object.entries(process.env)) {
const key = entry[0];
const value = entry[1];
if (typeof value !== 'undefined' && key !== '_' && !key.startsWith('JAVA_MAIN_CLASS_')) {
env[key] = value;
}
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');`);
const envFile = path.resolve(databasePath, "working", "env.tmp");
await new toolrunnner.ToolRunner(cmd, [
"database",
"trace-command",
getTracerEnv: async function (databasePath, compilerSpec) {
let envFile = path.resolve(databasePath, 'working', 'env.tmp');
const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
await exec.exec(cmd, [
'database',
'trace-command',
databasePath,
...getExtraOptionsFromEnv(["database", "trace-command"]),
...compilerSpecArg,
process.execPath,
tracerEnvJs,
envFile,
]).exec();
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
path.resolve(__dirname, 'tracer-env.js'),
envFile
]);
return JSON.parse(fs.readFileSync(envFile, 'utf-8'));
},
async databaseInit(databasePath, language, sourceRoot) {
await new toolrunnner.ToolRunner(cmd, [
"database",
"init",
databaseInit: async function (databasePath, language, sourceRoot) {
await exec.exec(cmd, [
'database',
'init',
databasePath,
`--language=${language}`,
`--source-root=${sourceRoot}`,
...getExtraOptionsFromEnv(["database", "init"]),
]).exec();
'--language=' + language,
'--source-root=' + sourceRoot,
]);
},
async runAutobuild(language) {
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
const autobuildCmd = path.join(path.dirname(cmd), language, "tools", cmdName);
runAutobuild: async function (language) {
const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
const autobuildCmd = path.join(path.dirname(cmd), language, 'tools', cmdName);
// Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
// This is because of an issue with Azure pipelines timing out connections after 4 minutes
// and Maven not properly handling closed connections
// Otherwise long build processes will timeout when pulling down Java packages
// https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
const javaToolOptions = process.env["JAVA_TOOL_OPTIONS"] || "";
process.env["JAVA_TOOL_OPTIONS"] = [
...javaToolOptions.split(/\s+/),
"-Dhttp.keepAlive=false",
"-Dmaven.wagon.http.pool=false",
].join(" ");
await new toolrunnner.ToolRunner(autobuildCmd).exec();
let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
await exec.exec(autobuildCmd);
},
async extractScannedLanguage(databasePath, language) {
extractScannedLanguage: async function (databasePath, language) {
// Get extractor location
let extractorPath = "";
await new toolrunnner.ToolRunner(cmd, [
"resolve",
"extractor",
"--format=json",
`--language=${language}`,
...getExtraOptionsFromEnv(["resolve", "extractor"]),
let extractorPath = '';
await exec.exec(cmd, [
'resolve',
'extractor',
'--format=json',
'--language=' + language
], {
silent: true,
listeners: {
stdout: (data) => {
extractorPath += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
stdout: (data) => { extractorPath += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
});
// Set trace command
const ext = process.platform === "win32" ? ".cmd" : ".sh";
const traceCommand = path.resolve(JSON.parse(extractorPath), "tools", `autobuild${ext}`);
const ext = process.platform === 'win32' ? '.cmd' : '.sh';
const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
// Run trace command
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [
"database",
"trace-command",
...getExtraOptionsFromEnv(["database", "trace-command"]),
await exec.exec(cmd, [
'database',
'trace-command',
databasePath,
"--",
traceCommand,
], error_matcher_1.errorMatchers);
'--',
traceCommand
]);
},
async finalizeDatabase(databasePath) {
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [
"database",
"finalize",
...getExtraOptionsFromEnv(["database", "finalize"]),
databasePath,
], error_matcher_1.errorMatchers);
finalizeDatabase: async function (databasePath) {
await exec.exec(cmd, [
'database',
'finalize',
databasePath
]);
},
async resolveQueries(queries, extraSearchPath) {
resolveQueries: async function (queries, extraSearchPath) {
const codeqlArgs = [
"resolve",
"queries",
'resolve',
'queries',
...queries,
"--format=bylanguage",
...getExtraOptionsFromEnv(["resolve", "queries"]),
'--format=bylanguage'
];
if (extraSearchPath !== undefined) {
codeqlArgs.push("--search-path", extraSearchPath);
codeqlArgs.push('--search-path', extraSearchPath);
}
let output = "";
await new toolrunnner.ToolRunner(cmd, codeqlArgs, {
let output = '';
await exec.exec(cmd, codeqlArgs, {
listeners: {
stdout: (data) => {
output += data.toString();
},
},
}).exec();
}
}
});
return JSON.parse(output);
},
async databaseAnalyze(databasePath, sarifFile, querySuite, memoryFlag, addSnippetsFlag, threadsFlag) {
await new toolrunnner.ToolRunner(cmd, [
"database",
"analyze",
memoryFlag,
threadsFlag,
databaseAnalyze: async function (databasePath, sarifFile, querySuite) {
await exec.exec(cmd, [
'database',
'analyze',
util.getMemoryFlag(),
util.getThreadsFlag(),
databasePath,
"--format=sarif-latest",
`--output=${sarifFile}`,
addSnippetsFlag,
...getExtraOptionsFromEnv(["database", "analyze"]),
querySuite,
]).exec();
},
'--format=sarif-latest',
'--output=' + sarifFile,
'--no-sarif-add-snippets',
querySuite
]);
}
};
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*/
function getExtraOptionsFromEnv(path) {
const options = util.getExtraOptionsEnvParam();
return getExtraOptions(options, path, []);
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*
* - the special terminal step name '*' in `options` matches all path steps
* - throws an exception if this conversion is impossible.
*/
function getExtraOptions(options, path, pathInfo) {
var _a, _b, _c;
/**
* Gets `options` as an array of extra option strings.
*
* - throws an exception mentioning `pathInfo` if this conversion is impossible.
*/
function asExtraOptions(options, pathInfo) {
if (options === undefined) {
return [];
}
if (!Array.isArray(options)) {
const msg = `The extra options for '${pathInfo.join(".")}' ('${JSON.stringify(options)}') are not in an array.`;
throw new Error(msg);
}
return options.map((o) => {
const t = typeof o;
if (t !== "string" && t !== "number" && t !== "boolean") {
const msg = `The extra option for '${pathInfo.join(".")}' ('${JSON.stringify(o)}') is not a primitive value.`;
throw new Error(msg);
}
return `${o}`;
});
}
const all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a["*"], pathInfo.concat("*"));
const specific = path.length === 0
? asExtraOptions(options, pathInfo)
: getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[path[0]], (_c = path) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(path[0]));
return all.concat(specific);
}
exports.getExtraOptions = getExtraOptions;
//# sourceMappingURL=codeql.js.map
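
One side of this diff carries a getExtraOptions helper (plus getExtraOptionsFromEnv) that turns the user-supplied extra-options object into command-line strings: wildcard "*" entries apply at every level of the command path and are emitted before the path-specific entries. Two worked calls, taken from the removed tests further down; the import path is the module shown here:

import { getExtraOptions } from "./codeql";

// Wildcard options come first, then the options declared for the specific path.
getExtraOptions({ "*": [42], foo: [87] }, ["foo"], []);                            // ["42", "87"]
// Nested paths collect "*" entries at each level they pass through.
getExtraOptions({ "*": [42], foo: { "*": [87], bar: [99] } }, ["foo", "bar"], []); // ["42", "87", "99"]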

lib/codeql.js.map generated
View File

File diff suppressed because one or more lines are too long

55
lib/codeql.test.js generated
View File

@@ -15,38 +15,41 @@ const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const codeql = __importStar(require("./codeql"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("download codeql bundle cache", async (t) => {
ava_1.default('download codeql bundle cache', async (t) => {
await util.withTmpDir(async (tmpDir) => {
const versions = ["20200601", "20200610"];
process.env['GITHUB_WORKSPACE'] = tmpDir;
process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
const versions = ['20200601', '20200610'];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
nock_1.default("https://example.com")
nock_1.default('https://example.com')
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, "token", "https://github.example.com", tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
await codeql.setupCodeQL();
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
const cachedVersions = toolcache.findAllVersions('CodeQL');
t.is(cachedVersions.length, 2);
});
});
ava_1.default("parse codeql bundle url version", (t) => {
ava_1.default('parse codeql bundle url version', t => {
const tests = {
"20200601": "0.0.0-20200601",
"20200601.0": "0.0.0-20200601.0",
"20200601.0.0": "20200601.0.0",
"1.2.3": "1.2.3",
"1.2.3-alpha": "1.2.3-alpha",
"1.2.3-beta.1": "1.2.3-beta.1",
'20200601': '0.0.0-20200601',
'20200601.0': '0.0.0-20200601.0',
'20200601.0.0': '20200601.0.0',
'1.2.3': '1.2.3',
'1.2.3-alpha': '1.2.3-alpha',
'1.2.3-beta.1': '1.2.3-beta.1',
};
for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`;
try {
const parsedVersion = codeql.getCodeQLURLVersion(url, logging_1.getRunnerLogger(true));
const parsedVersion = codeql.getCodeQLURLVersion(url);
t.deepEqual(parsedVersion, expectedVersion);
}
catch (e) {
@@ -54,26 +57,4 @@ ava_1.default("parse codeql bundle url version", (t) => {
}
}
});
ava_1.default("getExtraOptions works for explicit paths", (t) => {
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ["foo", "bar"], []), ["42"]);
});
ava_1.default("getExtraOptions works for wildcards", (t) => {
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
});
ava_1.default("getExtraOptions works for wildcards and explicit paths", (t) => {
const o1 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
const o2 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o2, ["foo", "bar"], []), ["42"]);
const o3 = { "*": [42], foo: { "*": [87], bar: [99] } };
const p = ["foo", "bar"];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
});
ava_1.default("getExtraOptions throws for bad content", (t) => {
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ "*": [42], foo: { "*": 87, bar: [99] } }, ["foo", "bar"], []));
});
//# sourceMappingURL=codeql.test.js.map
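
The version-parsing test above exercises getCodeQLURLVersion, which pulls the segment after codeql-bundle- out of the tools URL and, when that segment is not already valid SemVer, treats it as a 0.0.0 pre-release. Worked calls matching the table in the test; one side of the diff also threads a logger through as a second argument, the single-argument form is used here:

import { getCodeQLURLVersion } from "./codeql";

getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/...");     // "0.0.0-20200601"
getCodeQLURLVersion("https://github.com/.../codeql-bundle-1.2.3-beta.1/..."); // "1.2.3-beta.1"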

lib/codeql.test.js.map generated
View File

@@ -1 +1 @@
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CACZ,GAAG,EACH,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAC9D,CAAC;YAEJ,MAAM,MAAM,CAAC,WAAW,CACtB,8CAA8C,OAAO,uBAAuB,EAC5E,OAAO,EACP,4BAA4B,EAC5B,MAAM,EACN,MAAM,EACN,QAAQ,EACR,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YAEF,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC5C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAC9C,GAAG,EACH,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,0CAA0C,EAAE,CAAC,CAAC,EAAE,EAAE;IACrD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;IAEzD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAExE,CAAC,CAAC,SAAS,CACT,MAAM,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAClE,CAAC,IAAI,CAAC,CACP,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE,EAAE;IAChD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1E,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wDAAwD,EAAE,CAAC,CAAC,EAAE,EAAE;IACnE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;IAEnE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEpE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;IACxD,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IACzB,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wCAAwC,EAAE,CAAC,CAAC,EAAE,EAAE;IACnD,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAEjE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAEjE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CACZ,MAAM,CA
AC,eAAe,CACpB,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAC1C,CAAC,KAAK,EAAE,KAAK,CAAC,EACd,EAAE,CACH,CACF,CAAC;AACJ,CAAC,CAAC,CAAC"}
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,MAAM,CAAC,WAAW,EAAE,CAAC;YAE3B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YACtD,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}

471
lib/config-utils.js generated
View File

@@ -7,19 +7,22 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const yaml = __importStar(require("js-yaml"));
const path = __importStar(require("path"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
const languages_1 = require("./languages");
const util = __importStar(require("./util"));
// Property names from the user-supplied config file.
const NAME_PROPERTY = "name";
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
const QUERIES_PROPERTY = "queries";
const QUERIES_USES_PROPERTY = "uses";
const PATHS_IGNORE_PROPERTY = "paths-ignore";
const PATHS_PROPERTY = "paths";
const NAME_PROPERTY = 'name';
const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
const QUERIES_PROPERTY = 'queries';
const QUERIES_USES_PROPERTY = 'uses';
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
const PATHS_PROPERTY = 'paths';
/**
* A list of queries from https://github.com/github/codeql that
* we don't want to run. Disabling them here is a quicker alternative to
@@ -30,13 +33,14 @@ const PATHS_PROPERTY = "paths";
* Format is a map from language to an array of path suffixes of .ql files.
*/
const DISABLED_BUILTIN_QUERIES = {
csharp: [
"ql/src/Security Features/CWE-937/VulnerablePackage.ql",
"ql/src/Security Features/CWE-451/MissingXFrameOptions.ql",
],
'csharp': [
'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
]
};
function queryIsDisabled(language, query) {
return (DISABLED_BUILTIN_QUERIES[language] || []).some((disabledQuery) => query.endsWith(disabledQuery));
return (DISABLED_BUILTIN_QUERIES[language] || [])
.some(disabledQuery => query.endsWith(disabledQuery));
}
/**
* Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are
@@ -46,73 +50,63 @@ function validateQueries(resolvedQueries) {
const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
if (noDeclaredLanguageQueries.length !== 0) {
throw new Error(`${"The following queries do not declare a language. " +
"Their qlpack.yml files are either missing or is invalid.\n"}${noDeclaredLanguageQueries.join("\n")}`);
throw new Error('The following queries do not declare a language. ' +
'Their qlpack.yml files are either missing or is invalid.\n' +
noDeclaredLanguageQueries.join('\n'));
}
const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
if (multipleDeclaredLanguagesQueries.length !== 0) {
throw new Error(`${"The following queries declare multiple languages. " +
"Their qlpack.yml files are either missing or is invalid.\n"}${multipleDeclaredLanguagesQueries.join("\n")}`);
throw new Error('The following queries declare multiple languages. ' +
'Their qlpack.yml files are either missing or is invalid.\n' +
multipleDeclaredLanguagesQueries.join('\n'));
}
}
/**
* Run 'codeql resolve queries' and add the results to resultMap
*
* If a checkout path is given then the queries are assumed to be custom queries
* and an error will be thrown if there is anything invalid about the queries.
* If a checkout path is not given then the queries are assumed to be builtin
* queries, and error checking will be suppressed.
*/
async function runResolveQueries(codeQL, resultMap, toResolve, extraSearchPath) {
const resolvedQueries = await codeQL.resolveQueries(toResolve, extraSearchPath);
if (extraSearchPath !== undefined) {
validateQueries(resolvedQueries);
}
for (const [language, queryPaths] of Object.entries(resolvedQueries.byLanguage)) {
async function runResolveQueries(resultMap, toResolve, extraSearchPath, errorOnInvalidQueries) {
const codeQl = codeql_1.getCodeQL();
const resolvedQueries = await codeQl.resolveQueries(toResolve, extraSearchPath);
for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
if (resultMap[language] === undefined) {
resultMap[language] = {
builtin: [],
custom: [],
};
}
const queries = Object.keys(queryPaths).filter((q) => !queryIsDisabled(language, q));
if (extraSearchPath !== undefined) {
resultMap[language].custom.push(...queries);
}
else {
resultMap[language].builtin.push(...queries);
resultMap[language] = [];
}
resultMap[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
}
if (errorOnInvalidQueries) {
validateQueries(resolvedQueries);
}
}
/**
* Get the set of queries included by default.
*/
async function addDefaultQueries(codeQL, languages, resultMap) {
const suites = languages.map((l) => `${l}-code-scanning.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
async function addDefaultQueries(languages, resultMap) {
const suites = languages.map(l => l + '-code-scanning.qls');
await runResolveQueries(resultMap, suites, undefined, false);
}
// The set of acceptable values for built-in suites from the codeql bundle
const builtinSuites = ["security-extended", "security-and-quality"];
const builtinSuites = ['security-extended', 'security-and-quality'];
/**
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite.
*/
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
async function addBuiltinSuiteQueries(configFile, languages, resultMap, suiteName) {
const suite = builtinSuites.find((suite) => suite === suiteName);
if (!suite) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
const suites = languages.map(l => l + '-' + suiteName + '.qls');
await runResolveQueries(resultMap, suites, undefined, false);
}
/**
* Retrieve the set of queries at localQueryPath and add them to resultMap.
*/
async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath, configFile) {
async function addLocalQueries(configFile, resultMap, localQueryPath) {
// Resolve the local path against the workspace so that when this is
// passed to codeql it resolves to exactly the path we expect it to resolve to.
let absoluteQueryPath = path.join(checkoutPath, localQueryPath);
const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
// Check the file exists
if (!fs.existsSync(absoluteQueryPath)) {
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
@@ -120,21 +114,23 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath,
// Call this after checking file exists, because it'll fail if file doesn't exist
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
// Check the local path doesn't jump outside the repo using '..' or symlinks
if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(checkoutPath) + path.sep)) {
if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
}
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath);
// Get the root of the current repo to use when resolving query dependencies
const rootOfRepo = util.getRequiredEnvParam('GITHUB_WORKSPACE');
await runResolveQueries(resultMap, [absoluteQueryPath], rootOfRepo, true);
}
/**
* Retrieve the set of queries at the referenced remote repo and add them to resultMap.
*/
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile) {
let tok = queryUses.split("@");
async function addRemoteQueries(configFile, resultMap, queryUses) {
let tok = queryUses.split('@');
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const ref = tok[1];
tok = tok[0].split("/");
tok = tok[0].split('/');
// The first token is the owner
// The second token is the repo
// The rest is a path, if there is more than one token combine them to form the full path
@@ -142,16 +138,16 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
// Check none of the parts of the repository name are empty
if (tok[0].trim() === "" || tok[1].trim() === "") {
if (tok[0].trim() === '' || tok[1].trim() === '') {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const nwo = `${tok[0]}/${tok[1]}`;
const nwo = tok[0] + '/' + tok[1];
// Checkout the external repository
const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, githubUrl, tempDir, logger);
const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref);
const queryPath = tok.length > 2
? path.join(checkoutPath, tok.slice(2).join("/"))
: checkoutPath;
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath);
? path.join(rootOfRepo, tok.slice(2).join('/'))
: rootOfRepo;
await runResolveQueries(resultMap, [queryPath], rootOfRepo, true);
}
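A condensed sketch of the reference parsing performed above, with the checks folded into one illustrative function:
// Sketch: split "owner/repo[/path]@ref" into its parts, throwing on malformed input.
function parseRemoteQueryUses(queryUses) {
    const atParts = queryUses.split("@");
    if (atParts.length !== 2) {
        throw new Error(`Invalid query uses: ${queryUses}`);
    }
    const slashParts = atParts[0].split("/");
    if (slashParts.length < 2 || slashParts[0].trim() === "" || slashParts[1].trim() === "") {
        throw new Error(`Invalid query uses: ${queryUses}`);
    }
    return {
        nwo: `${slashParts[0]}/${slashParts[1]}`,
        ref: atParts[1],
        subPath: slashParts.length > 2 ? slashParts.slice(2).join("/") : undefined,
    };
}
// parseRemoteQueryUses("octo-org/queries/cpp/suite@main")
//   -> { nwo: "octo-org/queries", ref: "main", subPath: "cpp/suite" }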
/**
* Parse a query 'uses' field to a discrete set of query files and update resultMap.
@@ -161,23 +157,23 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*/
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, githubUrl, logger, configFile) {
async function parseQueryUses(configFile, languages, resultMap, queryUses) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
}
// Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) {
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), checkoutPath, configFile);
await addLocalQueries(configFile, resultMap, queryUses.slice(2));
return;
}
// Check for one of the builtin suites
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
await addBuiltinSuiteQueries(configFile, languages, resultMap, queryUses);
return;
}
// Otherwise, must be a reference to another repo
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile);
await addRemoteQueries(configFile, resultMap, queryUses);
}
// Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately
@@ -188,140 +184,143 @@ const pathStarsRegex = /.*(?:\*\*[^/].*|\*\*$|[^/]\*\*.*)/;
const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/;
// Checks that a paths or paths-ignore entry is valid, possibly modifying it
// to make it valid, or if not possible then throws an error.
function validateAndSanitisePath(originalPath, propertyName, configFile, logger) {
function validateAndSanitisePath(originalPath, propertyName, configFile) {
// Take a copy so we don't modify the original path, so we can still construct error messages
let path = originalPath;
// All paths are relative to the src root, so strip off leading slashes.
while (path.charAt(0) === "/") {
while (path.charAt(0) === '/') {
path = path.substring(1);
}
// Trailing ** are redundant, so strip them off
if (path.endsWith("/**")) {
if (path.endsWith('/**')) {
path = path.substring(0, path.length - 2);
}
// An empty path is not allowed as it's meaningless
if (path === "") {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" is not a valid path. ` +
`It is not necessary to include it, and it is not allowed to exclude it.`));
if (path === '') {
throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" is not a valid path. ' +
'It is not necessary to include it, and it is not allowed to exclude it.'));
}
// Check for illegal uses of **
if (path.match(pathStarsRegex)) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an invalid "**" wildcard. ` +
`They must be immediately preceded and followed by a slash as in "/**/", or come at the start or end.`);
throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an invalid "**" wildcard. ' +
'They must be immediately preceded and followed by a slash as in "/**/", or come at the start or end.'));
}
// Check for other regex characters that we don't support.
// Output a warning so the user knows, but otherwise continue normally.
if (path.match(filterPatternCharactersRegex)) {
logger.warning(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an unsupported character. ` +
`The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.`));
core.warning(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an unsupported character. ' +
'The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.'));
}
// Ban any uses of backslash for now.
// This may not play nicely with project layouts.
// This restriction can be lifted later if we determine they are ok.
if (path.indexOf("\\") !== -1) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an "\\" character. These are not allowed in filters. ` +
`If running on windows we recommend using "/" instead for path filters.`));
if (path.indexOf('\\') !== -1) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an "\\" character. These are not allowed in filters. ' +
'If running on windows we recommend using "/" instead for path filters.'));
}
return path;
}
exports.validateAndSanitisePath = validateAndSanitisePath;
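The effect of validateAndSanitisePath is easiest to see from concrete inputs; the values below follow the path validation and sanitisation tests in config-utils.test.js further down (the logger argument applies to the newer signature only):
// Illustrative input/output pairs for validateAndSanitisePath:
const sanitisationExamples = [
    ["foo/bar", "foo/bar"], // already valid, returned unchanged
    ["/foo", "foo"],        // leading slash stripped
    ["foo/**", "foo/"],     // redundant trailing stars stripped
];
// "a/***/b", "a/**b", "a/b**" and a bare "**" are rejected
// (cf. invalidPaths in the tests below).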
// An undefined configFile in some of these functions indicates that
// the property was in a workflow file, not a config file
function getNameInvalid(configFile) {
return getConfigFilePropertyError(configFile, NAME_PROPERTY, "must be a non-empty string");
return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
}
exports.getNameInvalid = getNameInvalid;
function getDisableDefaultQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, "must be a boolean");
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
}
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
function getQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array");
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
}
exports.getQueriesInvalid = getQueriesInvalid;
function getQueryUsesInvalid(configFile, queryUses) {
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `must be a built-in suite (${builtinSuites.join(" or ")}), a relative path, or be of the form "owner/repo[/path]@ref"${queryUses !== undefined ? `\n Found: ${queryUses}` : ""}`);
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
'), a relative path, or be of the form "owner/repo[/path]@ref"' +
(queryUses !== undefined ? '\n Found: ' + queryUses : ''));
}
exports.getQueryUsesInvalid = getQueryUsesInvalid;
function getPathsIgnoreInvalid(configFile) {
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, "must be an array of non-empty strings");
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
}
exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
function getPathsInvalid(configFile) {
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, "must be an array of non-empty strings");
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
}
exports.getPathsInvalid = getPathsInvalid;
function getLocalPathOutsideOfRepository(configFile, localPath) {
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" is outside of the repository`);
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
}
exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
function getLocalPathDoesNotExist(configFile, localPath) {
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" does not exist in the repository`);
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
}
exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
return `The configuration file "${configFile}" is outside of the workspace`;
return 'The configuration file "' + configFile + '" is outside of the workspace';
}
exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
function getConfigFileDoesNotExistErrorMessage(configFile) {
return `The configuration file "${configFile}" does not exist`;
return 'The configuration file "' + configFile + '" does not exist';
}
exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
function getConfigFileRepoFormatInvalidMessage(configFile) {
let error = `The configuration file "${configFile}" is not a supported remote file reference.`;
error += " Expected format <owner>/<repository>/<file-path>@<ref>";
let error = 'The configuration file "' + configFile + '" is not a supported remote file reference.';
error += ' Expected format <owner>/<repository>/<file-path>@<ref>';
return error;
}
exports.getConfigFileRepoFormatInvalidMessage = getConfigFileRepoFormatInvalidMessage;
function getConfigFileFormatInvalidMessage(configFile) {
return `The configuration file "${configFile}" could not be read`;
return 'The configuration file "' + configFile + '" could not be read';
}
exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage;
function getConfigFileDirectoryGivenMessage(configFile) {
return `The configuration file "${configFile}" looks like a directory, not a file`;
return 'The configuration file "' + configFile + '" looks like a directory, not a file';
}
exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
function getConfigFilePropertyError(configFile, property, error) {
if (configFile === undefined) {
return `The workflow property "${property}" is invalid: ${error}`;
}
else {
return `The configuration file "${configFile}" is invalid: property "${property}" ${error}`;
}
return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
}
function getNoLanguagesError() {
return ("Did not detect any languages to analyze. " +
"Please update input in workflow or check that GitHub detects the correct languages in your repository.");
}
exports.getNoLanguagesError = getNoLanguagesError;
function getUnknownLanguagesError(languages) {
return `Did not recognise the following languages: ${languages.join(", ")}`;
}
exports.getUnknownLanguagesError = getUnknownLanguagesError;
/**
* Gets the set of languages in the current repository
*/
async function getLanguagesInRepo(repository, githubAuth, githubUrl, logger) {
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
const response = await api
.getApiClient(githubAuth, githubUrl, true)
.repos.listLanguages({
owner: repository.owner,
repo: repository.repo,
});
logger.debug(`Languages API response: ${JSON.stringify(response)}`);
// The GitHub API is going to return languages in order of popularity,
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
const languages = new Set();
for (const lang of Object.keys(response.data)) {
const parsedLang = languages_1.parseLanguage(lang);
if (parsedLang !== undefined) {
languages.add(parsedLang);
async function getLanguagesInRepo() {
var _a;
// Translate between GitHub's API names for languages and ours
const codeqlLanguages = {
'C': 'cpp',
'C++': 'cpp',
'C#': 'csharp',
'Go': 'go',
'Java': 'java',
'JavaScript': 'javascript',
'TypeScript': 'javascript',
'Python': 'python',
};
let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
if (repo_nwo) {
let owner = repo_nwo[0];
let repo = repo_nwo[1];
core.debug(`GitHub repo ${owner} ${repo}`);
const response = await api.getApiClient().request("GET /repos/:owner/:repo/languages", ({
owner,
repo
}));
core.debug("Languages API response: " + JSON.stringify(response));
// The GitHub API is going to return languages in order of popularity,
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
let languages = new Set();
for (let lang in response.data) {
if (lang in codeqlLanguages) {
languages.add(codeqlLanguages[lang]);
}
}
return [...languages];
}
else {
return [];
}
return [...languages];
}
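The popularity ordering both variants rely on is just Set insertion order over the (popularity-ordered) keys of the languages API response; a self-contained sketch with hypothetical data:
// Sketch: de-duplicate while preserving the API's popularity order.
// TypeScript and JavaScript both map to "javascript", so only the more
// popular of the two determines its position.
const apiLanguages = { TypeScript: 52000, JavaScript: 31000, Python: 9000 }; // hypothetical byte counts
const toCodeQL = { TypeScript: "javascript", JavaScript: "javascript", Python: "python" };
const ordered = new Set();
for (const lang of Object.keys(apiLanguages)) {
    if (lang in toCodeQL) {
        ordered.add(toCodeQL[lang]);
    }
}
console.log([...ordered]); // ["javascript", "python"]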
/**
* Get the languages to analyse.
@@ -329,96 +328,49 @@ async function getLanguagesInRepo(repository, githubAuth, githubUrl, logger) {
* The result is obtained from the action input parameter 'languages' if that
* has been set, otherwise it is deduced as all languages in the repo that
* can be analysed.
*
* If no languages could be detected from either the workflow or the repository
* then throw an error.
*/
async function getLanguages(languagesInput, repository, githubAuth, githubUrl, logger) {
async function getLanguages() {
// Obtain from action input 'languages' if set
let languages = (languagesInput || "")
.split(",")
.map((x) => x.trim())
.filter((x) => x.length > 0);
logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
let languages = core.getInput('languages', { required: false })
.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
core.info("Languages from configuration: " + JSON.stringify(languages));
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo(repository, githubAuth, githubUrl, logger);
logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
languages = await getLanguagesInRepo();
core.info("Automatically detected languages: " + JSON.stringify(languages));
}
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
throw new Error(getNoLanguagesError());
}
// Make sure they are supported
const parsedLanguages = [];
const unknownLanguages = [];
for (const language of languages) {
const parsedLanguage = languages_1.parseLanguage(language);
if (parsedLanguage === undefined) {
unknownLanguages.push(language);
}
else if (parsedLanguages.indexOf(parsedLanguage) === -1) {
parsedLanguages.push(parsedLanguage);
}
}
if (unknownLanguages.length > 0) {
throw new Error(getUnknownLanguagesError(unknownLanguages));
}
return parsedLanguages;
}
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, githubUrl, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, githubUrl, logger);
}
}
// Returns true if either no queries were provided in the workflow,
// or if the queries in the workflow were provided in "additive" mode,
// indicating that they shouldn't override the config queries but
// should instead be added in addition
function shouldAddConfigFileQueries(queriesInput) {
if (queriesInput) {
return queriesInput.trimStart().substr(0, 1) === "+";
}
return true;
return languages;
}
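The "+" prefix convention handled by addQueriesFromWorkflow and shouldAddConfigFileQueries is worth one concrete illustration (values here are hypothetical):
// Sketch of the workflow `queries` input semantics implemented above:
//   "./my-queries"   -> replaces the queries listed in the config file
//   "+./my-queries"  -> added on top of the config file's queries
//   (unset)          -> config file queries, or the defaults, are used
function isAdditiveQueriesInput(queriesInput) {
    return Boolean(queriesInput) && queriesInput.trimStart().startsWith("+");
}
console.log(isAdditiveQueriesInput("+./my-queries")); // true
console.log(isAdditiveQueriesInput("./my-queries"));  // false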
/**
* Get the default config for when the user has not supplied one.
*/
async function getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
const languages = await getLanguages(languagesInput, repository, githubAuth, githubUrl, logger);
async function getDefaultConfig() {
const languages = await getLanguages();
const queries = {};
await addDefaultQueries(codeQL, languages, queries);
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl, logger);
}
await addDefaultQueries(languages, queries);
return {
languages,
queries,
languages: languages,
queries: queries,
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
paths: []
};
}
exports.getDefaultConfig = getDefaultConfig;
/**
* Load the config from the given file.
*/
async function loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
async function loadConfig(configFile) {
let parsedYAML;
if (isLocal(configFile)) {
// Treat the config file as relative to the workspace
configFile = path.resolve(checkoutPath, configFile);
parsedYAML = getLocalConfig(configFile, checkoutPath);
const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
configFile = path.resolve(workspacePath, configFile);
parsedYAML = getLocalConfig(configFile, workspacePath);
}
else {
parsedYAML = await getRemoteConfig(configFile, githubAuth, githubUrl);
parsedYAML = await getRemoteConfig(configFile);
}
// Validate that the 'name' property is syntactically correct,
// even though we don't use the value yet.
@@ -430,7 +382,12 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
throw new Error(getNameInvalid(configFile));
}
}
const languages = await getLanguages(languagesInput, repository, githubAuth, githubUrl, logger);
const languages = await getLanguages();
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
throw new Error("Did not detect any languages to analyze. Please update input in workflow.");
}
const queries = {};
const pathsIgnore = [];
const paths = [];
@@ -442,70 +399,42 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
}
if (!disableDefaultQueries) {
await addDefaultQueries(codeQL, languages, queries);
await addDefaultQueries(languages, queries);
}
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl, logger);
}
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
if (QUERIES_PROPERTY in parsedYAML) {
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
throw new Error(getQueriesInvalid(configFile));
}
for (const query of parsedYAML[QUERIES_PROPERTY]) {
if (!(QUERIES_USES_PROPERTY in query) ||
typeof query[QUERIES_USES_PROPERTY] !== "string") {
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, githubUrl, logger, configFile);
await parseQueryUses(configFile, languages, queries, query[QUERIES_USES_PROPERTY]);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
throw new Error(getPathsIgnoreInvalid(configFile));
}
for (const path of parsedYAML[PATHS_IGNORE_PROPERTY]) {
if (typeof path !== "string" || path === "") {
parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
if (typeof path !== "string" || path === '') {
throw new Error(getPathsIgnoreInvalid(configFile));
}
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile, logger));
}
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
});
}
if (PATHS_PROPERTY in parsedYAML) {
if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
throw new Error(getPathsInvalid(configFile));
}
for (const path of parsedYAML[PATHS_PROPERTY]) {
if (typeof path !== "string" || path === "") {
parsedYAML[PATHS_PROPERTY].forEach(path => {
if (typeof path !== "string" || path === '') {
throw new Error(getPathsInvalid(configFile));
}
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile, logger));
}
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
});
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
for (const language of languages) {
if (queries[language] === undefined ||
(queries[language].builtin.length === 0 &&
queries[language].custom.length === 0)) {
throw new Error(`Did not detect any queries to run for ${language}. ` +
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
}
}
return {
languages,
queries,
pathsIgnore,
paths,
originalUserInput: parsedYAML,
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
};
return { languages, queries, pathsIgnore, paths };
}
/**
* Load and return the config.
@@ -513,18 +442,19 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
async function initConfig() {
const configFile = core.getInput('config-file');
let config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug("No configuration file was provided");
config = await getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
if (configFile === '') {
core.debug('No configuration file was provided');
config = await getDefaultConfig();
}
else {
config = await loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
config = await loadConfig(configFile);
}
// Save the config so we can easily access it again in the future
await saveConfig(config, logger);
await saveConfig(config);
return config;
}
exports.initConfig = initConfig;
@@ -533,30 +463,28 @@ function isLocal(configPath) {
if (configPath.indexOf("./") === 0) {
return true;
}
return configPath.indexOf("@") === -1;
return (configPath.indexOf("@") === -1);
}
function getLocalConfig(configFile, checkoutPath) {
function getLocalConfig(configFile, workspacePath) {
// Error if the config file is now outside of the workspace
if (!(configFile + path.sep).startsWith(checkoutPath + path.sep)) {
if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
}
// Error if the file does not exist
if (!fs.existsSync(configFile)) {
throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
}
return yaml.safeLoad(fs.readFileSync(configFile, "utf8"));
return yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
}
async function getRemoteConfig(configFile, githubAuth, githubUrl) {
async function getRemoteConfig(configFile) {
// retrieve the various parts of the config location, and ensure they're present
const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)");
const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
const pieces = format.exec(configFile);
// 5 = 4 groups + the whole expression
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
}
const response = await api
.getApiClient(githubAuth, githubUrl, true)
.repos.getContent({
const response = await api.getApiClient().repos.getContents({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
path: pieces.groups.path,
@@ -572,38 +500,47 @@ async function getRemoteConfig(configFile, githubAuth, githubUrl) {
else {
throw new Error(getConfigFileFormatInvalidMessage(configFile));
}
return yaml.safeLoad(Buffer.from(fileContents, "base64").toString("binary"));
return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
}
/**
* Get the directory where the parsed config will be stored.
*/
function getPathToParsedConfigFolder() {
return util.getRequiredEnvParam('RUNNER_TEMP');
}
/**
* Get the file path where the parsed config will be stored.
*/
function getPathToParsedConfigFile(tempDir) {
return path.join(tempDir, "config");
function getPathToParsedConfigFile() {
return path.join(getPathToParsedConfigFolder(), 'config');
}
exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
/**
* Store the given config to the path returned from getPathToParsedConfigFile.
*/
async function saveConfig(config, logger) {
async function saveConfig(config) {
const configString = JSON.stringify(config);
const configFile = getPathToParsedConfigFile(config.tempDir);
fs.mkdirSync(path.dirname(configFile), { recursive: true });
fs.writeFileSync(configFile, configString, "utf8");
logger.debug("Saved config:");
logger.debug(configString);
await io.mkdirP(getPathToParsedConfigFolder());
fs.writeFileSync(getPathToParsedConfigFile(), configString, 'utf8');
core.debug('Saved config:');
core.debug(configString);
}
/**
* Get the config that has been saved to the given temp dir.
* If the config could not be found then returns undefined.
* Get the config.
*
* If this is the first time in a workflow that this is being called then
* this will parse the config from the user input. The parsed config is then
* stored to a known location. On the second and further calls, this will
* return the contents of the parsed config from the known location.
*/
async function getConfig(tempDir, logger) {
const configFile = getPathToParsedConfigFile(tempDir);
async function getConfig() {
const configFile = getPathToParsedConfigFile();
if (!fs.existsSync(configFile)) {
return undefined;
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
const configString = fs.readFileSync(configFile, "utf8");
logger.debug("Loaded config:");
logger.debug(configString);
const configString = fs.readFileSync(configFile, 'utf8');
core.debug('Loaded config:');
core.debug(configString);
return JSON.parse(configString);
}
exports.getConfig = getConfig;
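Both variants persist the parsed config as JSON and read it back verbatim between workflow steps; a minimal standalone round-trip under an illustrative temp path:
const fs = require("fs");
const os = require("os");
const path = require("path");
// Sketch: write the parsed config once, then read it back later, as the
// init and analyze steps do. The file location here is illustrative only.
const parsedConfigFile = path.join(os.tmpdir(), "codeql-config-sketch", "config");
const parsedConfig = { languages: ["javascript"], queries: {}, pathsIgnore: [], paths: [] };
fs.mkdirSync(path.dirname(parsedConfigFile), { recursive: true });
fs.writeFileSync(parsedConfigFile, JSON.stringify(parsedConfig), "utf8");
console.log(JSON.parse(fs.readFileSync(parsedConfigFile, "utf8")).languages); // ["javascript"]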

File diff suppressed because one or more lines are too long

lib/config-utils.test.js generated

@@ -16,49 +16,40 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const sinon_1 = __importDefault(require("sinon"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const CodeQL = __importStar(require("./codeql"));
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
// Returns the filepath of the newly-created file
function createConfigFile(inputFileContents, tmpDir) {
const configFilePath = path.join(tmpDir, "input");
fs.writeFileSync(configFilePath, inputFileContents, "utf8");
return configFilePath;
function setInput(name, value) {
// Transformation copied from
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
if (value !== undefined) {
process.env[envVar] = value;
}
else {
delete process.env[envVar];
}
}
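The setInput helper mirrors how @actions/core resolves getInput from INPUT_* environment variables, so a test can drive action inputs directly; for example:
// Usage sketch: after this call, core.getInput('config-file') sees the value,
// because it reads process.env['INPUT_CONFIG-FILE'].
setInput("config-file", "input");
console.log(process.env["INPUT_CONFIG-FILE"]); // "input"
setInput("config-file", undefined); // removes the variable again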
function mockGetContents(content) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
let client = new github.GitHub('123');
const response = {
data: content,
data: content
};
const spyGetContents = sinon_1.default
.stub(client.repos, "getContent")
.resolves(response);
const spyGetContents = sinon_1.default.stub(client.repos, "getContents").resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client);
return spyGetContents;
}
function mockListLanguages(languages) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const response = {
data: {},
};
for (const language of languages) {
response.data[language] = 123;
}
sinon_1.default.stub(client.repos, "listLanguages").resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client);
}
ava_1.default("load empty config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const logger = logging_1.getRunnerLogger(true);
const languages = "javascript,python";
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
setInput('languages', 'javascript,python');
CodeQL.setCodeQL({
resolveQueries: async function () {
return {
byLanguage: {},
noDeclaredLanguage: {},
@@ -66,15 +57,18 @@ ava_1.default("load empty config", async (t) => {
};
},
});
const config = await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger));
const config = await configUtils.initConfig();
t.deepEqual(config, await configUtils.getDefaultConfig());
});
});
ava_1.default("loading config saves config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const logger = logging_1.getRunnerLogger(true);
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
setInput('languages', 'javascript,python');
CodeQL.setCodeQL({
resolveQueries: async function () {
return {
byLanguage: {},
noDeclaredLanguage: {},
@@ -83,64 +77,73 @@ ava_1.default("loading config saves config", async (t) => {
},
});
// Sanity check the saved config file does not already exist
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig returns undefined before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger);
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile()));
// Sanity check that getConfig throws before we have called initConfig
await t.throwsAsync(configUtils.getConfig);
const config1 = await configUtils.initConfig();
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile()));
// And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig(tmpDir, logger);
const config2 = await configUtils.getConfig();
t.deepEqual(config1, config2);
});
});
ava_1.default("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', '../input');
try {
await configUtils.initConfig(undefined, undefined, "../input", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, "../input"))));
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
}
});
});
ava_1.default("load non-local input with invalid repo syntax", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// no filename given, just a repo
const configFile = "octo-org/codeql-config@main";
setInput('config-file', 'octo-org/codeql-config@main');
try {
await configUtils.initConfig(undefined, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage("octo-org/codeql-config@main")));
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
}
});
});
ava_1.default("load non-existent input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const languages = "javascript";
const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile)));
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
t.false(fs.existsSync(path.join(tmpDir, 'input')));
setInput('config-file', 'input');
setInput('languages', 'javascript');
try {
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, "input"))));
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
}
});
});
ava_1.default("load non-empty input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
resolveQueries: async function () {
return {
byLanguage: {
javascript: {
"/foo/a.ql": {},
"/bar/b.ql": {},
'javascript': {
'/foo/a.ql': {},
'/bar/b.ql': {},
},
},
noDeclaredLanguage: {},
@@ -159,52 +162,38 @@ ava_1.default("load non-empty input", async (t) => {
- b
paths:
- c/d`;
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, 'foo'));
// And the config we expect it to parse to
const expectedConfig = {
languages: [languages_1.Language.javascript],
queries: {
javascript: {
builtin: [],
custom: ["/foo/a.ql", "/bar/b.ql"],
},
},
pathsIgnore: ["a", "b"],
paths: ["c/d"],
originalUserInput: {
name: "my config",
"disable-default-queries": true,
queries: [{ uses: "./foo" }],
"paths-ignore": ["a", "b"],
paths: ["c/d"],
},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: codeQL.getPath(),
languages: ['javascript'],
queries: { 'javascript': ['/foo/a.ql', '/bar/b.ql'] },
pathsIgnore: ['a', 'b'],
paths: ['c/d'],
};
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
const actualConfig = await configUtils.initConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
ava_1.default("Default queries are used", async (t) => {
ava_1.default("default queries are used", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Check that the default behaviour is to add the default queries.
// In this case if a config file is specified but does not include
// the disable-default-queries field.
// We determine this by whether CodeQL.resolveQueries is called
// with the correct arguments.
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
CodeQL.setCodeQL({
resolveQueries: async function (queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return {
byLanguage: {
javascript: {
"foo.ql": {},
},
'javascript': {},
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
@@ -217,239 +206,25 @@ ava_1.default("Default queries are used", async (t) => {
const inputFileContents = `
paths:
- foo`;
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
fs.mkdirSync(path.join(tmpDir, 'foo'));
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
await configUtils.initConfig();
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, [
"javascript-code-scanning.qls",
]);
t.deepEqual(resolveQueriesArgs[0].queries, ['javascript-code-scanning.qls']);
t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined);
});
});
/**
* Returns the provided queries, just in the right format for a resolved query
* This way we can test by seeing which returned items are in the final
* configuration.
*/
function queriesToResolvedQueryForm(queries) {
const dummyResolvedQueries = {};
for (const q of queries) {
dummyResolvedQueries[q] = {};
}
return {
byLanguage: {
javascript: dummyResolvedQueries,
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
}
ava_1.default("Queries can be specified in config file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
fs.mkdirSync(path.join(tmpDir, "foo"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
t.deepEqual(resolveQueriesArgs.length, 2);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/foo$/);
// Now check that the end result contains the default queries and the query from config
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/foo$/);
});
});
ava_1.default("Queries from config file can be overridden in workflow file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
// This config item should take precedence over the config file but shouldn't affect the default queries.
const queries = "./override";
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, "override"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file.
t.deepEqual(resolveQueriesArgs.length, 2);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override$/);
// Now check that the end result contains only the default queries and the override query
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/override$/);
});
});
ava_1.default("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
name: my config
disable-default-queries: true`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const queries = "./workflow-query";
fs.mkdirSync(path.join(tmpDir, "workflow-query"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries.length, 1);
t.regex(resolveQueriesArgs[0].queries[0], /.*\/workflow-query$/);
// Now check that the end result contains only the workflow query, and not the default one
t.deepEqual(config.queries["javascript"].builtin.length, 0);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].custom[0], /.*\/workflow-query$/);
});
});
ava_1.default("Multiple queries can be specified in workflow file, no config file required", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
fs.mkdirSync(path.join(tmpDir, "override1"));
fs.mkdirSync(path.join(tmpDir, "override2"));
const queries = "./override1,./override2";
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
t.deepEqual(resolveQueriesArgs.length, 3);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override1$/);
t.regex(resolveQueriesArgs[2].queries[0], /.*\/override2$/);
// Now check that the end result contains both the queries from the workflow, as well as the defaults
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 2);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/override1$/);
t.regex(config.queries["javascript"].custom[1], /.*\/override2$/);
});
});
ava_1.default("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
// These queries shouldn't override anything, because the value is prefixed with "+"
const queries = "+./additional1,./additional2";
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, "additional1"));
fs.mkdirSync(path.join(tmpDir, "additional2"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries,
// once for each of additional1 and additional2,
// and once for './foo' from the config file
t.deepEqual(resolveQueriesArgs.length, 4);
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/additional1$/);
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
t.regex(resolveQueriesArgs[2].queries[0], /.*\/additional2$/);
t.deepEqual(resolveQueriesArgs[3].queries.length, 1);
t.regex(resolveQueriesArgs[3].queries[0], /.*\/foo$/);
// Now check that the end result contains all the queries
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 3);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/additional1$/);
t.regex(config.queries["javascript"].custom[1], /.*\/additional2$/);
t.regex(config.queries["javascript"].custom[2], /.*\/foo$/);
});
});
ava_1.default("Invalid queries in workflow file handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const queries = "foo/bar@v1@v3";
const languages = "javascript";
// This function just needs to be type-correct; it doesn't need to do anything,
// since we're deliberately passing in invalid data
const codeQL = codeql_1.setCodeQL({
async resolveQueries(_queries, _extraSearchPath) {
return {
byLanguage: {
javascript: {},
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
try {
await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
t.fail("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getQueryUsesInvalid(undefined, "foo/bar@v1@v3")));
}
});
});
ava_1.default("API client used when reading remote config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
resolveQueries: async function () {
return {
byLanguage: {
javascript: {
"foo.ql": {},
},
},
byLanguage: {},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
@@ -472,21 +247,24 @@ ava_1.default("API client used when reading remote config", async (t) => {
};
const spyGetContents = mockGetContents(dummyResponse);
// Create checkout directory for remote queries repository
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript";
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
fs.mkdirSync(path.join(tmpDir, 'foo/bar'), { recursive: true });
setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
setInput('languages', 'javascript');
await configUtils.initConfig();
t.assert(spyGetContents.called);
});
});
ava_1.default("Remote config handles the case where a directory is provided", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const dummyResponse = []; // directories are returned as arrays
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
try {
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
@@ -495,49 +273,30 @@ ava_1.default("Remote config handles the case where a directory is provided", as
});
ava_1.default("Invalid format of remote config handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const dummyResponse = {
// note no "content" property here
};
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
try {
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
}
});
});
ava_1.default("No detected languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
mockListLanguages([]);
try {
await configUtils.initConfig(undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
}
});
});
ava_1.default("Unknown languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const languages = "ruby,english";
try {
await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(["ruby", "english"])));
}
});
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
ava_1.default(`load invalid input - ${testName}`, async (t) => {
ava_1.default("load invalid input - " + testName, async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
resolveQueries: async function () {
return {
byLanguage: {},
noDeclaredLanguage: {},
@@ -545,13 +304,13 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
};
},
});
const languages = "javascript";
const configFile = "input";
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, "utf8");
const inputFile = path.join(tmpDir, 'input');
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
try {
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
await configUtils.initConfig();
throw new Error('initConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
@@ -559,14 +318,14 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
});
});
}
doInvalidInputTest("name invalid type", `
doInvalidInputTest('name invalid type', `
name:
- foo: bar`, configUtils.getNameInvalid);
doInvalidInputTest("disable-default-queries invalid type", `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest("queries invalid type", `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest("paths-ignore invalid type", `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest("paths invalid type", `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest("queries uses invalid type", `
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest('queries uses invalid type', `
queries:
- uses:
- hello: world`, configUtils.getQueryUsesInvalid);
@@ -577,47 +336,52 @@ function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
name: my config
queries:
- name: foo
uses: ${input}`;
doInvalidInputTest(`queries uses "${input}"`, inputFileContents, expectedErrorMessageGenerator);
uses: ` + input;
doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
}
// Various "uses" fields, and the errors they should produce
doInvalidQueryUsesTest("''", (c) => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", (c) => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", (c) => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", (c) => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", (c) => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", (c) => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", (c) => configUtils.getLocalPathOutsideOfRepository(c, ".."));
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
const validPaths = [
"foo",
"foo/",
"foo/**",
"foo/**/",
"foo/**/**",
"foo/**/bar/**/baz",
"**/",
"**/foo",
"/foo",
'foo',
'foo/',
'foo/**',
'foo/**/',
'foo/**/**',
'foo/**/bar/**/baz',
'**/',
'**/foo',
'/foo',
];
const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
ava_1.default("path validations", (t) => {
const invalidPaths = [
'a/***/b',
'a/**b',
'a/b**',
'**',
];
ava_1.default('path validations', t => {
// Dummy values to pass to validateAndSanitisePath
const propertyName = "paths";
const configFile = "./.github/codeql/config.yml";
const propertyName = 'paths';
const configFile = './.github/codeql/config.yml';
for (const path of validPaths) {
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile, logging_1.getRunnerLogger(true)));
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile));
}
for (const path of invalidPaths) {
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile, logging_1.getRunnerLogger(true)));
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile));
}
});
ava_1.default("path sanitisation", (t) => {
ava_1.default('path sanitisation', t => {
// Dummy values to pass to validateAndSanitisePath
const propertyName = "paths";
const configFile = "./.github/codeql/config.yml";
const propertyName = 'paths';
const configFile = './.github/codeql/config.yml';
// Valid paths are not modified
t.deepEqual(configUtils.validateAndSanitisePath("foo/bar", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/bar");
t.deepEqual(configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile), 'foo/bar');
// Trailing stars are stripped
t.deepEqual(configUtils.validateAndSanitisePath("foo/**", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/");
t.deepEqual(configUtils.validateAndSanitisePath('foo/**', propertyName, configFile), 'foo/');
});
//# sourceMappingURL=config-utils.test.js.map

File diff suppressed because one or more lines are too long


@@ -1,3 +0,0 @@
{
"bundleVersion": "codeql-bundle-20200826"
}

17
lib/error-matcher.js generated

@@ -1,17 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// exported only for testing purposes
exports.namedMatchersForTesting = {
/*
In due course it may be possible to remove the regex, if/when javascript also exits with code 32.
*/
noSourceCodeFound: {
exitCode: 32,
outputRegex: new RegExp("No JavaScript or TypeScript code found\\."),
message: "No code found during the build. Please see:\n" +
"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
},
};
// we collapse the matches into an array for use in execErrorCatcher
exports.errorMatchers = Object.values(exports.namedMatchersForTesting);
//# sourceMappingURL=error-matcher.js.map
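The matchers above pair an exit code with an output regex so that a known failure mode can be surfaced with a friendlier message. As a rough sketch of how a finished process could be checked against them (the execErrorCatcher wiring is not part of this diff, so the helper below and its name are assumptions):
// Hypothetical helper: return the friendly message of the first matcher that
// fires on either the process exit code or its captured output.
function findMatchingError(exitCode, output, matchers) {
    for (const matcher of matchers) {
        const codeMatches = matcher.exitCode !== undefined && matcher.exitCode === exitCode;
        const outputMatches = matcher.outputRegex !== undefined && matcher.outputRegex.test(output);
        if (codeMatches || outputMatches) {
            return matcher.message;
        }
    }
    return undefined;
}
// e.g. findMatchingError(32, buildLog, errorMatchers) would return the
// "No code found during the build" message for the matcher defined above.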


@@ -1 +0,0 @@
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}


@@ -1,29 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const error_matcher_1 = require("./error-matcher");
/*
NB We test the regexes for all the matchers against example log output snippets.
*/
ava_1.default("noSourceCodeFound matches against example javascript output", async (t) => {
t.assert(testErrorMatcher("noSourceCodeFound", `
2020-09-07T17:39:53.9050522Z [2020-09-07 17:39:53] [build] Done extracting /opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/data/externs/web/ie_vml.js (3 ms)
2020-09-07T17:39:53.9051849Z [2020-09-07 17:39:53] [build-err] No JavaScript or TypeScript code found.
2020-09-07T17:39:53.9052444Z [2020-09-07 17:39:53] [build-err] No JavaScript or TypeScript code found.
2020-09-07T17:39:53.9251124Z [2020-09-07 17:39:53] [ERROR] Spawned process exited abnormally (code 255; tried to run: [/opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/autobuild.sh])
`));
});
function testErrorMatcher(matcherName, logSample) {
if (!(matcherName in error_matcher_1.namedMatchersForTesting)) {
throw new Error(`Unknown matcher ${matcherName}`);
}
const regex = error_matcher_1.namedMatchersForTesting[matcherName].outputRegex;
if (regex === undefined) {
throw new Error(`Cannot test matcher ${matcherName} with null regex`);
}
return regex.test(logSample);
}
//# sourceMappingURL=error-matcher.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,aAAI,CAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}


@@ -7,32 +7,26 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const util = __importStar(require("./util"));
/**
* Check out repository at the given ref, and return the directory of the checkout.
*/
async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, logger) {
logger.info(`Checking out ${repository}`);
const checkoutLocation = path.join(tempDir, repository, ref);
if (!checkoutLocation.startsWith(tempDir)) {
// this still permits locations that mess with sibling repositories in `tempDir`, but that is acceptable
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
}
async function checkoutExternalRepository(repository, ref) {
const folder = util.getRequiredEnvParam('RUNNER_TEMP');
core.info('Checking out ' + repository);
const checkoutLocation = path.join(folder, repository);
if (!fs.existsSync(checkoutLocation)) {
const repoURL = `${githubUrl}/${repository}`;
await new toolrunnner.ToolRunner("git", [
"clone",
repoURL,
checkoutLocation,
]).exec();
await new toolrunnner.ToolRunner("git", [
`--work-tree=${checkoutLocation}`,
`--git-dir=${checkoutLocation}/.git`,
"checkout",
ref,
]).exec();
const repoURL = 'https://github.com/' + repository + '.git';
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
await exec.exec('git', [
'--work-tree=' + checkoutLocation,
'--git-dir=' + checkoutLocation + '/.git',
'checkout', ref,
]);
}
return checkoutLocation;
}


@@ -1 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAI7B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,SAAiB,EACjB,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,GAAG,SAAS,IAAI,UAAU,EAAE,CAAC;QAC7C,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE;YACtC,OAAO;YACP,OAAO;YACP,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE;YACtC,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAlCD,gEAkCC"}
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA+B;AAE/B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAAC,UAAkB,EAAE,GAAW;IAC9E,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,UAAU,CAAC,CAAC;IAExC,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;IACvD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,UAAU,GAAG,MAAM,CAAC;QAC5D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;QAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACrB,cAAc,GAAG,gBAAgB;YACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;YACzC,UAAU,EAAE,GAAG;SAChB,CAAC,CAAC;KACJ;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAjBD,gEAiBC"}


@@ -1,4 +1,7 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -6,90 +9,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const externalQueries = __importStar(require("./external-queries"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("checkoutExternalQueries", async (t) => {
await util.withTmpDir(async (tmpDir) => {
// Create a test repo in a subdir of the temp dir.
// It should have a default branch with two commits after the initial commit, where
// - the first commit contains files 'a' and 'b'
// - the second commit contains only 'a'
// Place the repo in a subdir because we're going to checkout a copy in tmpDir
const testRepoBaseDir = path.join(tmpDir, "test-repo-dir");
const repoName = "some/repo";
const repoPath = path.join(testRepoBaseDir, repoName);
const repoGitDir = path.join(repoPath, ".git");
// Run the given git command, and return the output.
// Passes --git-dir and --work-tree.
// Any stderr output is suppressed until the command fails.
const runGit = async function (command) {
let stdout = "";
let stderr = "";
command = [
`--git-dir=${repoGitDir}`,
`--work-tree=${repoPath}`,
...command,
];
console.log(`Running: git ${command.join(" ")}`);
try {
await new toolrunnner.ToolRunner("git", command, {
silent: true,
listeners: {
stdout: (data) => {
stdout += data.toString();
},
stderr: (data) => {
stderr += data.toString();
},
},
}).exec();
}
catch (e) {
console.log(`Command failed: git ${command.join(" ")}`);
process.stderr.write(stderr);
throw e;
}
return stdout.trim();
};
fs.mkdirSync(repoPath, { recursive: true });
await runGit(["init", repoPath]);
await runGit(["config", "user.email", "test@github.com"]);
await runGit(["config", "user.name", "Test Test"]);
fs.writeFileSync(path.join(repoPath, "a"), "a content");
await runGit(["add", "a"]);
await runGit(["commit", "-m", "commit1"]);
fs.writeFileSync(path.join(repoPath, "b"), "b content");
await runGit(["add", "b"]);
await runGit(["commit", "-m", "commit1"]);
const commit1Sha = await runGit(["rev-parse", "HEAD"]);
fs.unlinkSync(path.join(repoPath, "b"));
await runGit(["add", "b"]);
await runGit(["commit", "-m", "commit2"]);
const commit2Sha = await runGit(["rev-parse", "HEAD"]);
// Checkout the first commit, which should contain 'a' and 'b'
t.false(fs.existsSync(path.join(tmpDir, repoName)));
await externalQueries.checkoutExternalRepository(repoName, commit1Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
// Checkout the second commit as well, which should only contain 'a'
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
await externalQueries.checkoutExternalRepository(repoName, commit2Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
process.env["RUNNER_TEMP"] = tmpDir;
await externalQueries.checkoutExternalRepository("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b");
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch
t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
});
});
//# sourceMappingURL=external-queries.test.js.map


@@ -1 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,0EAA4D;AAC5D,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE,OAAO,EAAE;oBAC/C,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CAAC,CAAC,IAAI,EAAE,CAAC;aACX;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QAEnD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAA
Q,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,0BAA0B,CAAC,kBAAkB,EAAE,0CAA0C,CAAC,CAAC;QAEjH,mGAAmG;QACnG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

119
lib/finalize-db.js generated Normal file

@@ -0,0 +1,119 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function setupPythonExtractor() {
const codeqlPython = process.env["CODEQL_PYTHON"];
if (codeqlPython === undefined || codeqlPython.length === 0) {
// If CODEQL_PYTHON is not set, no dependencies were installed, so we don't need to do anything
return;
}
let output = '';
const options = {
listeners: {
stdout: (data) => {
output += data.toString();
}
}
};
await exec.exec(codeqlPython, ['-c', 'import os; import pip; print(os.path.dirname(os.path.dirname(pip.__file__)))'], options);
core.info('Setting LGTM_INDEX_IMPORT_PATH=' + output);
process.env['LGTM_INDEX_IMPORT_PATH'] = output;
output = '';
await exec.exec(codeqlPython, ['-c', 'import sys; print(sys.version_info[0])'], options);
core.info('Setting LGTM_PYTHON_SETUP_VERSION=' + output);
process.env['LGTM_PYTHON_SETUP_VERSION'] = output;
}
async function createdDBForScannedLanguages(databaseFolder) {
const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
if (scannedLanguages) {
const codeql = codeql_1.getCodeQL();
for (const language of scannedLanguages.split(',')) {
core.startGroup('Extracting ' + language);
if (language === 'python') {
await setupPythonExtractor();
}
await codeql.extractScannedLanguage(path.join(databaseFolder, language), language);
core.endGroup();
}
}
}
async function finalizeDatabaseCreation(databaseFolder, config) {
await createdDBForScannedLanguages(databaseFolder);
const codeql = codeql_1.getCodeQL();
for (const language of config.languages) {
core.startGroup('Finalizing ' + language);
await codeql.finalizeDatabase(path.join(databaseFolder, language));
core.endGroup();
}
}
// Runs queries and creates sarif files in the given folder
async function runQueries(databaseFolder, sarifFolder, config) {
const codeql = codeql_1.getCodeQL();
for (let database of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + database);
const queries = config.queries[database] || [];
if (queries.length === 0) {
throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
}
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = path.join(databaseFolder, database + '-queries.qls');
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync(querySuite, querySuiteContents);
core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
const sarifFile = path.join(sarifFolder, database + '.sarif');
await codeql.databaseAnalyze(path.join(databaseFolder, database), sarifFile, querySuite);
core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
core.endGroup();
}
}
async function run() {
try {
if (util.should_abort('finish', true) || !await util.reportActionStarting('finish')) {
return;
}
const config = await configUtils.getConfig();
core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
const databaseFolder = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_DATABASE_DIR);
const sarifFolder = core.getInput('output');
await io.mkdirP(sarifFolder);
core.info('Finalizing database creation');
await finalizeDatabaseCreation(databaseFolder, config);
core.info('Analyzing database');
await runQueries(databaseFolder, sarifFolder, config);
if ('true' === core.getInput('upload')) {
if (!await upload_lib.upload(sarifFolder)) {
await util.reportActionFailed('finish', 'upload');
return;
}
}
}
catch (error) {
core.setFailed(error.message);
await util.reportActionFailed('finish', error.message, error.stack);
return;
}
await util.reportActionSucceeded('finish');
}
run().catch(e => {
core.setFailed("analyze action failed: " + e);
console.log(e);
});
//# sourceMappingURL=finalize-db.js.map
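Because runQueries above passes the selected queries to the CLI through a file rather than on the command line, each database gets a small query suite written next to it. A minimal sketch of what that generated <language>-queries.qls contents would look like, with invented query paths:
// Illustrative only - the query paths below are made up for the example.
const queries = [
    '/path/to/ql/javascript/Security/CWE-079/Xss.ql',
    '/path/to/external/repo/extra-query.ql',
];
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
// querySuiteContents, as written to <databaseFolder>/<language>-queries.qls:
// - query: /path/to/ql/javascript/Security/CWE-079/Xss.ql
// - query: /path/to/external/repo/extra-query.ql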

1
lib/finalize-db.js.map Normal file

@@ -0,0 +1 @@
{"version":3,"file":"finalize-db.js","sourceRoot":"","sources":["../src/finalize-db.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,gDAAkC;AAClC,uCAAyB;AACzB,2CAA6B;AAE7B,qCAAqC;AACrC,4DAA8C;AAC9C,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAG/B,KAAK,UAAU,oBAAoB;IACjC,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAClD,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3D,+FAA+F;QAC/F,OAAO;KACR;IAED,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,OAAO,GAAG;QACd,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC;IAEF,MAAM,IAAI,CAAC,IAAI,CACb,YAAY,EACZ,CAAC,IAAI,EAAE,8EAA8E,CAAC,EACtF,OAAO,CAAC,CAAC;IACX,IAAI,CAAC,IAAI,CAAC,iCAAiC,GAAG,MAAM,CAAC,CAAC;IACtD,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAE/C,MAAM,GAAG,EAAE,CAAC;IACZ,MAAM,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,IAAI,EAAE,wCAAwC,CAAC,EAAE,OAAO,CAAC,CAAC;IACzF,IAAI,CAAC,IAAI,CAAC,oCAAoC,GAAG,MAAM,CAAC,CAAC;IACzD,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,GAAG,MAAM,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,4BAA4B,CAAC,cAAsB;IAChE,MAAM,gBAAgB,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,+BAA+B,CAAC,CAAC;IAChF,IAAI,gBAAgB,EAAE;QACpB,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,KAAK,MAAM,QAAQ,IAAI,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE;YAClD,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;YAE1C,IAAI,QAAQ,KAAK,QAAQ,EAAE;gBACzB,MAAM,oBAAoB,EAAE,CAAC;aAC9B;YAED,MAAM,MAAM,CAAC,sBAAsB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;YACnF,IAAI,CAAC,QAAQ,EAAE,CAAC;SACjB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CAAC,cAAsB,EAAE,MAA0B;IACxF,MAAM,4BAA4B,CAAC,cAAc,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;QAC1C,MAAM,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC,CAAC;QACnE,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CAAC,cAAsB,EAAE,WAAmB,EAAE,MAA0B;IAC/F,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,IAAI,QAAQ,IAAI,EAAE,CAAC,WAAW,CAAC,cAAc,CAAC,EAAE;QACnD,IAAI,CAAC,UAAU,CAAC,YAAY,GAAG,QAAQ,CAAC,CAAC;QAEzC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,GAAG,QAAQ,GAAG,gDAAgD,CAAC,CAAC;SACrG;QAED,uEAAuE;QACvE,2EAA2E;QAC3E,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,GAAG,cAAc,CAAC,CAAC;QACxE,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACxE,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;QACjD,IAAI,CAAC,KAAK,CAAC,uBAAuB,GAAG,QAAQ,GAAG,OAAO,GAAG,kBAAkB,CAAC,CAAC;QAE9E,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;QAE9D,MAAM,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,CAAC;QAEzF,IAAI,CAAC,KAAK,CAAC,6BAA6B,GAAG,QAAQ,GAAG,eAAe,GAAG,SAAS,GAAG,GAAG,CAAC,CAAC;QACzF,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,EAAE;YACnF,OAAO;SACR;QACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,CAAC;QAE7C,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,0BAA0B,EAAE,EAAE,CAAC,CAAC;QAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEzD,MAAM,cAAc,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEtF,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAC5C,MAAM,EAAE,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;QAE7B,IAAI,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;QAC1C,MAAM,wBAAwB,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;QAEvD,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAChC,MAAM,UAAU,CAAC,cAAc,EAAE,WAAW,EAAE,MA
AM,CAAC,CAAC;QAEtD,IAAI,MAAM,KAAK,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACtC,IAAI,CAAC,MAAM,UAAU,CAAC,MAAM,CAAC,WAAW,CAAC,EAAE;gBACzC,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;gBAClD,OAAO;aACR;SACF;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACpE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,QAAQ,CAAC,CAAC;AAC7C,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,yBAAyB,GAAG,CAAC,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

2
lib/finalize-db.test.js generated Normal file

@@ -0,0 +1,2 @@
"use strict";
//# sourceMappingURL=finalize-db.test.js.map


@@ -0,0 +1 @@
{"version":3,"file":"finalize-db.test.js","sourceRoot":"","sources":["../src/finalize-db.test.ts"],"names":[],"mappings":""}

75
lib/fingerprints.js generated

@@ -10,12 +10,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const fs = __importStar(require("fs"));
const long_1 = __importDefault(require("long"));
const tab = "\t".charCodeAt(0);
const space = " ".charCodeAt(0);
const lf = "\n".charCodeAt(0);
const cr = "\r".charCodeAt(0);
const tab = '\t'.charCodeAt(0);
const space = ' '.charCodeAt(0);
const lf = '\n'.charCodeAt(0);
const cr = '\r'.charCodeAt(0);
const BLOCK_SIZE = 100;
const MOD = long_1.default.fromInt(37); // L
// Compute the starting point for the hash mod
@@ -47,7 +48,7 @@ function hash(callback, input) {
const lineNumbers = Array(BLOCK_SIZE).fill(-1);
// The current hash value, updated as we read each character
let hash = long_1.default.ZERO;
const firstMod = computeFirstMod();
let firstMod = computeFirstMod();
// The current index in the window, will wrap around to zero when we reach BLOCK_SIZE
let index = 0;
// The line number of the character we are currently processing from the input
@@ -61,12 +62,12 @@ function hash(callback, input) {
const hashCounts = {};
// Output the current hash and line number to the callback function
const outputHash = function () {
const hashValue = hash.toUnsigned().toString(16);
let hashValue = hash.toUnsigned().toString(16);
if (!hashCounts[hashValue]) {
hashCounts[hashValue] = 0;
}
hashCounts[hashValue]++;
callback(lineNumbers[index], `${hashValue}:${hashCounts[hashValue]}`);
callback(lineNumbers[index], hashValue + ":" + hashCounts[hashValue]);
lineNumbers[index] = -1;
};
// Update the current hash value and increment the index in the window
@@ -121,7 +122,7 @@ function hash(callback, input) {
exports.hash = hash;
// Generate a hash callback function that updates the given result in-place
// when it receives a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(result, location, logger) {
function locationUpdateCallback(result, location) {
var _a, _b;
let locationStartLine = (_b = (_a = location.physicalLocation) === null || _a === void 0 ? void 0 : _a.region) === null || _b === void 0 ? void 0 : _b.startLine;
if (locationStartLine === undefined) {
@@ -145,7 +146,10 @@ function locationUpdateCallback(result, location, logger) {
result.partialFingerprints.primaryLocationLineHash = hash;
}
else if (existingFingerprint !== hash) {
logger.warning(`Calculated fingerprint of ${hash} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}`);
core.warning("Calculated fingerprint of " + hash +
" for file " + location.physicalLocation.artifactLocation.uri +
" line " + lineNumber +
", but found existing inconsistent fingerprint value " + existingFingerprint);
}
};
}
@@ -153,48 +157,48 @@ function locationUpdateCallback(result, location, logger) {
// the source file so we can hash it.
// If possible returns a absolute file path for the source file,
// or if not possible then returns undefined.
function resolveUriToFile(location, artifacts, checkoutPath, logger) {
function resolveUriToFile(location, artifacts) {
// This may be referencing an artifact
if (!location.uri && location.index !== undefined) {
if (typeof location.index !== "number" ||
if (typeof location.index !== 'number' ||
location.index < 0 ||
location.index >= artifacts.length ||
typeof artifacts[location.index].location !== "object") {
logger.debug(`Ignoring location as URI "${location.index}" is invalid`);
typeof artifacts[location.index].location !== 'object') {
core.debug('Ignoring location as index "' + location.index + '" is invalid');
return undefined;
}
location = artifacts[location.index].location;
}
// Get the URI and decode
if (typeof location.uri !== "string") {
logger.debug(`Ignoring location as index "${location.uri}" is invalid`);
if (typeof location.uri !== 'string') {
core.debug('Ignoring location as uri "' + location.uri + '" is invalid');
return undefined;
}
let uri = decodeURIComponent(location.uri);
// Remove a file scheme, and abort if the scheme is anything else
const fileUriPrefix = "file://";
const fileUriPrefix = 'file://';
if (uri.startsWith(fileUriPrefix)) {
uri = uri.substring(fileUriPrefix.length);
}
if (uri.indexOf("://") !== -1) {
logger.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
if (uri.indexOf('://') !== -1) {
core.debug('Ignoring location URI "' + uri + "' as the scheme is not recognised");
return undefined;
}
// Discard any absolute paths that aren't in the src root
const srcRootPrefix = `${checkoutPath}/`;
if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) {
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
core.debug('Ignoring location URI "' + uri + "' as it is outside of the src root");
return undefined;
}
// Just assume a relative path is relative to the src root.
// This is not necessarily true but should be a good approximation
// and here we likely want to err on the side of handling more cases.
if (!uri.startsWith("/")) {
if (!uri.startsWith('/')) {
uri = srcRootPrefix + uri;
}
// Check the file exists
if (!fs.existsSync(uri)) {
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
core.debug("Unable to compute fingerprint for non-existent file: " + uri);
return undefined;
}
return uri;
@@ -202,43 +206,42 @@ function resolveUriToFile(location, artifacts, checkoutPath, logger) {
exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
function addFingerprints(sarifContents, checkoutPath, logger) {
var _a, _b;
const sarif = JSON.parse(sarifContents);
function addFingerprints(sarifContents) {
let sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile = {};
for (const run of sarif.runs || []) {
// We may need the list of artifacts to resolve against
const artifacts = run.artifacts || [];
let artifacts = run.artifacts || [];
for (const result of run.results || []) {
// Check the primary location is defined correctly and is in the src root
const primaryLocation = (result.locations || [])[0];
if (!((_b = (_a = primaryLocation) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation)) {
logger.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
if (!primaryLocation ||
!primaryLocation.physicalLocation ||
!primaryLocation.physicalLocation.artifactLocation) {
core.debug("Unable to compute fingerprint for invalid location: " + JSON.stringify(primaryLocation));
continue;
}
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, checkoutPath, logger);
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
if (!filepath) {
continue;
}
if (!callbacksByFile[filepath]) {
callbacksByFile[filepath] = [];
}
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation, logger));
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation));
}
}
// Now hash each file that was found
for (const [filepath, callbacks] of Object.entries(callbacksByFile)) {
Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => {
// A callback that forwards the hash to all other callbacks for that file
const teeCallback = function (lineNumber, hash) {
for (const c of Object.values(callbacks)) {
c(lineNumber, hash);
}
Object.values(callbacks).forEach(c => c(lineNumber, hash));
};
const fileContents = fs.readFileSync(filepath).toString();
hash(teeCallback, fileContents);
}
});
return JSON.stringify(sarif);
}
exports.addFingerprints = addFingerprints;
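The exported hash function implements the rolling hash, computed over a fixed-size window of the file contents, that feeds the primaryLocationLineHash partial fingerprints set above; it invokes the callback once per line of input, as the tests that follow rely on. A minimal sketch of driving it directly (the file name is just an example):
const fs = require('fs');
const fingerprints = require('./fingerprints');

const contents = fs.readFileSync('example.js').toString();
fingerprints.hash((lineNumber, hash) => {
    // hash looks like "8b7cf3e952e7aeb2:1": a hex hash value plus a per-file
    // occurrence counter that disambiguates identical hash values.
    console.log(lineNumber, hash);
}, contents);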

File diff suppressed because one or more lines are too long


@@ -14,12 +14,11 @@ const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const fingerprints = __importStar(require("./fingerprints"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
function testHash(t, input, expectedHashes) {
let index = 0;
const callback = function (lineNumber, hash) {
let callback = function (lineNumber, hash) {
t.is(lineNumber, index + 1);
t.is(hash, expectedHashes[index]);
index++;
@@ -27,7 +26,7 @@ function testHash(t, input, expectedHashes) {
fingerprints.hash(callback, input);
t.is(index, input.split(/\r\n|\r|\n/).length);
}
ava_1.default("hash", (t) => {
ava_1.default('hash', (t) => {
// Try empty file
testHash(t, "", ["c129715d7a2bc9a3:1"]);
// Try various combinations of newline characters
@@ -35,7 +34,7 @@ ava_1.default("hash", (t) => {
"271789c17abda88f:1",
"54703d4cd895b18:1",
"180aee12dab6264:1",
"a23a3dc5e078b07b:1",
"a23a3dc5e078b07b:1"
]);
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
"8b7cf3e952e7aeb2:1",
@@ -93,85 +92,68 @@ ava_1.default("hash", (t) => {
"a9cf91f7bbf1862b:1",
"55ec222b86bcae53:1",
"cc97dc7b1d7d8f7b:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, "x = 2\nx = 1\nprint(x)\nx = 3\nprint(x)\nx = 4\nprint(x)\n", [
"e54938cc54b302f1:1",
"bb609acbe9138d60:1",
"1131fd5871777f34:1",
"5c482a0f8b35ea28:1",
"54517377da7028d2:1",
"2c644846cb18d53e:1",
"f1b89f20de0d133:1",
"c129715d7a2bc9a3:1",
"c129715d7a2bc9a3:1"
]);
});
function testResolveUriToFile(uri, index, artifactsURIs) {
const location = { uri, index };
const artifacts = artifactsURIs.map((uri) => ({ location: { uri } }));
return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), logging_1.getRunnerLogger(true));
const location = { "uri": uri, "index": index };
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
return fingerprints.resolveUriToFile(location, artifacts);
}
ava_1.default("resolveUriToFile", (t) => {
ava_1.default('resolveUriToFile', t => {
// The resolveUriToFile method checks that the file exists and is in the right directory
// so we need to give it real files to look at. We will use this file as an example.
// For this to work we require the current working directory to be a parent, but this
// should generally always be the case so this is fine.
const cwd = process.cwd();
const filepath = __filename;
t.true(filepath.startsWith(`${cwd}/`));
t.true(filepath.startsWith(cwd + '/'));
const relativeFilepaht = filepath.substring(cwd.length + 1);
process.env['GITHUB_WORKSPACE'] = cwd;
// Absolute paths are unmodified
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${filepath}`, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
// Relative paths are made absolute
t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${relativeFilepaht}`, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + relativeFilepaht, undefined, []), filepath);
// Absolute paths outside the src root are discarded
t.is(testResolveUriToFile("/src/foo/bar.js", undefined, []), undefined);
t.is(testResolveUriToFile("file:///src/foo/bar.js", undefined, []), undefined);
t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
// Other schemes are discarded
t.is(testResolveUriToFile(`https://${filepath}`, undefined, []), undefined);
t.is(testResolveUriToFile(`ftp://${filepath}`, undefined, []), undefined);
t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
// Invalid URIs are discarded
t.is(testResolveUriToFile(1, undefined, []), undefined);
t.is(testResolveUriToFile(undefined, undefined, []), undefined);
// Non-existent files are discarded
t.is(testResolveUriToFile(`${filepath}2`, undefined, []), undefined);
t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
// Index is resolved
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ["foo", filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
// Invalid indexes are discarded
t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
t.is(testResolveUriToFile(undefined, "0", [filepath]), undefined);
t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
});
ava_1.default("addFingerprints", (t) => {
ava_1.default('addFingerprints', t => {
// Run an end-to-end test on a test file
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
.toString();
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected);
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input), expected);
});
ava_1.default("missingRegions", (t) => {
ava_1.default('missingRegions', t => {
// Run an end-to-end test on a test file
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
.toString();
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected);
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input), expected);
});
//# sourceMappingURL=fingerprints.test.js.map

File diff suppressed because one or more lines are too long

104
lib/init-action.js generated

@@ -1,104 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const init_1 = require("./init");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
async function sendSuccessStatusReport(startedAt, config) {
var _a;
const statusReportBase = await actionsUtil.createStatusReportBase("init", "success", startedAt);
const languages = config.languages.join(",");
const workflowLanguages = actionsUtil.getOptionalInput("languages");
const paths = (config.originalUserInput.paths || []).join(",");
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(",");
const disableDefaultQueries = config.originalUserInput["disable-default-queries"]
? languages
: "";
const queries = [];
let queriesInput = (_a = actionsUtil.getOptionalInput("queries")) === null || _a === void 0 ? void 0 : _a.trim();
if (queriesInput === undefined || queriesInput.startsWith("+")) {
queries.push(...(config.originalUserInput.queries || []).map((q) => q.uses));
}
if (queriesInput !== undefined) {
queriesInput = queriesInput.startsWith("+")
? queriesInput.substr(1)
: queriesInput;
queries.push(...queriesInput.split(","));
}
const statusReport = {
...statusReportBase,
languages,
workflow_languages: workflowLanguages || "",
paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries.join(","),
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
const logger = logging_1.getActionsLogger();
let config;
let codeql;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt), true))) {
return;
}
codeql = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), actionsUtil.getRequiredInput("token"), actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", logger);
config = await init_1.initConfig(actionsUtil.getOptionalInput("languages"), actionsUtil.getOptionalInput("queries"), actionsUtil.getOptionalInput("config-file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, actionsUtil.getRequiredEnvParam("GITHUB_WORKSPACE"), actionsUtil.getRequiredInput("token"), actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), logger);
try {
await init_1.installPythonDeps(codeql, logger);
}
catch (err) {
logger.warning(`${err.message} You can call this action with 'setup-python-dependencies: false' to disable this process`);
}
}
catch (e) {
core.setFailed(e.message);
console.log(e);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "aborted", startedAt, e.message));
return;
}
try {
// Forward Go flags
const goFlags = process.env["GOFLAGS"];
if (goFlags) {
core.exportVariable("GOFLAGS", goFlags);
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
}
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env["CODEQL_RAM"] || "6500";
core.exportVariable("CODEQL_RAM", codeqlRam);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig !== undefined) {
for (const [key, value] of Object.entries(tracerConfig.env)) {
core.exportVariable(key, value);
}
if (process.platform === "win32") {
await init_1.injectWindowsTracer("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
}
}
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "failure", startedAt, error.message, error.stack));
return;
}
await sendSuccessStatusReport(startedAt, config);
}
run().catch((e) => {
core.setFailed(`init action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=init-action.js.map


@@ -1 +0,0 @@
{"version":3,"file":"init-action.js","sourceRoot":"","sources":["../src/init-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAG9C,iCAMgB;AAChB,uCAA6C;AAC7C,6CAAkD;AAkBlD,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,MAA0B;;IAE1B,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,MAAM,EACN,SAAS,EACT,SAAS,CACV,CAAC;IAEF,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC7C,MAAM,iBAAiB,GAAG,WAAW,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;IACpE,MAAM,KAAK,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CACvE,GAAG,CACJ,CAAC;IACF,MAAM,qBAAqB,GAAG,MAAM,CAAC,iBAAiB,CACpD,yBAAyB,CAC1B;QACC,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,EAAE,CAAC;IAEP,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,SAAG,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,0CAAE,IAAI,EAAE,CAAC;IACnE,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;QAC9D,OAAO,CAAC,IAAI,CACV,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,OAAO,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAC/D,CAAC;KACH;IACD,IAAI,YAAY,KAAK,SAAS,EAAE;QAC9B,YAAY,GAAG,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC;YACzC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC;YACxB,CAAC,CAAC,YAAY,CAAC;QACjB,OAAO,CAAC,IAAI,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;KAC1C;IAED,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,SAAS;QACT,kBAAkB,EAAE,iBAAiB,IAAI,EAAE;QAC3C,KAAK;QACL,YAAY,EAAE,WAAW;QACzB,uBAAuB,EAAE,qBAAqB;QAC9C,OAAO,EAAE,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC;KAC3B,CAAC;IAEF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,MAAc,CAAC;IAEnB,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CAAC,MAAM,EAAE,UAAU,EAAE,SAAS,CAAC,EACvE,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,GAAG,MAAM,iBAAU,CACvB,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,SAAS,EACT,MAAM,CACP,CAAC;QACF,MAAM,GAAG,MAAM,iBAAU,CACvB,WAAW,CAAC,gBAAgB,CAAC,WAAW,CAAC,EACzC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EACvC,WAAW,CAAC,gBAAgB,CAAC,aAAa,CAAC,EAC3C,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAC9C,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,MAAM,EACN,WAAW,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,EACnD,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,MAAM,CACP,CAAC;QAEF,IAAI;YACF,MAAM,wBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;SACzC;QAAC,OAAO,GAAG,EAAE;YACZ,MAAM,CAAC,OAAO,CACZ,GAAG,GAAG,CAAC,OAAO,2FAA2F,CAC1G,CAAC;SACH;KACF;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,SAAS,EACT,SAAS,EACT,CAAC,CAAC,OAAO,CACV,CACF,CAAC;QACF,OAAO;KACR;IAED,IAAI;QACF,mBAAmB;QACnB,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;QACvC,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YACxC,IAAI,CAAC,OAAO,CACV,6GAA6G,CAC9G,CAAC;SACH;QAED,mGAAmG;QACnG,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,MAAM,CAAC;QACtD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;QAE7C,MAAM,YAAY,GAAG,MAAM,cAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnD,IAAI,YAAY,KAAK,SAAS,EAAE;YAC9B,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,EAAE;gBAC3D,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;aACjC;YAED,IAAI,OAAO,CAAC,QAAQ,
KAAK,OAAO,EAAE;gBAChC,MAAM,0BAAmB,CACvB,mBAAmB,EACnB,SAAS,EACT,MAAM,EACN,MAAM,EACN,YAAY,CACb,CAAC;aACH;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;AACnD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,EAAE,CAAC,CAAC;IAC3C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

155
lib/init.js generated

@@ -1,155 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
async function initCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger) {
logger.startGroup("Setup CodeQL tools");
const codeql = await codeql_1.setupCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger);
await codeql.printVersion();
logger.endGroup();
return codeql;
}
exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;
}
exports.initConfig = initConfig;
async function runInit(codeql, config) {
const sourceRoot = path.resolve();
fs.mkdirSync(util.getCodeQLDatabasesDir(config.tempDir), { recursive: true });
// TODO: replace this code once CodeQL supports multi-language tracing
for (const language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config.tempDir, language), language, sourceRoot);
}
return await tracer_config_1.getCombinedTracerConfig(config, codeql);
}
exports.runInit = runInit;
// Runs a powershell script to inject the tracer into a parent process
// so it can tracer future processes, hopefully including the build process.
// If processName is given then injects into the nearest parent process with
// this name, otherwise uses the processLevel-th parent if defined, otherwise
// defaults to the 3rd parent as a rough guess.
async function injectWindowsTracer(processName, processLevel, config, codeql, tracerConfig) {
let script;
if (processName !== undefined) {
script = `
Param(
[Parameter(Position=0)]
[String]
$tracer
)
$id = $PID
while ($true) {
$p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
Write-Host "Found process: $p"
if ($p -eq $null) {
throw "Could not determine ${processName} process"
}
if ($p[0].Name -eq "${processName}") {
Break
} else {
$id = $p[0].ParentProcessId
}
}
Write-Host "Final process: $p"
Invoke-Expression "&$tracer --inject=$id"`;
}
else {
// If the level is not defined then guess at the 3rd parent process.
// This won't be correct in every setting but it should be enough in most settings,
// and overestimating is likely better in this situation so we definitely trace
// what we want, though this does run the risk of interfering with future CI jobs.
// Note that the default of 3 doesn't work on github actions, so we include a
// special case in the script that checks for Runner.Worker.exe so we can still work
// on actions if the runner is invoked there.
processLevel = processLevel || 3;
script = `
Param(
[Parameter(Position=0)]
[String]
$tracer
)
$id = $PID
for ($i = 0; $i -le ${processLevel}; $i++) {
$p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
Write-Host "Parent process \${i}: $p"
if ($p -eq $null) {
throw "Process tree ended before reaching required level"
}
# Special case just in case the runner is used on actions
if ($p[0].Name -eq "Runner.Worker.exe") {
Write-Host "Found Runner.Worker.exe process which means we are running on GitHub Actions"
Write-Host "Aborting search early and using process: $p"
Break
} else {
$id = $p[0].ParentProcessId
}
}
Write-Host "Final process: $p"
Invoke-Expression "&$tracer --inject=$id"`;
}
const injectTracerPath = path.join(config.tempDir, "inject-tracer.ps1");
fs.writeFileSync(injectTracerPath, script);
await new toolrunnner.ToolRunner("powershell", [
"-ExecutionPolicy",
"Bypass",
"-file",
injectTracerPath,
path.resolve(path.dirname(codeql.getPath()), "tools", "win64", "tracer.exe"),
], { env: { ODASA_TRACER_CONFIGURATION: tracerConfig.spec } }).exec();
}
exports.injectWindowsTracer = injectWindowsTracer;
async function installPythonDeps(codeql, logger) {
logger.startGroup("Setup Python dependencies");
if (process.platform !== "linux") {
logger.info("Currently, auto-installing python dependancies is only supported on linux");
logger.endGroup();
return;
}
const scriptsFolder = path.resolve(__dirname, "../python-setup");
// Set up tools on the GitHub-hosted runners
if (process.env["ImageOS"] !== undefined) {
try {
await new toolrunnner.ToolRunner(path.join(scriptsFolder, "install_tools.sh")).exec();
}
catch (e) {
// This script tries to install some needed tools in the runner. It should not fail, but if it does
// we just abort the process without failing the action
logger.endGroup();
logger.warning("Unable to download and extract the tools needed for installing the python dependecies");
}
}
// Install dependencies
try {
await new toolrunnner.ToolRunner(path.join(scriptsFolder, "auto_install_packages.py"), [path.dirname(codeql.getPath())]).exec();
}
catch (e) {
logger.endGroup();
logger.warning("We were unable to install your python dependencies.");
}
logger.endGroup();
}
exports.installPythonDeps = installPythonDeps;
//# sourceMappingURL=init.js.map
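
For reference, a hedged sketch of how the exports above are chained together on Windows; the call shape mirrors the "init" command in lib/runner.js below, and the "Runner.Worker.exe" process name is only an assumed example, not something this file hard-codes.

const { runInit, injectWindowsTracer } = require("./init");

// `codeql` and `config` are assumed to come from initCodeQL/initConfig above.
async function initAndTrace(codeql, config) {
    const tracerConfig = await runInit(codeql, config);
    if (tracerConfig !== undefined && process.platform === "win32") {
        // "Runner.Worker.exe" is the hosted Actions worker; passing undefined instead
        // falls back to the processLevel heuristic described in the comment above.
        await injectWindowsTracer("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
    }
    return tracerConfig;
}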

View File

@@ -1 +0,0 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAAkB,EAClB,SAAiB,EACjB,OAAe,EACf,QAAgB,EAChB,IAAe,EACf,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,MAAM,GAAG,MAAM,oBAAW,CAC9B,SAAS,EACT,UAAU,EACV,SAAS,EACT,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,MAAM,CACP,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAtBD,gCAsBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,UAAkB,EAClB,SAAiB,EACjB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,UAAU,EACV,SAAS,EACT,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9BD,gCA8BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,EACR,UAAU,CACX,CAAC;KACH;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnBD,0BAmBC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;gDAiBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,WAAW,CAAC,UAAU,CAC9B,YAAY,EACZ;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AAxFD,kDAwFC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,MAAM,CAAC,IAAI,CACT,2EAA2E,CAC5E,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,OAAO;KACR;IAED,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,2CAA2C;IAC3C,IAAI,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,SAAS,EAAE;QACxC,IAAI;YACF,MAAM,IAAI,WAAW,CAAC,UAAU,CAC9B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QAAC,OAAO,CAAC,EAAE;YACV,mGAAmG;YACnG,uDAAuD;YACvD,MAAM,CAAC,QAAQ,EAAE,CAAC;YAClB,MAAM,CAAC,OAAO,CACZ,uFAAuF,CACxF,CAAC;SACH;KACF;IAED,uBAAuB;IACvB,IAAI;QACF,MAAM,IAAI,WAAW,CAAC,UAAU,CAC9B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,0BAA0B,CAAC,EACpD,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CACjC,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CAAC,qDAAqD,CAAC,CAAC;KACvE;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAxCD,8CAwCC"}

43
lib/languages.js generated
View File

@@ -1,43 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// All the languages supported by CodeQL
var Language;
(function (Language) {
Language["csharp"] = "csharp";
Language["cpp"] = "cpp";
Language["go"] = "go";
Language["java"] = "java";
Language["javascript"] = "javascript";
Language["python"] = "python";
})(Language = exports.Language || (exports.Language = {}));
// Additional names for languages
const LANGUAGE_ALIASES = {
c: Language.cpp,
"c++": Language.cpp,
"c#": Language.csharp,
typescript: Language.javascript,
};
// Translate from user input or GitHub's API names for languages to CodeQL's names for languages
function parseLanguage(language) {
// Normalise to lower case
language = language.toLowerCase();
// See if it's an exact match
if (language in Language) {
return language;
}
// Check language aliases
if (language in LANGUAGE_ALIASES) {
return LANGUAGE_ALIASES[language];
}
return undefined;
}
exports.parseLanguage = parseLanguage;
function isTracedLanguage(language) {
return ["cpp", "java", "csharp"].includes(language);
}
exports.isTracedLanguage = isTracedLanguage;
function isScannedLanguage(language) {
return !isTracedLanguage(language);
}
exports.isScannedLanguage = isScannedLanguage;
//# sourceMappingURL=languages.js.map
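
As a quick illustration of the mapping above (the expected values agree with lib/languages.test.js below):

const { parseLanguage, isTracedLanguage } = require("./languages");

console.log(parseLanguage("C++"));        // "cpp"        (alias; input is lower-cased first)
console.log(parseLanguage("TypeScript")); // "javascript" (alias)
console.log(parseLanguage("cobol"));      // undefined    (not a supported language)
console.log(isTracedLanguage("cpp"));     // true  -- cpp, java and csharp need a traced build
console.log(isTracedLanguage("python"));  // false -- everything else is scanned directly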

View File

@@ -1 +0,0 @@
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;AAAA,wCAAwC;AACxC,IAAY,QAOX;AAPD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;AACnB,CAAC,EAPW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAOnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACtD,CAAC;AAFD,4CAEC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}

44
lib/languages.test.js generated
View File

@@ -1,44 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("parseLangauge", async (t) => {
// Exact matches
t.deepEqual(languages_1.parseLanguage("csharp"), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage("cpp"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("go"), languages_1.Language.go);
t.deepEqual(languages_1.parseLanguage("java"), languages_1.Language.java);
t.deepEqual(languages_1.parseLanguage("javascript"), languages_1.Language.javascript);
t.deepEqual(languages_1.parseLanguage("python"), languages_1.Language.python);
// Aliases
t.deepEqual(languages_1.parseLanguage("c"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("c++"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("c#"), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage("typescript"), languages_1.Language.javascript);
// Not matches
t.deepEqual(languages_1.parseLanguage("foo"), undefined);
t.deepEqual(languages_1.parseLanguage(" "), undefined);
t.deepEqual(languages_1.parseLanguage(""), undefined);
});
ava_1.default("isTracedLanguage", async (t) => {
t.true(languages_1.isTracedLanguage(languages_1.Language.cpp));
t.true(languages_1.isTracedLanguage(languages_1.Language.java));
t.true(languages_1.isTracedLanguage(languages_1.Language.csharp));
t.false(languages_1.isTracedLanguage(languages_1.Language.go));
t.false(languages_1.isTracedLanguage(languages_1.Language.javascript));
t.false(languages_1.isTracedLanguage(languages_1.Language.python));
});
ava_1.default("isScannedLanguage", async (t) => {
t.false(languages_1.isScannedLanguage(languages_1.Language.cpp));
t.false(languages_1.isScannedLanguage(languages_1.Language.java));
t.false(languages_1.isScannedLanguage(languages_1.Language.csharp));
t.true(languages_1.isScannedLanguage(languages_1.Language.go));
t.true(languages_1.isScannedLanguage(languages_1.Language.javascript));
t.true(languages_1.isScannedLanguage(languages_1.Language.python));
});
//# sourceMappingURL=languages.test.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"languages.test.js","sourceRoot":"","sources":["../src/languages.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,2CAKqB;AACrB,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,gBAAgB;IAChB,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IACtD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,MAAM,CAAC,EAAE,oBAAQ,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAC9D,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAEtD,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAE9D,cAAc;IACd,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC7C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;AAC5C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE1C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mBAAmB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACpC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACzC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE5C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC"}

26
lib/logging.js generated
View File

@@ -1,26 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
function getActionsLogger() {
return core;
}
exports.getActionsLogger = getActionsLogger;
function getRunnerLogger(debugMode) {
return {
debug: debugMode ? console.debug : () => undefined,
info: console.info,
warning: console.warn,
error: console.error,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
exports.getRunnerLogger = getRunnerLogger;
//# sourceMappingURL=logging.js.map
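
Illustrative only: what the runner-side logger returned above looks like in use. Group calls are deliberate no-ops outside of Actions.

const { getRunnerLogger } = require("./logging");

const logger = getRunnerLogger(true); // debugMode: true wires logger.debug to console.debug
logger.startGroup("example");         // no-op for the runner; log groups only exist on Actions
logger.debug("verbose details");      // dropped entirely when debugMode is false
logger.info("normal output");
logger.warning("something looks off");
logger.endGroup();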

View File

@@ -1 +0,0 @@
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAYtC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAFD,4CAEC;AAED,SAAgB,eAAe,CAAC,SAAkB;IAChD,OAAO;QACL,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS;QAClD,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AATD,0CASC"}

14
lib/repository.js generated
View File

@@ -1,14 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function parseRepositoryNwo(input) {
const parts = input.split("/");
if (parts.length !== 2) {
throw new Error(`"${input}" is not a valid repository name`);
}
return {
owner: parts[0],
repo: parts[1],
};
}
exports.parseRepositoryNwo = parseRepositoryNwo;
//# sourceMappingURL=repository.js.map
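
For example, the parser above accepts exactly one "owner/repo" pair and rejects anything else:

const { parseRepositoryNwo } = require("./repository");

console.log(parseRepositoryNwo("github/codeql-action")); // { owner: "github", repo: "codeql-action" }
try {
    parseRepositoryNwo("just-a-name");
} catch (e) {
    console.log(e.message); // '"just-a-name" is not a valid repository name'
}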

View File

@@ -1 +0,0 @@
{"version":3,"file":"repository.js","sourceRoot":"","sources":["../src/repository.ts"],"names":[],"mappings":";;AAMA,SAAgB,kBAAkB,CAAC,KAAa;IAC9C,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,MAAM,IAAI,KAAK,CAAC,IAAI,KAAK,kCAAkC,CAAC,CAAC;KAC9D;IACD,OAAO;QACL,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;QACf,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;KACf,CAAC;AACJ,CAAC;AATD,gDASC"}

271
lib/runner.js generated
View File

@@ -1,271 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = require("commander");
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const analyze_1 = require("./analyze");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
const program = new commander_1.Command();
program.version("0.0.1");
function parseGithubUrl(inputUrl) {
try {
const url = new URL(inputUrl);
// If we detect this is pointing at github.com
// then return with a fixed canonical URL.
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://github.com";
}
// Remove the API prefix if it's present
if (url.pathname.indexOf("/api/v3") !== -1) {
url.pathname = url.pathname.substring(0, url.pathname.indexOf("/api/v3"));
}
return url.toString();
}
catch (e) {
throw new Error(`"${inputUrl}" is not a valid URL`);
}
}
function getTempDir(userInput) {
const tempDir = path.join(userInput || process.cwd(), "codeql-runner");
if (!fs.existsSync(tempDir)) {
fs.mkdirSync(tempDir, { recursive: true });
}
return tempDir;
}
function getToolsDir(userInput) {
const toolsDir = userInput || path.join(os.homedir(), "codeql-runner-tools");
if (!fs.existsSync(toolsDir)) {
fs.mkdirSync(toolsDir, { recursive: true });
}
return toolsDir;
}
const codeqlEnvJsonFilename = "codeql-env.json";
// Imports the environment from codeqlEnvJsonFilename if not already present
function importTracerEnvironment(config) {
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
for (const key of Object.keys(env)) {
process.env[key] = env[key];
}
}
}
// Allow the user to specify refs in full refs/heads/branch format
// or just the short branch name and prepend "refs/heads/" to it.
function parseRef(userInput) {
if (userInput.startsWith("refs/")) {
return userInput;
}
else {
return `refs/heads/${userInput}`;
}
}
// Parses the --trace-process-name arg from process.argv, or returns undefined
function parseTraceProcessName() {
for (let i = 0; i < process.argv.length - 1; i++) {
if (process.argv[i] === "--trace-process-name") {
return process.argv[i + 1];
}
}
return undefined;
}
// Parses the --trace-process-level arg from process.argv, or returns undefined
function parseTraceProcessLevel() {
for (let i = 0; i < process.argv.length - 1; i++) {
if (process.argv[i] === "--trace-process-level") {
const v = parseInt(process.argv[i + 1], 10);
return isNaN(v) ? undefined : v;
}
}
return undefined;
}
program
.command("init")
.description("Initializes CodeQL")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.")
.option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.")
.option("--config-file <file>", "Path to config file.")
.option("--codeql-path <path>", "Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--tools-dir <dir>", "Directory to use for CodeQL tools and other files to store between runs. Default is a subdirectory of the home directory.")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--debug", "Print more verbose output", false)
// This prevents a message like: error: unknown option '--trace-process-level'
// Remove this if commander.js starts supporting hidden options.
.allowUnknownOption()
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const tempDir = getTempDir(cmd.tempDir);
const toolsDir = getToolsDir(cmd.toolsDir);
// Wipe the temp dir
logger.info(`Cleaning temp directory ${tempDir}`);
fs.rmdirSync(tempDir, { recursive: true });
fs.mkdirSync(tempDir, { recursive: true });
let codeql;
if (cmd.codeqlPath !== undefined) {
codeql = codeql_1.getCodeQL(cmd.codeqlPath);
}
else {
codeql = await init_1.initCodeQL(undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), tempDir, toolsDir, "runner", logger);
}
const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), cmd.githubAuth, parseGithubUrl(cmd.githubUrl), logger);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig === undefined) {
return;
}
if (process.platform === "win32") {
await init_1.injectWindowsTracer(parseTraceProcessName(), parseTraceProcessLevel(), config, codeql, tracerConfig);
}
// Always output a JSON file of the env that can be consumed programmatically
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
fs.writeFileSync(jsonEnvFile, JSON.stringify(tracerConfig.env));
if (process.platform === "win32") {
const batEnvFile = path.join(config.tempDir, "codeql-env.bat");
const batEnvFileContents = Object.entries(tracerConfig.env)
.map(([key, value]) => `Set ${key}=${value}`)
.join("\n");
fs.writeFileSync(batEnvFile, batEnvFileContents);
const powershellEnvFile = path.join(config.tempDir, "codeql-env.ps1");
const powershellEnvFileContents = Object.entries(tracerConfig.env)
.map(([key, value]) => `$env:${key}="${value}"`)
.join("\n");
fs.writeFileSync(powershellEnvFile, powershellEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}", "${batEnvFile}" and "${powershellEnvFile}". ` +
`Please export these variables to future processes so the build can be traced. ` +
`If using cmd/batch run "call ${batEnvFile}" ` +
`or if using PowerShell run "cat ${powershellEnvFile} | Invoke-Expression".`);
}
else {
// Assume that anything that's not windows is using a unix-style shell
const shEnvFile = path.join(config.tempDir, "codeql-env.sh");
const shEnvFileContents = Object.entries(tracerConfig.env)
// Some vars contain ${LIB} that we do not want to be expanded when executing this script
.map(([key, value]) => `export ${key}="${value.replace(/\$/g, "\\$")}"`)
.join("\n");
fs.writeFileSync(shEnvFile, shEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}" and "${shEnvFile}". ` +
`Please export these variables to future processes so the build can be traced, ` +
`for example by running ". ${shEnvFile}".`);
}
}
catch (e) {
logger.error("Init failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command("autobuild")
.description("Attempts to automatically build code")
.option("--language <language>", "The language to build. Otherwise will detect the dominant compiled language.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
}
importTracerEnvironment(config);
let language = undefined;
if (cmd.language !== undefined) {
language = languages_1.parseLanguage(cmd.language);
if (language === undefined || !config.languages.includes(language)) {
throw new Error(`"${cmd.language}" is not a recognised language. ` +
`Known languages in this project are ${config.languages.join(", ")}.`);
}
}
else {
language = autobuild_1.determineAutobuildLanguage(config, logger);
}
if (language !== undefined) {
await autobuild_1.runAutobuild(language, config, logger);
}
}
catch (e) {
logger.error("Autobuild failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command("analyze")
.description("Finishes extracting code and runs CodeQL queries")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--no-upload", "Do not upload results after analysis.")
.option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
.option("--ram <ram>", "Amount of memory to use when running queries. Default is to use all available memory.")
.option("--no-add-snippets", "Specify whether to include code snippets in the sarif output.")
.option("--threads <threads>", "Number of threads to use when running queries. " +
"Default is to use all available cores.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const tempDir = getTempDir(cmd.tempDir);
const outputDir = cmd.outputDir || path.join(tempDir, "codeql-sarif");
const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
}
await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), cmd.upload, "runner", outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
}
catch (e) {
logger.error("Analyze failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command("upload")
.description("Uploads a SARIF file, or all SARIF files from a directory, to code scanning")
.requiredOption("--sarif-file <file>", "SARIF file to upload, or a directory containing multiple SARIF files. (Required)")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), "runner", logger);
}
catch (e) {
logger.error("Upload failed");
logger.error(e);
process.exitCode = 1;
}
});
program.parse(process.argv);
//# sourceMappingURL=runner.js.map
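
A hedged sketch of the two module-private helpers defined near the top of this file; they are not exported, so the calls below are written as if they were, purely to illustrate the normalisation rules.

parseGithubUrl("https://api.github.com");         // "https://github.com" (canonicalised)
parseGithubUrl("https://ghe.example.com/api/v3"); // "https://ghe.example.com/" (API prefix stripped)
parseGithubUrl("not a url");                      // throws: '"not a url" is not a valid URL'
parseRef("main");                                 // "refs/heads/main"
parseRef("refs/pull/123/merge");                  // unchanged, already a full ref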

File diff suppressed because one or more lines are too long

76
lib/setup-tools.js generated Normal file
View File

@@ -0,0 +1,76 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const toolcache = __importStar(require("@actions/tool-cache"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
class CodeQLSetup {
constructor(codeqlDist) {
this.dist = codeqlDist;
this.tools = path.join(this.dist, 'tools');
this.cmd = path.join(codeqlDist, 'codeql');
// TODO check process.arch ?
if (process.platform === 'win32') {
this.platform = 'win64';
if (this.cmd.endsWith('codeql')) {
this.cmd += ".exe";
}
}
else if (process.platform === 'linux') {
this.platform = 'linux64';
}
else if (process.platform === 'darwin') {
this.platform = 'osx64';
}
else {
throw new Error("Unsupported plaform: " + process.platform);
}
}
}
exports.CodeQLSetup = CodeQLSetup;
async function setupCodeQL() {
try {
const codeqlURL = core.getInput('tools', { required: true });
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
}
else {
const codeqlPath = await toolcache.downloadTool(codeqlURL);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
}
return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
}
catch (e) {
core.error(e);
throw new Error("Unable to download and extract CodeQL CLI");
}
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url) {
const match = url.match(/\/codeql-bundle-(.*)\//);
if (match === null || match.length < 2) {
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
}
let version = match[1];
if (!semver.valid(version)) {
core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = '0.0.0-' + version;
}
const s = semver.clean(version);
if (!s) {
throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
}
return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
//# sourceMappingURL=setup-tools.js.map
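
The version parsing above treats bundle tags that are not valid SemVer as pre-releases of 0.0.0, as illustrated here (the URLs are made up; see also lib/setup-tools.test.js below):

const { getCodeQLURLVersion } = require("./setup-tools");

console.log(getCodeQLURLVersion("https://example.com/codeql-bundle-20200601/codeql-bundle.tar.gz")); // "0.0.0-20200601"
console.log(getCodeQLURLVersion("https://example.com/codeql-bundle-1.2.3/codeql-bundle.tar.gz"));    // "1.2.3"
// A URL without a codeql-bundle-<version>/ segment throws "Malformed tools url ...".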

1
lib/setup-tools.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAC7B,+CAAiC;AAEjC,MAAa,WAAW;IAMtB,YAAY,UAAkB;QAC5B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC/B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACpB;SACF;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACvC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC3B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACxC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SACzB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC7D;IACH,CAAC;CACF;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC/B,IAAI;QACF,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAExD,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAC9D,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACrD;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,gBAAgB,CAAC,CAAC;SACtF;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE3D;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAC9D;AACH,CAAC;AAnBD,kCAmBC;AAED,SAAgB,mBAAmB,CAAC,GAAW;IAE7C,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;IAClD,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,wBAAwB,GAAG,iCAAiC,CAAC,CAAC;KAC/E;IAED,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAEvB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;QAC1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,OAAO,gEAAgE,OAAO,GAAG,CAAC,CAAC;QAChH,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC;KAC9B;IAED,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,iDAAiD,OAAO,UAAU,CAAC,CAAC;KAC/G;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AApBD,kDAoBC"}

60
lib/setup-tools.test.js generated Normal file
View File

@@ -0,0 +1,60 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const setupTools = __importStar(require("./setup-tools"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.silenceDebugOutput(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env['GITHUB_WORKSPACE'] = tmpDir;
process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
const versions = ['20200601', '20200610'];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
nock_1.default('https://example.com')
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
await setupTools.setupCodeQL();
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
}
const cachedVersions = toolcache.findAllVersions('CodeQL');
t.is(cachedVersions.length, 2);
});
});
ava_1.default('parse codeql bundle url version', t => {
const tests = {
'20200601': '0.0.0-20200601',
'20200601.0': '0.0.0-20200601.0',
'20200601.0.0': '20200601.0.0',
'1.2.3': '1.2.3',
'1.2.3-alpha': '1.2.3-alpha',
'1.2.3-beta.1': '1.2.3-beta.1',
};
for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`;
try {
const parsedVersion = setupTools.getCodeQLURLVersion(url);
t.deepEqual(parsedVersion, expectedVersion);
}
catch (e) {
t.fail(e.message);
}
}
});
//# sourceMappingURL=setup-tools.test.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.test.js","sourceRoot":"","sources":["../src/setup-tools.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,0DAA4C;AAC5C,mDAAmD;AACnD,6CAA+B;AAE/B,kCAAkB,CAAC,aAAI,CAAC,CAAC;AAEzB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,UAAU,CAAC,WAAW,EAAE,CAAC;YAE/B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YAC1D,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}

253
lib/setup-tracer.js generated Normal file
View File

@@ -0,0 +1,253 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set([
    'SEMMLE_PRELOAD_libtrace',
    'SEMMLE_RUNNER',
    'SEMMLE_COPY_EXECUTABLES_ROOT',
    'SEMMLE_DEPTRACE_SOCKET',
    'SEMMLE_JAVA_TOOL_OPTIONS',
]);
async function tracerConfig(codeql, database, compilerSpec) {
const env = await codeql.getTracerEnv(database, compilerSpec);
const config = env['ODASA_TRACER_CONFIGURATION'];
const info = { spec: config, env: {} };
// Extract critical tracer variables from the environment
for (let entry of Object.entries(env)) {
const key = entry[0];
const value = entry[1];
// skip ODASA_TRACER_CONFIGURATION as it is handled separately
if (key === 'ODASA_TRACER_CONFIGURATION') {
continue;
}
// skip undefined values
if (typeof value === 'undefined') {
continue;
}
// Keep variables that do not exist in current environment. In addition always keep
// critical and CODEQL_ variables
if (typeof process.env[key] === 'undefined' || CRITICAL_TRACER_VARS.has(key) || key.startsWith('CODEQL_')) {
info.env[key] = value;
}
}
return info;
}
function concatTracerConfigs(configs) {
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
// A tracer 'spec' file has the following format: [log_file, number_of_blocks, blocks_text].
// Merge the environments
const env = {};
let copyExecutables = false;
let envSize = 0;
for (let v of Object.values(configs)) {
for (let e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
// skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
if (name === 'SEMMLE_COPY_EXECUTABLES_ROOT') {
copyExecutables = true;
}
else if (name in env) {
if (env[name] !== value) {
throw Error('Incompatible values in environment parameter ' +
name + ': ' + env[name] + ' and ' + value);
}
}
else {
env[name] = value;
envSize += 1;
}
}
}
// Concatenate spec files into a new spec file
let languages = Object.keys(configs);
const cppIndex = languages.indexOf('cpp');
// If cpp is present, make sure it is the last language, since it must be concatenated last
if (cppIndex !== -1) {
let lastLang = languages[languages.length - 1];
languages[languages.length - 1] = languages[cppIndex];
languages[cppIndex] = lastLang;
}
let totalLines = [];
let totalCount = 0;
for (let lang of languages) {
const lines = fs.readFileSync(configs[lang].spec, 'utf8').split(/\r?\n/);
const count = parseInt(lines[1], 10);
totalCount += count;
totalLines.push(...lines.slice(2));
}
const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
const spec = path.resolve(tempFolder, 'compound-spec');
const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
if (copyExecutables) {
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
envSize += 1;
}
fs.writeFileSync(spec, newSpecContent.join('\n'));
// Prepare the content of the compound environment file
let buffer = Buffer.alloc(4);
buffer.writeInt32LE(envSize, 0);
for (let e of Object.entries(env)) {
const key = e[0];
const value = e[1];
const lineBuffer = new Buffer(key + '=' + value + '\0', 'utf8');
const sizeBuffer = Buffer.alloc(4);
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
}
// Write the compound environment
const envPath = spec + '.environment';
fs.writeFileSync(envPath, buffer);
return { env, spec };
}
async function installPythonDeps(codeql) {
core.startGroup('Setup Python dependencies');
let scriptsFolder = '';
try {
const repoPath = await toolcache.downloadTool('https://github.com/Daverlo/codeql-python-autobuild/archive/master.zip');
const extracted = await toolcache.extractZip(repoPath);
scriptsFolder = path.join(extracted, 'codeql-python-autobuild-master');
}
catch (e) {
// The download should not fail, but if it does we just abort trying to set up the python deps
core.warning('Unable to download and extract the scripts needed for installing the python dependencies');
core.endGroup();
return;
}
// Setup tools
try {
await exec.exec(path.join(scriptsFolder, 'install_tools.sh'));
}
catch (e) {
// This script tries to install some needed tools in the runner. It should not fail, but if it does
// we just abort the process without failing the action
core.warning('Unable to download and extract the scripts needed for installing the python dependencies');
core.endGroup();
return;
}
// Install dependencies
try {
await exec.exec(path.join(scriptsFolder, 'auto_install_packages.py'), [codeql.getDir()]);
}
catch (e) {
core.endGroup();
throw new Error('We were unable to install your python dependencies. You can call this action with "setup-python-dependencies: false" to disable this process');
}
core.endGroup();
}
async function run() {
let config;
let codeql;
try {
if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
return;
}
core.startGroup('Setup CodeQL tools');
codeql = await codeql_1.setupCodeQL();
await codeql.printVersion();
core.endGroup();
core.startGroup('Load language configuration');
config = await configUtils.initConfig();
analysisPaths.includeAndExcludeAnalysisPaths(config);
core.endGroup();
}
catch (e) {
core.setFailed(e.message);
await util.reportActionAborted('init', e.message);
return;
}
try {
const sourceRoot = path.resolve();
// Forward Go flags
const goFlags = process.env['GOFLAGS'];
if (goFlags) {
core.exportVariable('GOFLAGS', goFlags);
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
}
const setupPythonDependencies = core.getInput('setup-python-dependencies', { required: true });
if (config.languages.includes('python') && setupPythonDependencies === 'true') {
await installPythonDeps(codeql);
}
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
core.exportVariable('CODEQL_RAM', codeqlRam);
const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
await io.mkdirP(databaseFolder);
let tracedLanguages = {};
let scannedLanguages = [];
// TODO: replace this code once CodeQL supports multi-language tracing
for (let language of config.languages) {
const languageDatabase = path.join(databaseFolder, language);
// Init language database
await codeql.databaseInit(languageDatabase, language, sourceRoot);
// TODO: add better detection of 'traced languages' instead of using a hard coded list
if (['cpp', 'java', 'csharp'].includes(language)) {
const config = await tracerConfig(codeql, languageDatabase);
tracedLanguages[language] = config;
}
else {
scannedLanguages.push(language);
}
}
const tracedLanguageKeys = Object.keys(tracedLanguages);
if (tracedLanguageKeys.length > 0) {
const mainTracerConfig = concatTracerConfigs(tracedLanguages);
if (mainTracerConfig.spec) {
for (let entry of Object.entries(mainTracerConfig.env)) {
core.exportVariable(entry[0], entry[1]);
}
core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
if (process.platform === 'darwin') {
core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeql.getDir(), 'tools', 'osx64', 'libtrace.dylib'));
}
else if (process.platform === 'win32') {
await exec.exec('powershell', [
path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
path.resolve(codeql.getDir(), 'tools', 'win64', 'tracer.exe'),
], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
}
else {
core.exportVariable('LD_PRELOAD', path.join(codeql.getDir(), 'tools', 'linux64', '${LIB}trace.so'));
}
}
}
core.exportVariable(sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES, scannedLanguages.join(','));
core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));
// TODO: make this a "private" environment variable of the action
core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
}
catch (error) {
core.setFailed(error.message);
await util.reportActionFailed('init', error.message, error.stack);
return;
}
await util.reportActionSucceeded('init');
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
}
run().catch(e => {
core.setFailed("init action failed: " + e);
console.log(e);
});
//# sourceMappingURL=setup-tracer.js.map
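
A worked sketch of the spec-file merge performed by concatTracerConfigs above, using the [log_file, number_of_blocks, blocks_text] layout described in its comment; the file names and block text are invented, and cpp is kept last as the code requires.

// Hypothetical per-language spec files, already split into lines:
const javaSpec = ["/tmp/java.log", "1", "javaBlock"];
const cppSpec = ["/tmp/cpp.log", "2", "cppBlockA", "cppBlockB"];

// The compound spec gets a new log path, the summed block count, and every
// language's block text, with cpp concatenated last.
const totalCount = parseInt(javaSpec[1], 10) + parseInt(cppSpec[1], 10); // 3
const compoundSpec = [
    "/tmp/compound-build-tracer.log",
    totalCount.toString(10),
    ...javaSpec.slice(2),
    ...cppSpec.slice(2),
];
console.log(compoundSpec.join("\n"));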

1
lib/setup-tracer.js.map Normal file

File diff suppressed because one or more lines are too long

View File

@@ -1,10 +1,16 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather that the init action.
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
// Populated when the init action completes successfully
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
//# sourceMappingURL=shared-environment.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}

22
lib/test-utils.js generated Normal file
View File

@@ -0,0 +1,22 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function silenceDebugOutput(test) {
const typedTest = test;
typedTest.beforeEach(t => {
const processStdoutWrite = process.stdout.write.bind(process.stdout);
t.context.write = processStdoutWrite;
process.stdout.write = (str, encoding, cb) => {
// Core library will directly call process.stdout.write for commands
// We don't want :: commands to be executed by the runner during tests
if (!str.match(/^::/)) {
processStdoutWrite(str, encoding, cb);
}
return true;
};
});
typedTest.afterEach(t => {
process.stdout.write = t.context.write;
});
}
exports.silenceDebugOutput = silenceDebugOutput;
//# sourceMappingURL=test-utils.js.map

1
lib/test-utils.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"test-utils.js","sourceRoot":"","sources":["../src/test-utils.ts"],"names":[],"mappings":";;AAEA,SAAgB,kBAAkB,CAAC,IAAwB;IACzD,MAAM,SAAS,GAAG,IAAmC,CAAC;IAEtD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACrB,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG,kBAAkB,CAAC;QACrC,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,GAAQ,EAAE,QAAc,EAAE,EAA0B,EAAE,EAAE;YAC5E,oEAAoE;YACpE,sEAAsE;YACtE,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACnB,kBAAkB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC;aACzC;YACD,OAAO,IAAI,CAAC;QAChB,CAAC,CAAC;IACN,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;QACpB,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3C,CAAC,CAAC,CAAC;AACL,CAAC;AAnBD,gDAmBC"}

12
lib/testing-utils.js generated
View File

@@ -19,19 +19,19 @@ function wrapOutput(context) {
// write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean;
return (chunk, encoding, cb) => {
// Work out which method overload we are in
if (cb === undefined && typeof encoding === "function") {
if (cb === undefined && typeof encoding === 'function') {
cb = encoding;
encoding = undefined;
}
// Record the output
if (typeof chunk === "string") {
if (typeof chunk === 'string') {
context.testOutput += chunk;
}
else {
context.testOutput += new TextDecoder(encoding || "utf-8").decode(chunk);
context.testOutput += new TextDecoder(encoding || 'utf-8').decode(chunk);
}
// Satisfy contract by calling callback when done
if (cb !== undefined && typeof cb === "function") {
if (cb !== undefined && typeof cb === 'function') {
cb();
}
return true;
@@ -39,7 +39,7 @@ function wrapOutput(context) {
}
function setupTests(test) {
const typedTest = test;
typedTest.beforeEach((t) => {
typedTest.beforeEach(t => {
// Set an empty CodeQL object so that all method calls will fail
// unless the test explicitly sets one up.
CodeQL.setCodeQL({});
@@ -57,7 +57,7 @@ function setupTests(test) {
t.context.env = {};
Object.assign(t.context.env, process.env);
});
typedTest.afterEach.always((t) => {
typedTest.afterEach.always(t => {
// Restore stdout and stderr
// The captured output is only replayed if the test failed
process.stdout.write = t.context.stdoutWrite;

View File

@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"}
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AAInC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"}

View File

@@ -1,86 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
/**
* Wrapper for toolrunner.Toolrunner which checks for specific return code and/or regex matches in console output.
* Output will be streamed to the live console as well as captured for subsequent processing.
* Returns promise with return code
*
* @param commandLine command to execute
* @param args optional arguments for tool. Escaping is handled by the lib.
* @param matchers defines specific codes and/or regexes that should lead to return of a custom error
* @param options optional exec options. See ExecOptions
* @returns Promise<number> exit code
*/
async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
var _a, _b, _c;
let stdout = "";
let stderr = "";
const listeners = {
stdout: (data) => {
var _a, _b;
stdout += data.toString();
if (((_b = (_a = options) === null || _a === void 0 ? void 0 : _a.listeners) === null || _b === void 0 ? void 0 : _b.stdout) !== undefined) {
options.listeners.stdout(data);
}
else {
// if no stdout listener was originally defined then we match default behavior of Toolrunner
process.stdout.write(data);
}
},
stderr: (data) => {
var _a, _b;
stderr += data.toString();
if (((_b = (_a = options) === null || _a === void 0 ? void 0 : _a.listeners) === null || _b === void 0 ? void 0 : _b.stderr) !== undefined) {
options.listeners.stderr(data);
}
else {
// if no stderr listener was originally defined then we match default behavior of Toolrunner
process.stderr.write(data);
}
},
};
// we capture the original return code or error so that if no match is found we can duplicate the behavior
let returnState;
try {
returnState = await new toolrunnner.ToolRunner(commandLine, args, {
...options,
listeners,
ignoreReturnCode: true,
}).exec();
}
catch (e) {
returnState = e;
}
// if there is a zero return code then we do not apply the matchers
if (returnState === 0)
return returnState;
if (matchers) {
for (const matcher of matchers) {
if (matcher.exitCode === returnState || ((_a = matcher.outputRegex) === null || _a === void 0 ? void 0 : _a.test(stderr)) || ((_b = matcher.outputRegex) === null || _b === void 0 ? void 0 : _b.test(stdout))) {
throw new Error(matcher.message);
}
}
}
if (typeof returnState === "number") {
// only if we were instructed to ignore the return code do we ever return it non-zero
if ((_c = options) === null || _c === void 0 ? void 0 : _c.ignoreReturnCode) {
return returnState;
}
else {
throw new Error(`The process \'${commandLine}\' failed with exit code ${returnState}`);
}
}
else {
throw returnState;
}
}
exports.toolrunnerErrorCatcher = toolrunnerErrorCatcher;
//# sourceMappingURL=toolrunner-error-catcher.js.map
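
For instance, a caller can translate a known failure signature into a clearer error; this is a hedged sketch with a made-up command and matcher, mirroring the test file that follows.

const { toolrunnerErrorCatcher } = require("./toolrunner-error-catcher");

async function runBuildWithMatchers() {
    // A matcher fires when the exit code matches or the regex matches stdout/stderr;
    // a zero exit code skips the matchers entirely.
    const matchers = [
        { exitCode: 123, outputRegex: new RegExp("fatal error"), message: "The build step failed; see the log above" },
    ];
    return await toolrunnerErrorCatcher("make", ["all"], matchers);
}

runBuildWithMatchers().catch((e) => console.error(e.message));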

View File

@@ -1 +0,0 @@
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;AACA,0EAA4D;AAI5D;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,WAAW,EAAE,IAAI,EAAE;YAChE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI;SACvB,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,CAAC;KACjB;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW,WAChC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,WACjC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,UAAI,OAAO,0CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,iBAAiB,WAAW,4BAA4B,WAAW,EAAE,CACtE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AArED,wDAqEC"}

@@ -1,145 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const exec = __importStar(require("@actions/exec"));
const ava_1 = __importDefault(require("ava"));
const testing_utils_1 = require("./testing-utils");
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("matchers are never applied if non-error exit", async (t) => {
const testArgs = buildDummyArgs("foo bar\\nblort qux", "foo bar\\nblort qux", "", 0);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" },
];
t.deepEqual(await exec.exec("node", testArgs), 0);
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), 0);
});
ava_1.default("regex matchers are applied to stdout for non-zero exit code", async (t) => {
const testArgs = buildDummyArgs("foo bar\\nblort qux", "", "", 1);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("regex matchers are applied to stderr for non-zero exit code", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("matcher returns correct error message when multiple matchers defined", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
const matchers = [
{ exitCode: 456, outputRegex: new RegExp("lorem ipsum"), message: "😩" },
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
{ exitCode: 789, outputRegex: new RegExp("blah blah"), message: "🤦‍♂️" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("matcher returns first match to regex when multiple matches", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 1);
const matchers = [
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "🦄" },
{ exitCode: 789, outputRegex: new RegExp("blah blah"), message: "🤦‍♂️" },
{ exitCode: 987, outputRegex: new RegExp("foo bar"), message: "🚫" },
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 1",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("exit code matchers are applied", async (t) => {
const testArgs = buildDummyArgs("non matching string", "foo bar\\nblort qux", "", 123);
const matchers = [
{
exitCode: 123,
outputRegex: new RegExp("this will not match"),
message: "🦄",
},
];
await t.throwsAsync(exec.exec("node", testArgs), {
instanceOf: Error,
message: "The process 'node' failed with exit code 123",
});
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, matchers), {
instanceOf: Error,
message: "🦄",
});
});
ava_1.default("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
const testArgs = buildDummyArgs("standard output", "error output", "", 199);
await t.throwsAsync(toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], { ignoreReturnCode: false }), { instanceOf: Error });
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], {
ignoreReturnCode: true,
}), 199);
});
ava_1.default("execErrorCatcher preserves behavior of provided listeners", async (t) => {
const stdoutExpected = "standard output";
const stderrExpected = "error output";
let stdoutActual = "";
let stderrActual = "";
const listeners = {
stdout: (data) => {
stdoutActual += data.toString();
},
stderr: (data) => {
stderrActual += data.toString();
},
};
const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
t.deepEqual(await toolrunner_error_catcher_1.toolrunnerErrorCatcher("node", testArgs, [], {
listeners,
}), 0);
t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
t.deepEqual(stderrActual, `${stderrExpected}\n`);
});
function buildDummyArgs(stdoutContents, stderrContents, desiredErrorMessage, desiredExitCode) {
let command = "";
if (stdoutContents)
command += `console.log("${stdoutContents}");`;
if (stderrContents)
command += `console.error("${stderrContents}");`;
if (command.length === 0)
throw new Error("Must provide contents for either stdout or stderr");
if (desiredErrorMessage)
command += `throw new Error("${desiredErrorMessage}");`;
if (desiredExitCode)
command += `process.exitCode = ${desiredExitCode};`;
return ["-e", command];
}
//# sourceMappingURL=toolrunner-error-catcher.test.js.map

File diff suppressed because one or more lines are too long

lib/tracer-config.js generated (151 changed lines)

@@ -1,151 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const languages_1 = require("./languages");
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set([
    "SEMMLE_PRELOAD_libtrace",
    "SEMMLE_RUNNER",
    "SEMMLE_COPY_EXECUTABLES_ROOT",
    "SEMMLE_DEPTRACE_SOCKET",
    "SEMMLE_JAVA_TOOL_OPTIONS",
]);
async function getTracerConfigForLanguage(codeql, config, language) {
const env = await codeql.getTracerEnv(util.getCodeQLDatabasePath(config.tempDir, language));
const spec = env["ODASA_TRACER_CONFIGURATION"];
const info = { spec, env: {} };
// Extract critical tracer variables from the environment
for (const entry of Object.entries(env)) {
const key = entry[0];
const value = entry[1];
// skip ODASA_TRACER_CONFIGURATION as it is handled separately
if (key === "ODASA_TRACER_CONFIGURATION") {
continue;
}
// skip undefined values
if (typeof value === "undefined") {
continue;
}
// Keep variables that do not exist in current environment. In addition always keep
// critical and CODEQL_ variables
if (typeof process.env[key] === "undefined" ||
CRITICAL_TRACER_VARS.has(key) ||
key.startsWith("CODEQL_")) {
info.env[key] = value;
}
}
return info;
}
exports.getTracerConfigForLanguage = getTracerConfigForLanguage;
function concatTracerConfigs(tracerConfigs, config) {
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
// A tracer 'spec' file has the following format [log_file, number_of_blocks, blocks_text]
// Merge the environments
const env = {};
let copyExecutables = false;
let envSize = 0;
for (const v of Object.values(tracerConfigs)) {
for (const e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
// skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
if (name === "SEMMLE_COPY_EXECUTABLES_ROOT") {
copyExecutables = true;
}
else if (name in env) {
if (env[name] !== value) {
throw Error(`Incompatible values in environment parameter ${name}: ${env[name]} and ${value}`);
}
}
else {
env[name] = value;
envSize += 1;
}
}
}
// Concatenate spec files into a new spec file
const languages = Object.keys(tracerConfigs);
const cppIndex = languages.indexOf("cpp");
// Make sure cpp is the last language, if it's present since it must be concatenated last
if (cppIndex !== -1) {
const lastLang = languages[languages.length - 1];
languages[languages.length - 1] = languages[cppIndex];
languages[cppIndex] = lastLang;
}
const totalLines = [];
let totalCount = 0;
for (const lang of languages) {
const lines = fs
.readFileSync(tracerConfigs[lang].spec, "utf8")
.split(/\r?\n/);
const count = parseInt(lines[1], 10);
totalCount += count;
totalLines.push(...lines.slice(2));
}
const newLogFilePath = path.resolve(config.tempDir, "compound-build-tracer.log");
const spec = path.resolve(config.tempDir, "compound-spec");
const compoundTempFolder = path.resolve(config.tempDir, "compound-temp");
const newSpecContent = [
newLogFilePath,
totalCount.toString(10),
...totalLines,
];
if (copyExecutables) {
env["SEMMLE_COPY_EXECUTABLES_ROOT"] = compoundTempFolder;
envSize += 1;
}
fs.writeFileSync(spec, newSpecContent.join("\n"));
// Prepare the content of the compound environment file
let buffer = Buffer.alloc(4);
buffer.writeInt32LE(envSize, 0);
for (const e of Object.entries(env)) {
const key = e[0];
const value = e[1];
        const lineBuffer = Buffer.from(`${key}=${value}\0`, "utf8");
const sizeBuffer = Buffer.alloc(4);
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
}
// Write the compound environment
const envPath = `${spec}.environment`;
fs.writeFileSync(envPath, buffer);
return { env, spec };
}
exports.concatTracerConfigs = concatTracerConfigs;
async function getCombinedTracerConfig(config, codeql) {
// Abort if there are no traced languages as there's nothing to do
const tracedLanguages = config.languages.filter(languages_1.isTracedLanguage);
if (tracedLanguages.length === 0) {
return undefined;
}
// Get all the tracer configs and combine them together
const tracedLanguageConfigs = {};
for (const language of tracedLanguages) {
tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
}
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
return mainTracerConfig;
}
exports.getCombinedTracerConfig = getCombinedTracerConfig;
//# sourceMappingURL=tracer-config.js.map
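
As context for tracer-config.js above, a sketch of how the combined tracer configuration might be consumed. This is not part of the diff, and the build wiring is hypothetical: the returned env block is merged into the environment of the traced build process.

const toolrunnner = require("@actions/exec/lib/toolrunner");
const tracerConfig = require("./tracer-config");

// Hypothetical wiring: run a build command with the tracer variables injected.
async function runTracedBuild(config, codeql, buildCommand, buildArgs) {
    const combined = await tracerConfig.getCombinedTracerConfig(config, codeql);
    if (combined === undefined) {
        // No traced languages in this configuration, so nothing to trace.
        return;
    }
    await new toolrunnner.ToolRunner(buildCommand, buildArgs, {
        env: { ...process.env, ...combined.env },
    }).exec();
}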

@@ -1 +0,0 @@
{"version":3,"file":"tracer-config.js","sourceRoot":"","sources":["../src/tracer-config.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAI7B,2CAAyD;AACzD,6CAA+B;AAO/B,MAAM,oBAAoB,GAAG,IAAI,GAAG,CAAC;IACnC,yBAAyB;IACzB,AAD0B;IAE1B,eAAe;IACf,AADgB;IAEhB,8BAA8B;IAC9B,AAD+B;IAE/B,wBAAwB;IACxB,AADyB;IAEzB,0BAA0B;CAC3B,CAAC,CAAC;AAEI,KAAK,UAAU,0BAA0B,CAC9C,MAAc,EACd,MAA0B,EAC1B,QAAkB;IAElB,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,YAAY,CACnC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CACrD,CAAC;IAEF,MAAM,IAAI,GAAG,GAAG,CAAC,4BAA4B,CAAC,CAAC;IAC/C,MAAM,IAAI,GAAiB,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,CAAC;IAE7C,yDAAyD;IACzD,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvC,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACvB,8DAA8D;QAC9D,IAAI,GAAG,KAAK,4BAA4B,EAAE;YACxC,SAAS;SACV;QACD,wBAAwB;QACxB,IAAI,OAAO,KAAK,KAAK,WAAW,EAAE;YAChC,SAAS;SACV;QACD,mFAAmF;QACnF,iCAAiC;QACjC,IACE,OAAO,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,WAAW;YACvC,oBAAoB,CAAC,GAAG,CAAC,GAAG,CAAC;YAC7B,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EACzB;YACA,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;SACvB;KACF;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AAnCD,gEAmCC;AAED,SAAgB,mBAAmB,CACjC,aAA+C,EAC/C,MAA0B;IAE1B,iGAAiG;IACjG,0FAA0F;IAE1F,yBAAyB;IACzB,MAAM,GAAG,GAA8B,EAAE,CAAC;IAC1C,IAAI,eAAe,GAAG,KAAK,CAAC;IAC5B,IAAI,OAAO,GAAG,CAAC,CAAC;IAChB,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE;QAC5C,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;YACrC,MAAM,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YAClB,MAAM,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YACnB,gEAAgE;YAChE,IAAI,IAAI,KAAK,8BAA8B,EAAE;gBAC3C,eAAe,GAAG,IAAI,CAAC;aACxB;iBAAM,IAAI,IAAI,IAAI,GAAG,EAAE;gBACtB,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,KAAK,EAAE;oBACvB,MAAM,KAAK,CACT,gDAAgD,IAAI,KAAK,GAAG,CAAC,IAAI,CAAC,QAAQ,KAAK,EAAE,CAClF,CAAC;iBACH;aACF;iBAAM;gBACL,GAAG,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC;gBAClB,OAAO,IAAI,CAAC,CAAC;aACd;SACF;KACF;IAED,8CAA8C;IAC9C,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;IAC7C,MAAM,QAAQ,GAAG,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC1C,yFAAyF;IACzF,IAAI,QAAQ,KAAK,CAAC,CAAC,EAAE;QACnB,MAAM,QAAQ,GAAG,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QACjD,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,CAAC;QACtD,SAAS,CAAC,QAAQ,CAAC,GAAG,QAAQ,CAAC;KAChC;IAED,MAAM,UAAU,GAAa,EAAE,CAAC;IAChC,IAAI,UAAU,GAAG,CAAC,CAAC;IACnB,KAAK,MAAM,IAAI,IAAI,SAAS,EAAE;QAC5B,MAAM,KAAK,GAAG,EAAE;aACb,YAAY,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC;aAC9C,KAAK,CAAC,OAAO,CAAC,CAAC;QAClB,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QACrC,UAAU,IAAI,KAAK,CAAC;QACpB,UAAU,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACpC;IAED,MAAM,cAAc,GAAG,IAAI,CAAC,OAAO,CACjC,MAAM,CAAC,OAAO,EACd,2BAA2B,CAC5B,CAAC;IACF,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;IAC3D,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;IACzE,MAAM,cAAc,GAAG;QACrB,cAAc;QACd,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QACvB,GAAG,UAAU;KACd,CAAC;IAEF,IAAI,eAAe,EAAE;QACnB,GAAG,CAAC,8BAA8B,CAAC,GAAG,kBAAkB,CAAC;QACzD,OAAO,IAAI,CAAC,CAAC;KACd;IAED,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IAElD,uDAAuD;IACvD,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAC7B,MAAM,CAAC,YAAY,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;IAChC,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACnC,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACjB,MAAM,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACnB,MAAM,UAAU,GAAG,IAAI,MAAM,CAAC,GAAG,GAAG,IAAI,KAAK,IAAI,EAAE,MAAM,CAAC,CAAC;QAC3D,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,C
AAC,CAAC,CAAC;QACnC,UAAU,CAAC,YAAY,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC9C,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,CAAC;KAC1D;IACD,iCAAiC;IACjC,MAAM,OAAO,GAAG,GAAG,IAAI,cAAc,CAAC;IACtC,EAAE,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAElC,OAAO,EAAE,GAAG,EAAE,IAAI,EAAE,CAAC;AACvB,CAAC;AAvFD,kDAuFC;AAEM,KAAK,UAAU,uBAAuB,CAC3C,MAA0B,EAC1B,MAAc;IAEd,kEAAkE;IAClE,MAAM,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IAClE,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;QAChC,OAAO,SAAS,CAAC;KAClB;IAED,uDAAuD;IACvD,MAAM,qBAAqB,GAAqC,EAAE,CAAC;IACnE,KAAK,MAAM,QAAQ,IAAI,eAAe,EAAE;QACtC,qBAAqB,CAAC,QAAQ,CAAC,GAAG,MAAM,0BAA0B,CAChE,MAAM,EACN,MAAM,EACN,QAAQ,CACT,CAAC;KACH;IACD,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,qBAAqB,EAAE,MAAM,CAAC,CAAC;IAE5E,8BAA8B;IAC9B,gBAAgB,CAAC,GAAG,CAAC,4BAA4B,CAAC,GAAG,gBAAgB,CAAC,IAAI,CAAC;IAC3E,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC;IACjD,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACjC,gBAAgB,CAAC,GAAG,CAAC,uBAAuB,CAAC,GAAG,IAAI,CAAC,IAAI,CACvD,SAAS,EACT,OAAO,EACP,OAAO,EACP,gBAAgB,CACjB,CAAC;KACH;SAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QACvC,gBAAgB,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,IAAI,CAC5C,SAAS,EACT,OAAO,EACP,SAAS,EACT,gBAAgB,CACjB,CAAC;KACH;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAzCD,0DAyCC"}

@@ -1,278 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
function getTestConfig(tmpDir) {
return {
languages: [languages_1.Language.java],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
};
}
// A very minimal setup
ava_1.default("getTracerConfigForLanguage - minimal setup", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const codeQL = codeql_1.setCodeQL({
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
};
},
});
const result = await tracer_config_1.getTracerConfigForLanguage(codeQL, config, languages_1.Language.javascript);
t.deepEqual(result, { spec: "abc", env: { foo: "bar" } });
});
});
// Existing vars should not be overwritten, unless they are critical or prefixed with CODEQL_
ava_1.default("getTracerConfigForLanguage - existing / critical vars", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
// Set up some variables in the environment
process.env["foo"] = "abc";
process.env["SEMMLE_PRELOAD_libtrace"] = "abc";
process.env["SEMMLE_RUNNER"] = "abc";
process.env["SEMMLE_COPY_EXECUTABLES_ROOT"] = "abc";
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
process.env["SEMMLE_JAVA_TOOL_OPTIONS"] = "abc";
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
process.env["CODEQL_VAR"] = "abc";
// Now CodeQL returns all these variables, and one more, with different values
const codeQL = codeql_1.setCodeQL({
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
baz: "qux",
SEMMLE_PRELOAD_libtrace: "SEMMLE_PRELOAD_libtrace",
SEMMLE_RUNNER: "SEMMLE_RUNNER",
SEMMLE_COPY_EXECUTABLES_ROOT: "SEMMLE_COPY_EXECUTABLES_ROOT",
SEMMLE_DEPTRACE_SOCKET: "SEMMLE_DEPTRACE_SOCKET",
SEMMLE_JAVA_TOOL_OPTIONS: "SEMMLE_JAVA_TOOL_OPTIONS",
CODEQL_VAR: "CODEQL_VAR",
};
},
});
const result = await tracer_config_1.getTracerConfigForLanguage(codeQL, config, languages_1.Language.javascript);
t.deepEqual(result, {
spec: "abc",
env: {
// Should contain all variables except 'foo', because that already existed in the
// environment with a different value, and is not deemed a "critical" variable.
baz: "qux",
SEMMLE_PRELOAD_libtrace: "SEMMLE_PRELOAD_libtrace",
SEMMLE_RUNNER: "SEMMLE_RUNNER",
SEMMLE_COPY_EXECUTABLES_ROOT: "SEMMLE_COPY_EXECUTABLES_ROOT",
SEMMLE_DEPTRACE_SOCKET: "SEMMLE_DEPTRACE_SOCKET",
SEMMLE_JAVA_TOOL_OPTIONS: "SEMMLE_JAVA_TOOL_OPTIONS",
CODEQL_VAR: "CODEQL_VAR",
},
});
});
});
ava_1.default("concatTracerConfigs - minimal configs correctly combined", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
a: "a",
b: "b",
},
};
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
c: "c",
},
};
const result = tracer_config_1.concatTracerConfigs({ javascript: tc1, python: tc2 }, config);
t.deepEqual(result, {
spec: path.join(tmpDir, "compound-spec"),
env: {
a: "a",
b: "b",
c: "c",
},
});
t.true(fs.existsSync(result.spec));
t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nabc\ndef\nghi`);
});
});
ava_1.default("concatTracerConfigs - conflicting env vars", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n0");
// Ok if env vars have the same name and the same value
t.deepEqual(tracer_config_1.concatTracerConfigs({
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "b", c: "c" } },
}, config).env, {
a: "a",
b: "b",
c: "c",
});
// Throws if env vars have same name but different values
const e = t.throws(() => tracer_config_1.concatTracerConfigs({
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "c" } },
}, config));
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
});
});
ava_1.default("concatTracerConfigs - cpp spec lines come last if present", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
a: "a",
b: "b",
},
};
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
c: "c",
},
};
const result = tracer_config_1.concatTracerConfigs({ cpp: tc1, python: tc2 }, config);
t.deepEqual(result, {
spec: path.join(tmpDir, "compound-spec"),
env: {
a: "a",
b: "b",
c: "c",
},
});
t.true(fs.existsSync(result.spec));
t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nghi\nabc\ndef`);
});
});
ava_1.default("concatTracerConfigs - SEMMLE_COPY_EXECUTABLES_ROOT is updated to point to compound spec", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n0");
const result = tracer_config_1.concatTracerConfigs({
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { SEMMLE_COPY_EXECUTABLES_ROOT: "foo" } },
}, config);
t.deepEqual(result.env, {
a: "a",
b: "b",
SEMMLE_COPY_EXECUTABLES_ROOT: path.join(tmpDir, "compound-temp"),
});
});
});
ava_1.default("concatTracerConfigs - compound environment file is created correctly", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
a: "a",
},
};
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
foo: "bar_baz",
},
};
const result = tracer_config_1.concatTracerConfigs({ javascript: tc1, python: tc2 }, config);
const envPath = `${result.spec}.environment`;
t.true(fs.existsSync(envPath));
const buffer = fs.readFileSync(envPath);
// Contents is binary data
t.deepEqual(buffer.length, 28);
t.deepEqual(buffer.readInt32LE(0), 2); // number of env vars
t.deepEqual(buffer.readInt32LE(4), 4); // length of env var definition
t.deepEqual(buffer.toString("utf8", 8, 12), "a=a\0"); // [key]=[value]\0
t.deepEqual(buffer.readInt32LE(12), 12); // length of env var definition
t.deepEqual(buffer.toString("utf8", 16, 28), "foo=bar_baz\0"); // [key]=[value]\0
});
});
ava_1.default("getCombinedTracerConfig - return undefined when no languages are traced languages", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
// No traced languages
config.languages = [languages_1.Language.javascript, languages_1.Language.python];
const codeQL = codeql_1.setCodeQL({
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
};
},
});
t.deepEqual(await tracer_config_1.getCombinedTracerConfig(config, codeQL), undefined);
});
});
ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n2\nabc\ndef");
const codeQL = codeql_1.setCodeQL({
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: spec,
foo: "bar",
};
},
});
const result = await tracer_config_1.getCombinedTracerConfig(config, codeQL);
const expectedEnv = {
foo: "bar",
ODASA_TRACER_CONFIGURATION: result.spec,
};
if (process.platform === "darwin") {
expectedEnv["DYLD_INSERT_LIBRARIES"] = path.join(path.dirname(codeQL.getPath()), "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
expectedEnv["LD_PRELOAD"] = path.join(path.dirname(codeQL.getPath()), "tools", "linux64", "${LIB}trace.so");
}
t.deepEqual(result, {
spec: path.join(tmpDir, "compound-spec"),
env: expectedEnv,
});
});
});
//# sourceMappingURL=tracer-config.test.js.map
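
For context, a small sketch (not part of the diff) of reading back the compound .environment file whose layout the test above checks: a little-endian int32 entry count, then for each entry an int32 byte length followed by a NUL-terminated key=value string.

const fs = require("fs");

// Parse an .environment file written by concatTracerConfigs.
function readCompoundEnvironment(envPath) {
    const buffer = fs.readFileSync(envPath);
    const env = {};
    const count = buffer.readInt32LE(0);
    let offset = 4;
    for (let i = 0; i < count; i++) {
        const length = buffer.readInt32LE(offset);
        offset += 4;
        // Drop the trailing NUL byte, then split on the first '='.
        const entry = buffer.toString("utf8", offset, offset + length - 1);
        offset += length;
        const eq = entry.indexOf("=");
        env[entry.slice(0, eq)] = entry.slice(eq + 1);
    }
    return env;
}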

File diff suppressed because one or more lines are too long

lib/tracer-env.js generated Normal file (21 changed lines)

@@ -0,0 +1,21 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const env = {};
for (let entry of Object.entries(process.env)) {
const key = entry[0];
const value = entry[1];
if (typeof value !== 'undefined' && key !== '_' && !key.startsWith('JAVA_MAIN_CLASS_')) {
env[key] = value;
}
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
//# sourceMappingURL=tracer-env.js.map
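
A brief usage note for tracer-env.js above (illustrative, not part of the diff): the script takes a single argument, the path the filtered environment should be written to as JSON, and echoes that path on stdout.

// Invoked as, for example: node lib/tracer-env.js /tmp/tracer-env.json
// The captured environment can then be read back:
const fs = require("fs");
const captured = JSON.parse(fs.readFileSync("/tmp/tracer-env.json", "utf-8"));
console.log(`${Object.keys(captured).length} environment variables captured`);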

lib/tracer-env.js.map Normal file (1 changed line)

@@ -0,0 +1 @@
{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC7C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACtF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KAClB;CACF;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}

lib/upload-lib.js generated (227 changed lines)

@@ -23,18 +23,18 @@ const util = __importStar(require("./util"));
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
const combinedSarif = {
let combinedSarif = {
version: null,
runs: [],
runs: []
};
for (const sarifFile of sarifFiles) {
const sarifObject = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
for (let sarifFile of sarifFiles) {
let sarifObject = JSON.parse(fs.readFileSync(sarifFile, 'utf8'));
// Check SARIF version
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
}
else if (combinedSarif.version !== sarifObject.version) {
throw `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`;
throw "Different SARIF versions encountered: " + combinedSarif.version + " and " + sarifObject.version;
}
combinedSarif.runs.push(...sarifObject.runs);
}
@@ -43,49 +43,74 @@ function combineSarifFiles(sarifFiles) {
exports.combineSarifFiles = combineSarifFiles;
// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger) {
logger.info("Uploading results");
async function uploadPayload(payload) {
core.info('Uploading results');
// If in test mode we don't want to upload the results
const testMode = process.env["TEST_MODE"] === "true" || false;
const testMode = process.env['TEST_MODE'] === 'true' || false;
if (testMode) {
return;
return true;
}
const client = api.getApiClient(githubAuth, githubUrl);
const reqURL = mode === "actions"
? "PUT /repos/:owner/:repo/code-scanning/analysis"
: "POST /repos/:owner/:repo/code-scanning/sarifs";
const response = await client.request(reqURL, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
});
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
    // Make up to 4 attempts to upload, and sleep for the corresponding
    // number of seconds between attempts.
    // We don't want to back off too much, to avoid wasting Actions
    // minutes, but waiting a little bit between attempts may help.
const backoffPeriods = [1, 5, 15];
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
const response = await api.getApiClient().request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
owner: owner,
repo: repo,
data: payload,
}));
core.debug('response status: ' + response.status);
const statusCode = response.status;
if (statusCode === 202) {
core.info("Successfully uploaded results");
return true;
}
const requestID = response.headers["x-github-request-id"];
// On any other status code that's not 5xx mark the upload as failed
if (!statusCode || statusCode < 500 || statusCode >= 600) {
core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
return false;
}
// On a 5xx status code we may retry the request
if (attempt < backoffPeriods.length) {
// Log the failure as a warning but don't mark the action as failed yet
core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
// Sleep for the backoff period
await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
continue;
}
else {
// If the upload fails with 5xx then we assume it is a temporary problem
// and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
return false;
}
}
return false;
}
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger) {
const sarifFiles = [];
if (!fs.existsSync(sarifPath)) {
throw new Error(`Path does not exist: ${sarifPath}`);
}
if (fs.lstatSync(sarifPath).isDirectory()) {
const paths = fs
.readdirSync(sarifPath)
.filter((f) => f.endsWith(".sarif"))
.map((f) => path.resolve(sarifPath, f));
for (const path of paths) {
sarifFiles.push(path);
}
async function upload(input) {
if (fs.lstatSync(input).isDirectory()) {
const sarifFiles = fs.readdirSync(input)
.filter(f => f.endsWith(".sarif"))
.map(f => path.resolve(input, f));
if (sarifFiles.length === 0) {
throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
core.setFailed("No SARIF files found to upload in \"" + input + "\".");
return false;
}
return await uploadFiles(sarifFiles);
}
else {
sarifFiles.push(sarifPath);
return await uploadFiles([input]);
}
return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger);
}
exports.upload = upload;
// Counts the number of results in the given SARIF file
@@ -98,83 +123,87 @@ function countResultsInSarif(sarif) {
}
exports.countResultsInSarif = countResultsInSarif;
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
function validateSarifFileSchema(sarifFilePath, logger) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
const schema = require("../src/sarif_v2.1.0_schema.json");
// Returns a non-empty list of error messages if the file is invalid,
// otherwise returns the empty list if the file is valid.
function validateSarifFileSchema(sarifFilePath) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
logger.startGroup(`Error details: ${error.stack}`);
logger.info(JSON.stringify(error, null, 2));
logger.endGroup();
}
// Set the main error message to the stacks of all the errors.
if (result.valid) {
return true;
}
else {
// Set the failure message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
const sarifErrors = result.errors.map((e) => `- ${e.stack}`);
throw new Error(`Unable to upload "${sarifFilePath}" as it is not valid SARIF:\n${sarifErrors.join("\n")}`);
const errorMessages = result.errors.map(e => "- " + e.stack);
core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));
// Also output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
core.startGroup("Error details: " + error.stack);
core.info(JSON.stringify(error, null, 2));
core.endGroup();
}
return false;
}
}
exports.validateSarifFileSchema = validateSarifFileSchema;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger) {
logger.info(`Uploading sarif files: ${JSON.stringify(sarifFiles)}`);
if (mode === "actions") {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
async function uploadFiles(sarifFiles) {
core.startGroup("Uploading results");
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
return false;
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
if (!validateSarifFileSchema(file)) {
return false;
}
}
const commitOid = await util.getCommitOid();
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRef();
const analysisKey = await util.getAnalysisKey();
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload, checkoutPath, logger);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = file_url_1.default(checkoutPath);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = file_url_1.default(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
return false;
}
let matrix = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const toolNames = util.getToolNames(sarifPayload);
let payload;
if (mode === "actions") {
payload = JSON.stringify({
commit_oid: commitOid,
ref,
analysis_key: analysisKey,
analysis_name: analysisName,
sarif: zipped_sarif,
workflow_run_id: workflowRunID,
checkout_uri: checkoutURI,
environment,
started_at: process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
tool_names: toolNames,
});
}
else {
payload = JSON.stringify({
commit_sha: commitOid,
ref,
sarif: zipped_sarif,
checkout_uri: checkoutURI,
tool_name: toolNames[0],
});
}
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt,
"tool_names": toolNames,
});
    // Log some useful debug info about the upload
const rawUploadSizeBytes = sarifPayload.length;
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
const zippedUploadSizeBytes = zipped_sarif.length;
logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
core.debug("Raw upload size: " + sarifPayload.length + " bytes");
core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
// Make the upload
await uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger);
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
};
const succeeded = await uploadPayload(payload);
core.endGroup();
return succeeded;
}
//# sourceMappingURL=upload-lib.js.map
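
To illustrate the retry behavior described in the comments above, here is a generic backoff sketch, not the action's actual upload code: up to four attempts, retrying only on 5xx responses, with fixed waits between attempts.

// Generic sketch: doRequest() is assumed to return an HTTP status code.
async function uploadWithBackoff(doRequest) {
    const backoffPeriods = [1, 5, 15]; // seconds to wait after attempts 1..3
    for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
        const status = await doRequest();
        if (status === 202) {
            return true; // upload accepted
        }
        const retryable = status >= 500 && status < 600;
        if (!retryable || attempt === backoffPeriods.length) {
            return false; // permanent failure, or attempts exhausted
        }
        await new Promise((resolve) => setTimeout(resolve, backoffPeriods[attempt] * 1000));
    }
    return false;
}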

File diff suppressed because one or more lines are too long

lib/upload-lib.test.js generated (15 changed lines)

@@ -11,16 +11,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("validateSarifFileSchema - valid", (t) => {
const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getRunnerLogger(true)));
ava_1.default('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
t.true(uploadLib.validateSarifFileSchema(inputFile));
});
ava_1.default("validateSarifFileSchema - invalid", (t) => {
const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`;
t.throws(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getRunnerLogger(true)));
ava_1.default('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
t.false(uploadLib.validateSarifFileSchema(inputFile));
// validateSarifFileSchema calls core.setFailed which sets the exit code on error
process.exitCode = 0;
});
//# sourceMappingURL=upload-lib.test.js.map

@@ -1 +1 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,uCAA4C;AAC5C,mDAA6C;AAC7C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC5C,MAAM,SAAS,GAAG,GAAG,SAAS,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CACf,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC9C,MAAM,SAAS,GAAG,GAAG,SAAS,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CACZ,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC"}
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;IACtD,iFAAiF;IACjF,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}

@@ -1,43 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
async function sendSuccessStatusReport(startedAt, uploadStats) {
const statusReportBase = await actionsUtil.createStatusReportBase("upload-sarif", "success", startedAt);
const statusReport = {
...statusReportBase,
...uploadStats,
};
await actionsUtil.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "starting", startedAt), true))) {
return;
}
try {
const uploadStats = await upload_lib.upload(actionsUtil.getRequiredInput("sarif_file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), actionsUtil.getRequiredInput("token"), actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"), "actions", logging_1.getActionsLogger());
await sendSuccessStatusReport(startedAt, uploadStats);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "failure", startedAt, error.message, error.stack));
return;
}
}
run().catch((e) => {
core.setFailed(`codeql/upload-sarif action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=upload-sarif-action.js.map

@@ -1 +0,0 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAM3C,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,EACD,IAAI,CACL,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,MAAM,WAAW,CAAC,YAAY,EAAE,EAChC,WAAW,CAAC,MAAM,EAAE,EACpB,MAAM,WAAW,CAAC,cAAc,EAAE,EAClC,WAAW,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAClD,WAAW,CAAC,gBAAgB,EAAE,EAC9B,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EACtC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,SAAS,EACT,0BAAgB,EAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,sCAAsC,CAAC,EAAE,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

lib/upload-sarif.js generated Normal file (35 changed lines)

@@ -0,0 +1,35 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function run() {
if (util.should_abort('upload-sarif', false) || !await util.reportActionStarting('upload-sarif')) {
return;
}
try {
if (await upload_lib.upload(core.getInput('sarif_file'))) {
await util.reportActionSucceeded('upload-sarif');
}
else {
await util.reportActionFailed('upload-sarif', 'upload');
}
}
catch (error) {
core.setFailed(error.message);
await util.reportActionFailed('upload-sarif', error.message, error.stack);
return;
}
}
run().catch(e => {
core.setFailed("codeql/upload-sarif action failed: " + e);
console.log(e);
});
//# sourceMappingURL=upload-sarif.js.map

Some files were not shown because too many files have changed in this diff.