Merge branch 'main' into query-overriding

Sam Partington
2020-08-25 10:39:53 +01:00
3443 changed files with 451694 additions and 2619 deletions


@@ -2,33 +2,44 @@ import test from 'ava';
import * as analysisPaths from './analysis-paths';
import {setupTests} from './testing-utils';
import * as util from './util';
setupTests(test);
test("emptyPaths", async t => {
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
return await util.withTmpDir(async tmpDir => {
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: '',
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
});
});
test("nonEmptyPaths", async t => {
const config = {
languages: [],
queries: {},
paths: ['path1', 'path2', '**/path3'],
pathsIgnore: ['path4', 'path5', 'path6/**'],
originalUserInput: {},
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
return await util.withTmpDir(async tmpDir => {
const config = {
languages: [],
queries: {},
paths: ['path1', 'path2', '**/path3'],
pathsIgnore: ['path4', 'path5', 'path6/**'],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: '',
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
});
});


@@ -2,11 +2,40 @@ import * as core from "@actions/core";
import * as github from "@actions/github";
import consoleLogLevel from "console-log-level";
export const getApiClient = function() {
import { getRequiredEnvParam, isLocalRun } from "./util";
export const getApiClient = function(githubAuth: string, githubApiUrl: string, allowLocalRun = false) {
if (isLocalRun() && !allowLocalRun) {
throw new Error('Invalid API call in local run');
}
return new github.GitHub(
core.getInput('token'),
{
auth: parseAuth(githubAuth),
baseUrl: githubApiUrl,
userAgent: "CodeQL Action",
log: consoleLogLevel({ level: "debug" })
});
};
// Parses the user input as either a single token,
// or a username and password / PAT.
function parseAuth(auth: string): string {
// Check if it's a username:password pair
const c = auth.indexOf(':');
if (c !== -1) {
return 'basic ' + Buffer.from(auth).toString('base64');
}
// Otherwise use the token as it is
return auth;
}
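// Illustrative sketch, not part of this change; the token values are hypothetical.
// A bare token passes through parseAuth unchanged, while a "username:token" pair
// becomes a basic authorization value:
//   parseAuth('ghp_exampletoken')          => 'ghp_exampletoken'
//   parseAuth('octocat:ghp_exampletoken')  => 'basic ' + base64('octocat:ghp_exampletoken')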
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been converted this function should be removed or made canonical
// and called only from the action entrypoints.
export function getActionsApiClient(allowLocalRun = false) {
return getApiClient(
core.getInput('token'),
getRequiredEnvParam('GITHUB_API_URL'),
allowLocalRun);
}
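// Illustrative usage, not part of this change (the auth value and URL are hypothetical).
// Actions entrypoints read the token and API URL from inputs/environment via getActionsApiClient(),
// while runner/CLI code paths supply them explicitly:
//   getApiClient('octocat:some-personal-access-token', 'https://ghe.example.com/api/v3', true);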


@@ -1,7 +1,8 @@
import * as core from '@actions/core';
import { getCodeQL } from './codeql';
import * as sharedEnv from './shared-environment';
import * as config_utils from './config-utils';
import { isTracedLanguage } from './languages';
import * as util from './util';
interface AutobuildStatusReport extends util.StatusReportBase {
@@ -36,16 +37,18 @@ async function run() {
const startedAt = new Date();
let language;
try {
if (util.should_abort('autobuild', true) ||
!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
return;
}
const config = await config_utils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'));
// Attempt to find a language to autobuild
// We want to pick the dominant language in the repo from the ones we're able to build.
// The languages are sorted in the order specified by the user, or by lines of code if we got
// them from the GitHub API, so try to build the first language on the list.
const autobuildLanguages = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]?.split(',') || [];
const autobuildLanguages = config.languages.filter(isTracedLanguage);
language = autobuildLanguages[0];
if (!language) {
@@ -60,13 +63,14 @@ async function run() {
}
core.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = getCodeQL();
const codeQL = getCodeQL(config.codeQLCmd);
await codeQL.runAutobuild(language);
core.endGroup();
} catch (error) {
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
console.log(error);
await sendCompletedStatusReport(startedAt, [language], language, error);
return;
}

src/cli.ts (new file, 78 lines)

@@ -0,0 +1,78 @@
import { Command } from 'commander';
import * as path from 'path';
import { getCLILogger } from './logging';
import { parseRepositoryNwo } from './repository';
import * as upload_lib from './upload-lib';
const program = new Command();
program.version('0.0.1');
interface UploadArgs {
sarifFile: string;
repository: string;
commit: string;
ref: string;
githubUrl: string;
githubAuth: string;
checkoutPath: string | undefined;
}
function parseGithubApiUrl(inputUrl: string): string {
try {
const url = new URL(inputUrl);
// If we detect that this is pointing at github.com
// then return with a fixed canonical URL.
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
return 'https://api.github.com';
}
// Add the API path if it's not already present.
if (url.pathname.indexOf('/api/v3') === -1) {
url.pathname = path.join(url.pathname, 'api', 'v3');
}
return url.toString();
} catch (e) {
throw new Error(`"${inputUrl}" is not a valid URL`);
}
}
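// Illustrative sketch of the normalisation above, not part of this change (URLs are hypothetical):
//   parseGithubApiUrl('https://github.com')             => 'https://api.github.com'
//   parseGithubApiUrl('https://ghe.example.com')         => 'https://ghe.example.com/api/v3'
//   parseGithubApiUrl('https://ghe.example.com/api/v3')  => unchanged, '/api/v3' already present
//   parseGithubApiUrl('not a url')                        throws '"not a url" is not a valid URL'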
const logger = getCLILogger();
program
.command('upload')
.description('Uploads a SARIF file, or all SARIF files from a directory, to code scanning')
.requiredOption('--sarif-file <file>', 'SARIF file to upload; can also be a directory for uploading multiple')
.requiredOption('--repository <repository>', 'Repository name')
.requiredOption('--commit <commit>', 'SHA of commit that was analyzed')
.requiredOption('--ref <ref>', 'Name of ref that was analyzed')
.requiredOption('--github-url <url>', 'URL of GitHub instance')
.requiredOption('--github-auth <auth>', 'GitHub Apps token, or of the form "username:token" if using a personal access token')
.option('--checkout-path <path>', 'Checkout path (default: current working directory)')
.action(async (cmd: UploadArgs) => {
try {
await upload_lib.upload(
cmd.sarifFile,
parseRepositoryNwo(cmd.repository),
cmd.commit,
cmd.ref,
undefined,
undefined,
undefined,
cmd.checkoutPath || process.cwd(),
undefined,
cmd.githubAuth,
parseGithubApiUrl(cmd.githubUrl),
'cli',
logger);
} catch (e) {
logger.error('Upload failed');
logger.error(e);
process.exitCode = 1;
}
});
program.parse(process.argv);
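// Illustrative invocation, not part of this change; every value below is hypothetical,
// including the compiled script path. An argv of this shape satisfies the required
// options of the 'upload' command:
//   node ./lib/cli.js upload \
//     --sarif-file results.sarif \
//     --repository octo-org/example-repo \
//     --commit 0123456789abcdef0123456789abcdef01234567 \
//     --ref refs/heads/main \
//     --github-url https://ghe.example.com \
//     --github-auth octocat:some-personal-access-token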


@@ -63,3 +63,35 @@ test('parse codeql bundle url version', t => {
}
}
});
test('getExtraOptions works for explicit paths', t => {
t.deepEqual(codeql.getExtraOptions({}, ['foo'], []), []);
t.deepEqual(codeql.getExtraOptions({foo: [42]}, ['foo'], []), ['42']);
t.deepEqual(codeql.getExtraOptions({foo: {bar: [42]}}, ['foo', 'bar'], []), ['42']);
});
test('getExtraOptions works for wildcards', t => {
t.deepEqual(codeql.getExtraOptions({'*': [42]}, ['foo'], []), ['42']);
});
test('getExtraOptions works for wildcards and explicit paths', t => {
let o1 = {'*': [42], foo: [87]};
t.deepEqual(codeql.getExtraOptions(o1, ['foo'], []), ['42', '87']);
let o2 = {'*': [42], foo: [87]};
t.deepEqual(codeql.getExtraOptions(o2, ['foo', 'bar'], []), ['42']);
let o3 = {'*': [42], foo: { '*': [87], bar: [99]}};
let p = ['foo', 'bar'];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ['42', '87', '99']);
});
test('getExtraOptions throws for bad content', t => {
t.throws(() => codeql.getExtraOptions({'*': 42}, ['foo'], []));
t.throws(() => codeql.getExtraOptions({foo: 87}, ['foo'], []));
t.throws(() => codeql.getExtraOptions({'*': [42], foo: { '*': 87, bar: [99]}}, ['foo', 'bar'], []));
});


@@ -1,17 +1,46 @@
import * as core from '@actions/core';
import * as exec from '@actions/exec';
import * as http from '@actions/http-client';
import { IHeaders } from '@actions/http-client/interfaces';
import * as toolcache from '@actions/tool-cache';
import * as fs from 'fs';
import * as path from 'path';
import * as semver from 'semver';
import * as stream from 'stream';
import * as globalutil from 'util';
import uuidV4 from 'uuid/v4';
import * as api from './api-client';
import * as defaults from './defaults.json'; // Referenced from codeql-action-sync-tool!
import { Language } from './languages';
import * as util from './util';
type Options = (string|number|boolean)[];
/**
* Extra command line options for the codeql commands.
*/
interface ExtraOptions {
'*'?: Options;
database?: {
'*'?: Options,
init?: Options,
'trace-command'?: Options,
analyze?: Options,
finalize?: Options
};
resolve?: {
'*'?: Options,
extractor?: Options,
queries?: Options
};
}
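// Illustrative value for the interface above; not part of this change, and the flag
// values are examples only. Entries under '*' apply to every command at that level,
// while named keys target a single subcommand.
const exampleExtraOptions: ExtraOptions = {
  '*': ['--verbosity=warnings'],
  database: {
    '*': ['--threads=2'],
    analyze: ['--ram=4096'],
  },
};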
export interface CodeQL {
/**
* Get the directory where the CodeQL executable is located.
* Get the path of the CodeQL executable.
*/
getDir(): string;
getPath(): string;
/**
* Print version information about CodeQL.
*/
@@ -24,16 +53,16 @@ export interface CodeQL {
/**
* Run 'codeql database init'.
*/
databaseInit(databasePath: string, language: string, sourceRoot: string): Promise<void>;
databaseInit(databasePath: string, language: Language, sourceRoot: string): Promise<void>;
/**
* Runs the autobuilder for the given language.
*/
runAutobuild(language: string): Promise<void>;
runAutobuild(language: Language): Promise<void>;
/**
* Extract code for a scanned language using 'codeql database trace-command'
* and running the language extractor.
*/
extractScannedLanguage(database: string, language: string): Promise<void>;
extractScannedLanguage(database: string, language: Language): Promise<void>;
/**
* Finalize a database using 'codeql database finalize'.
*/
@@ -68,22 +97,111 @@ export interface ResolveQueriesOutput {
*/
let cachedCodeQL: CodeQL | undefined = undefined;
/**
* Environment variable used to store the location of the CodeQL CLI executable.
* Value is set by setupCodeQL and read by getCodeQL.
*/
const CODEQL_ACTION_CMD = "CODEQL_ACTION_CMD";
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLActionRepository(): string {
// Actions do not know their own repository name,
// so we currently use this hack to find the name based on where our files are.
// This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
const relativeScriptPath = path.relative(actionsDirectory, __filename);
// This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout.
if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
const relativeScriptPathParts = relativeScriptPath.split(path.sep);
return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
}
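// Worked example of the path arithmetic above, not part of this change; the paths assume
// the conventional Actions runner layout:
//   RUNNER_TEMP        = '/home/runner/work/_temp'
//   __filename         = '/home/runner/work/_actions/github/codeql-action/v1/lib/codeql.js'
//   actionsDirectory   = '/home/runner/work/_actions'
//   relativeScriptPath = 'github/codeql-action/v1/lib/codeql.js'
// so the first two path components give 'github/codeql-action'.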
async function getCodeQLBundleDownloadURL(): Promise<string> {
const codeQLActionRepository = getCodeQLActionRepository();
const potentialDownloadSources = [
// This GitHub instance, and this Action.
[util.getInstanceAPIURL(), codeQLActionRepository],
// This GitHub instance, and the canonical Action.
[util.getInstanceAPIURL(), CODEQL_DEFAULT_ACTION_REPOSITORY],
// GitHub.com, and the canonical Action.
[util.GITHUB_DOTCOM_API_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
];
// We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
for (let downloadSource of uniqueDownloadSources) {
let [apiURL, repository] = downloadSource;
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
if (apiURL === util.GITHUB_DOTCOM_API_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
break;
}
let [repositoryOwner, repositoryName] = repository.split("/");
try {
const release = await api.getActionsApiClient().repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: CODEQL_BUNDLE_VERSION
});
for (let asset of release.data.assets) {
if (asset.name === CODEQL_BUNDLE_NAME) {
core.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
return asset.url;
}
}
} catch (e) {
core.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
}
}
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
}
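// Illustrative ordering, not part of this change (the instance URL and fork name are hypothetical).
// For a fork of the Action running on a GitHub Enterprise instance, the candidate sources above
// are tried in this order before falling back to the public release URL:
//   ['https://ghe.example.com/api/v3', 'my-org/codeql-action']  // this instance, this fork
//   ['https://ghe.example.com/api/v3', 'github/codeql-action']  // this instance, the canonical Action
//   ['https://api.github.com', 'github/codeql-action']          // GitHub.com, the canonical Action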
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url: string, headers?: IHeaders): Promise<string> {
const client = new http.HttpClient('CodeQL Action');
const dest = path.join(util.getRequiredEnvParam('RUNNER_TEMP'), uuidV4());
const response: http.HttpClientResponse = await client.get(url, headers);
if (response.message.statusCode !== 200) {
const err = new toolcache.HTTPError(response.message.statusCode);
core.info(
`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`
);
throw err;
}
const pipeline = globalutil.promisify(stream.pipeline);
fs.mkdirSync(path.dirname(dest), { recursive: true });
await pipeline(response.message, fs.createWriteStream(dest));
return dest;
}
export async function setupCodeQL(): Promise<CodeQL> {
try {
const codeqlURL = core.getInput('tools', { required: true });
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
let codeqlURL = core.getInput('tools');
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
} else {
const codeqlPath = await toolcache.downloadTool(codeqlURL);
if (!codeqlURL) {
codeqlURL = await getCodeQLBundleDownloadURL();
}
const headers: IHeaders = {accept: 'application/octet-stream'};
// We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom.
if (codeqlURL.startsWith(util.getInstanceAPIURL() + "/")) {
core.debug('Downloading CodeQL bundle with token.');
let token = core.getInput('token', { required: true });
headers.authorization = `token ${token}`;
} else {
core.debug('Downloading CodeQL bundle without token.');
}
let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers);
core.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
}
@@ -92,11 +210,10 @@ export async function setupCodeQL(): Promise<CodeQL> {
if (process.platform === 'win32') {
codeqlCmd += ".exe";
} else if (process.platform !== 'linux' && process.platform !== 'darwin') {
throw new Error("Unsupported plaform: " + process.platform);
throw new Error("Unsupported platform: " + process.platform);
}
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
core.exportVariable(CODEQL_ACTION_CMD, codeqlCmd);
return cachedCodeQL;
} catch (e) {
@@ -127,16 +244,24 @@ export function getCodeQLURLVersion(url: string): string {
return s;
}
export function getCodeQL(): CodeQL {
/**
* Use the CodeQL executable located at the given path.
*/
export function getCodeQL(cmd: string): CodeQL {
if (cachedCodeQL === undefined) {
const codeqlCmd = util.getRequiredEnvParam(CODEQL_ACTION_CMD);
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
cachedCodeQL = getCodeQLForCmd(cmd);
}
return cachedCodeQL;
}
function resolveFunction<T>(partialCodeql: Partial<CodeQL>, methodName: string): T {
function resolveFunction<T>(
partialCodeql: Partial<CodeQL>,
methodName: string,
defaultImplementation?: T): T {
if (typeof partialCodeql[methodName] !== 'function') {
if (defaultImplementation !== undefined) {
return defaultImplementation;
}
const dummyMethod = () => {
throw new Error('CodeQL ' + methodName + ' method not correctly defined');
};
@@ -151,9 +276,9 @@ function resolveFunction<T>(partialCodeql: Partial<CodeQL>, methodName: string):
* Accepts a partial object and any undefined methods will be implemented
* to immediately throw an exception indicating which method is missing.
*/
export function setCodeQL(partialCodeql: Partial<CodeQL>) {
export function setCodeQL(partialCodeql: Partial<CodeQL>): CodeQL {
cachedCodeQL = {
getDir: resolveFunction(partialCodeql, 'getDir'),
getPath: resolveFunction(partialCodeql, 'getPath', () => '/tmp/dummy-path'),
printVersion: resolveFunction(partialCodeql, 'printVersion'),
getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
@@ -163,12 +288,27 @@ export function setCodeQL(partialCodeql: Partial<CodeQL>) {
resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
};
return cachedCodeQL;
}
/**
* Get the cached CodeQL object. Should only be used from tests.
*
* TODO: Work out a good way for tests to get this from the test context
* instead of having to have this method.
*/
export function getCachedCodeQL(): CodeQL {
if (cachedCodeQL === undefined) {
// Should never happen as setCodeQL is called by testing-utils.setupTests
throw new Error('cachedCodeQL undefined');
}
return cachedCodeQL;
}
function getCodeQLForCmd(cmd: string): CodeQL {
return {
getDir: function() {
return path.dirname(cmd);
getPath: function() {
return cmd;
},
printVersion: async function() {
await exec.exec(cmd, [
@@ -184,22 +324,24 @@ function getCodeQLForCmd(cmd: string): CodeQL {
'trace-command',
databasePath,
...compilerSpecArg,
...getExtraOptionsFromEnv(['database', 'trace-command']),
process.execPath,
path.resolve(__dirname, 'tracer-env.js'),
envFile
]);
return JSON.parse(fs.readFileSync(envFile, 'utf-8'));
},
databaseInit: async function(databasePath: string, language: string, sourceRoot: string) {
databaseInit: async function(databasePath: string, language: Language, sourceRoot: string) {
await exec.exec(cmd, [
'database',
'init',
databasePath,
'--language=' + language,
'--source-root=' + sourceRoot,
...getExtraOptionsFromEnv(['database', 'init']),
]);
},
runAutobuild: async function(language: string) {
runAutobuild: async function(language: Language) {
const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
const autobuildCmd = path.join(path.dirname(cmd), language, 'tools', cmdName);
@@ -213,7 +355,7 @@ function getCodeQLForCmd(cmd: string): CodeQL {
await exec.exec(autobuildCmd);
},
extractScannedLanguage: async function(databasePath: string, language: string) {
extractScannedLanguage: async function(databasePath: string, language: Language) {
// Get extractor location
let extractorPath = '';
await exec.exec(
@@ -222,7 +364,8 @@ function getCodeQLForCmd(cmd: string): CodeQL {
'resolve',
'extractor',
'--format=json',
'--language=' + language
'--language=' + language,
...getExtraOptionsFromEnv(['resolve', 'extractor']),
],
{
silent: true,
@@ -240,6 +383,7 @@ function getCodeQLForCmd(cmd: string): CodeQL {
await exec.exec(cmd, [
'database',
'trace-command',
...getExtraOptionsFromEnv(['database', 'trace-command']),
databasePath,
'--',
traceCommand
@@ -249,6 +393,7 @@ function getCodeQLForCmd(cmd: string): CodeQL {
await exec.exec(cmd, [
'database',
'finalize',
...getExtraOptionsFromEnv(['database', 'finalize']),
databasePath
]);
},
@@ -257,7 +402,8 @@ function getCodeQLForCmd(cmd: string): CodeQL {
'resolve',
'queries',
...queries,
'--format=bylanguage'
'--format=bylanguage',
...getExtraOptionsFromEnv(['resolve', 'queries'])
];
if (extraSearchPath !== undefined) {
codeqlArgs.push('--search-path', extraSearchPath);
@@ -283,8 +429,58 @@ function getCodeQLForCmd(cmd: string): CodeQL {
'--format=sarif-latest',
'--output=' + sarifFile,
'--no-sarif-add-snippets',
...getExtraOptionsFromEnv(['database', 'analyze']),
querySuite
]);
}
};
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*/
function getExtraOptionsFromEnv(path: string[]) {
let options: ExtraOptions = util.getExtraOptionsEnvParam();
return getExtraOptions(options, path, []);
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*
* - the special terminal step name '*' in `options` matches all path steps
* - throws an exception if this conversion is impossible.
*/
export /* exported for testing */ function getExtraOptions(
options: any,
path: string[],
pathInfo: string[]): string[] {
/**
* Gets `options` as an array of extra option strings.
*
* - throws an exception mentioning `pathInfo` if this conversion is impossible.
*/
function asExtraOptions(options: any, pathInfo: string[]): string[] {
if (options === undefined) {
return [];
}
if (!Array.isArray(options)) {
const msg =
`The extra options for '${pathInfo.join('.')}' ('${JSON.stringify(options)}') are not in an array.`;
throw new Error(msg);
}
return options.map(o => {
const t = typeof o;
if (t !== 'string' && t !== 'number' && t !== 'boolean') {
const msg =
`The extra option for '${pathInfo.join('.')}' ('${JSON.stringify(o)}') is not a primitive value.`;
throw new Error(msg);
}
return o + '';
});
}
let all = asExtraOptions(options?.['*'], pathInfo.concat('*'));
let specific = path.length === 0 ?
asExtraOptions(options, pathInfo) :
getExtraOptions(options?.[path[0]], path?.slice(1), pathInfo.concat(path[0]));
return all.concat(specific);
}
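// Worked example matching the tests added in this commit, not part of this change:
// with options = {'*': [42], foo: {'*': [87], bar: [99]}} and path = ['foo', 'bar'],
// the top-level '*' contributes '42', the '*' under 'foo' contributes '87', and the
// terminal 'bar' array contributes '99':
//   getExtraOptions(options, ['foo', 'bar'], [])  // => ['42', '87', '99']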


@@ -5,8 +5,9 @@ import * as path from 'path';
import sinon from 'sinon';
import * as api from './api-client';
import * as CodeQL from './codeql';
import { getCachedCodeQL, setCodeQL } from './codeql';
import * as configUtils from './config-utils';
import { Language } from "./languages";
import {setupTests} from './testing-utils';
import * as util from './util';
@@ -36,6 +37,19 @@ function mockGetContents(content: GetContentsResponse): sinon.SinonStub<any, any
return spyGetContents;
}
function mockListLanguages(languages: string[]) {
// Passing an auth token is required, so we just use a dummy value
let client = new github.GitHub('123');
const response = {
data: {},
};
for (const language of languages) {
response.data[language] = 123;
}
sinon.stub(client.repos, "listLanguages").resolves(response as any);
sinon.stub(api, "getApiClient").value(() => client);
}
test("load empty config", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_TEMP'] = tmpDir;
@@ -44,7 +58,7 @@ test("load empty config", async t => {
setInput('config-file', undefined);
setInput('languages', 'javascript,python');
CodeQL.setCodeQL({
const codeQL = setCodeQL({
resolveQueries: async function() {
return {
byLanguage: {},
@@ -54,9 +68,9 @@ test("load empty config", async t => {
},
});
const config = await configUtils.initConfig();
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
t.deepEqual(config, await configUtils.getDefaultConfig());
t.deepEqual(config, await configUtils.getDefaultConfig(tmpDir, tmpDir, codeQL));
});
});
@@ -68,7 +82,7 @@ test("loading config saves config", async t => {
setInput('config-file', undefined);
setInput('languages', 'javascript,python');
CodeQL.setCodeQL({
const codeQL = setCodeQL({
resolveQueries: async function() {
return {
byLanguage: {},
@@ -80,18 +94,18 @@ test("loading config saves config", async t => {
// Sanity check the saved config file does not already exist
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile()));
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig throws before we have called initConfig
await t.throwsAsync(configUtils.getConfig);
await t.throwsAsync(() => configUtils.getConfig(tmpDir));
const config1 = await configUtils.initConfig();
const config1 = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile()));
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig();
const config2 = await configUtils.getConfig(tmpDir);
t.deepEqual(config1, config2);
});
});
@@ -104,7 +118,7 @@ test("load input outside of workspace", async t => {
setInput('config-file', '../input');
try {
await configUtils.initConfig();
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
throw new Error('initConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
@@ -121,7 +135,7 @@ test("load non-local input with invalid repo syntax", async t => {
setInput('config-file', 'octo-org/codeql-config@main');
try {
await configUtils.initConfig();
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
throw new Error('initConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
@@ -139,7 +153,7 @@ test("load non-existent input", async t => {
setInput('languages', 'javascript');
try {
await configUtils.initConfig();
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
throw new Error('initConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
@@ -152,7 +166,7 @@ test("load non-empty input", async t => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
const codeQL = setCodeQL({
resolveQueries: async function() {
return {
byLanguage: {
@@ -183,7 +197,7 @@ test("load non-empty input", async t => {
// And the config we expect it to parse to
const expectedConfig: configUtils.Config = {
languages: ['javascript'],
languages: [Language.javascript],
queries: {'javascript': ['/foo/a.ql', '/bar/b.ql']},
pathsIgnore: ['a', 'b'],
paths: ['c/d'],
@@ -194,13 +208,16 @@ test("load non-empty input", async t => {
'paths-ignore': ['a', 'b'],
paths: ['c/d'],
},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: codeQL.getPath(),
};
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
const actualConfig = await configUtils.initConfig();
const actualConfig = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
@@ -219,12 +236,14 @@ test("default queries are used", async t => {
// with the correct arguments.
const resolveQueriesArgs: {queries: string[], extraSearchPath: string | undefined}[] = [];
CodeQL.setCodeQL({
const codeQL = setCodeQL({
resolveQueries: async function(queries: string[], extraSearchPath: string | undefined) {
resolveQueriesArgs.push({queries, extraSearchPath});
return {
byLanguage: {
'javascript': {},
'javascript': {
'foo.ql': {},
},
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
@@ -245,7 +264,7 @@ test("default queries are used", async t => {
setInput('config-file', 'input');
setInput('languages', 'javascript');
await configUtils.initConfig();
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
@@ -270,7 +289,7 @@ test("Queries can be specified in config file", async t => {
fs.mkdirSync(path.join(tmpDir, 'foo'));
const resolveQueriesArgs: {queries: string[], extraSearchPath: string | undefined}[] = [];
CodeQL.setCodeQL({
const codeQL = setCodeQL({
resolveQueries: async function(queries: string[], extraSearchPath: string | undefined) {
resolveQueriesArgs.push({queries, extraSearchPath});
// Return what we're given, just in the right format for a resolved query
@@ -290,7 +309,7 @@ test("Queries can be specified in config file", async t => {
setInput('languages', 'javascript');
const config = await configUtils.initConfig();
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
// Check resolveQueries was called correctly
// It'll be called once for the default queries
@@ -326,7 +345,7 @@ test("Queries from config file can be overridden in workflow file", async t => {
fs.mkdirSync(path.join(tmpDir, 'override'));
const resolveQueriesArgs: {queries: string[], extraSearchPath: string | undefined}[] = [];
CodeQL.setCodeQL({
const codeQL = setCodeQL({
resolveQueries: async function(queries: string[], extraSearchPath: string | undefined) {
resolveQueriesArgs.push({queries, extraSearchPath});
// Return what we're given, just in the right format for a resolved query
@@ -346,7 +365,7 @@ test("Queries from config file can be overridden in workflow file", async t => {
setInput('languages', 'javascript');
const config = await configUtils.initConfig();
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
@@ -373,7 +392,7 @@ test("Multiple queries can be specified in workflow file, no config file require
setInput('queries', './override1,./override2');
const resolveQueriesArgs: {queries: string[], extraSearchPath: string | undefined}[] = [];
CodeQL.setCodeQL({
const codeQL = setCodeQL({
resolveQueries: async function(queries: string[], extraSearchPath: string | undefined) {
resolveQueriesArgs.push({queries, extraSearchPath});
// Return what we're given, just in the right format for a resolved query
@@ -393,7 +412,7 @@ test("Multiple queries can be specified in workflow file, no config file require
setInput('languages', 'javascript');
const config = await configUtils.initConfig();
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
@@ -422,7 +441,7 @@ test("Invalid queries in workflow file handled correctly", async t => {
// This function just needs to be type-correct; it doesn't need to do anything,
// since we're deliberately passing in invalid data
CodeQL.setCodeQL({
const codeQL = setCodeQL({
resolveQueries: async function(_queries: string[], _extraSearchPath: string | undefined) {
return {
byLanguage: {
@@ -435,7 +454,7 @@ test("Invalid queries in workflow file handled correctly", async t => {
});
try {
await configUtils.initConfig();
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
t.fail('initConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(configUtils.getQueryUsesInvalid(undefined, "foo/bar@v1@v3")));
@@ -448,10 +467,14 @@ test("API client used when reading remote config", async t => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
const codeQL = setCodeQL({
resolveQueries: async function() {
return {
byLanguage: {},
byLanguage: {
'javascript': {
'foo.ql': {},
},
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
@@ -481,7 +504,7 @@ test("API client used when reading remote config", async t => {
setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
setInput('languages', 'javascript');
await configUtils.initConfig();
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
t.assert(spyGetContents.called);
});
});
@@ -497,7 +520,7 @@ test("Remote config handles the case where a directory is provided", async t =>
const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
try {
await configUtils.initConfig();
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
throw new Error('initConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
@@ -518,7 +541,7 @@ test("Invalid format of remote config handled correctly", async t => {
const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
try {
await configUtils.initConfig();
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
throw new Error('initConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
@@ -526,6 +549,38 @@ test("Invalid format of remote config handled correctly", async t => {
});
});
test("No detected languages", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
mockListLanguages([]);
try {
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
throw new Error('initConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
}
});
});
test("Unknown languages", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('languages', 'ruby,english');
try {
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
throw new Error('initConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(['ruby', 'english'])));
}
});
});
function doInvalidInputTest(
testName: string,
inputFileContents: string,
@@ -536,7 +591,7 @@ function doInvalidInputTest(
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
CodeQL.setCodeQL({
const codeQL = setCodeQL({
resolveQueries: async function() {
return {
byLanguage: {},
@@ -552,7 +607,7 @@ function doInvalidInputTest(
setInput('languages', 'javascript');
try {
await configUtils.initConfig();
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
throw new Error('initConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));


@@ -1,12 +1,12 @@
import * as core from '@actions/core';
import * as io from '@actions/io';
import * as fs from 'fs';
import * as yaml from 'js-yaml';
import * as path from 'path';
import * as api from './api-client';
import { getCodeQL, ResolveQueriesOutput } from './codeql';
import { CodeQL, ResolveQueriesOutput } from './codeql';
import * as externalQueries from "./external-queries";
import { Language, parseLanguage } from "./languages";
import * as util from './util';
// Property names from the user-supplied config file.
@@ -38,7 +38,7 @@ export interface Config {
/**
* Set of languages to run analysis for.
*/
languages: string[];
languages: Language[];
/**
* Map from language to query files.
* Will only contain .ql files and not other kinds of files,
@@ -61,6 +61,20 @@ export interface Config {
* top-level field above.
*/
originalUserInput: UserConfig;
/**
* Directory to use for temporary files that should be
* deleted at the end of the job.
*/
tempDir: string;
/**
* Directory to use for the tool cache.
* This may be persisted between jobs but this is not guaranteed.
*/
toolCacheDir: string;
/**
* Path of the CodeQL executable.
*/
codeQLCmd: string;
}
/**
@@ -110,13 +124,13 @@ function validateQueries(resolvedQueries: ResolveQueriesOutput) {
* Run 'codeql resolve queries' and add the results to resultMap
*/
async function runResolveQueries(
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
toResolve: string[],
extraSearchPath: string | undefined,
errorOnInvalidQueries: boolean) {
const codeQl = getCodeQL();
const resolvedQueries = await codeQl.resolveQueries(toResolve, extraSearchPath);
const resolvedQueries = await codeQL.resolveQueries(toResolve, extraSearchPath);
for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
if (resultMap[language] === undefined) {
@@ -133,9 +147,9 @@ async function runResolveQueries(
/**
* Get the set of queries included by default.
*/
async function addDefaultQueries(languages: string[], resultMap: { [language: string]: string[] }) {
async function addDefaultQueries(codeQL: CodeQL, languages: string[], resultMap: { [language: string]: string[] }) {
const suites = languages.map(l => l + '-code-scanning.qls');
await runResolveQueries(resultMap, suites, undefined, false);
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
}
// The set of acceptable values for built-in suites from the codeql bundle
@@ -147,6 +161,7 @@ const builtinSuites = ['security-extended', 'security-and-quality'] as const;
*/
async function addBuiltinSuiteQueries(
languages: string[],
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
suiteName: string,
configFile?: string) {
@@ -157,13 +172,14 @@ async function addBuiltinSuiteQueries(
}
const suites = languages.map(l => l + '-' + suiteName + '.qls');
await runResolveQueries(resultMap, suites, undefined, false);
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
}
/**
* Retrieve the set of queries at localQueryPath and add them to resultMap.
*/
async function addLocalQueries(
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
localQueryPath: string,
configFile?: string) {
@@ -189,13 +205,19 @@ async function addLocalQueries(
// Get the root of the current repo to use when resolving query dependencies
const rootOfRepo = util.getRequiredEnvParam('GITHUB_WORKSPACE');
await runResolveQueries(resultMap, [absoluteQueryPath], rootOfRepo, true);
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], rootOfRepo, true);
}
/**
* Retrieve the set of queries at the referenced remote repo and add them to resultMap.
*/
async function addRemoteQueries(resultMap: { [language: string]: string[] }, queryUses: string, configFile?: string) {
async function addRemoteQueries(
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
queryUses: string,
tempDir: string,
configFile?: string) {
let tok = queryUses.split('@');
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
@@ -217,13 +239,13 @@ async function addRemoteQueries(resultMap: { [language: string]: string[] }, que
const nwo = tok[0] + '/' + tok[1];
// Checkout the external repository
const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref);
const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref, tempDir);
const queryPath = tok.length > 2
? path.join(rootOfRepo, tok.slice(2).join('/'))
: rootOfRepo;
await runResolveQueries(resultMap, [queryPath], rootOfRepo, true);
await runResolveQueries(codeQL, resultMap, [queryPath], rootOfRepo, true);
}
/**
@@ -236,8 +258,10 @@ async function addRemoteQueries(resultMap: { [language: string]: string[] }, que
*/
async function parseQueryUses(
languages: string[],
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
queryUses: string,
tempDir: string,
configFile?: string) {
queryUses = queryUses.trim();
@@ -247,18 +271,18 @@ async function parseQueryUses(
// Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) {
await addLocalQueries(resultMap, queryUses.slice(2), configFile);
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), configFile);
return;
}
// Check for one of the builtin suites
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
await addBuiltinSuiteQueries(languages, resultMap, queryUses, configFile);
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
return;
}
// Otherwise, must be a reference to another repo
await addRemoteQueries(resultMap, queryUses, configFile);
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, configFile);
}
// Regex validating stars in paths or paths-ignore entries.
@@ -409,31 +433,29 @@ function getConfigFilePropertyError(configFile: string | undefined, property: st
}
}
export function getNoLanguagesError(): string {
return "Did not detect any languages to analyze. " +
"Please update input in workflow or check that GitHub detects the correct languages in your repository.";
}
export function getUnknownLanguagesError(languages: string[]): string {
return "Did not recognise the following languages: " + languages.join(', ');
}
/**
* Gets the set of languages in the current repository
*/
async function getLanguagesInRepo(): Promise<string[]> {
// Translate between GitHub's API names for languages and ours
const codeqlLanguages = {
'C': 'cpp',
'C++': 'cpp',
'C#': 'csharp',
'Go': 'go',
'Java': 'java',
'JavaScript': 'javascript',
'TypeScript': 'javascript',
'Python': 'python',
};
async function getLanguagesInRepo(): Promise<Language[]> {
let repo_nwo = process.env['GITHUB_REPOSITORY']?.split("/");
if (repo_nwo) {
let owner = repo_nwo[0];
let repo = repo_nwo[1];
core.debug(`GitHub repo ${owner} ${repo}`);
const response = await api.getApiClient().request("GET /repos/:owner/:repo/languages", ({
const response = await api.getActionsApiClient(true).repos.listLanguages({
owner,
repo
}));
});
core.debug("Languages API response: " + JSON.stringify(response));
@@ -441,10 +463,11 @@ async function getLanguagesInRepo(): Promise<string[]> {
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
let languages: Set<string> = new Set();
for (let lang in response.data) {
if (lang in codeqlLanguages) {
languages.add(codeqlLanguages[lang]);
let languages: Set<Language> = new Set();
for (let lang of Object.keys(response.data)) {
let parsedLang = parseLanguage(lang);
if (parsedLang !== undefined) {
languages.add(parsedLang);
}
}
return [...languages];
@@ -459,8 +482,11 @@ async function getLanguagesInRepo(): Promise<string[]> {
* The result is obtained from the action input parameter 'languages' if that
* has been set, otherwise it is deduced as all languages in the repo that
* can be analysed.
*
* If no languages could be detected from either the workflow or the repository
* then throw an error.
*/
async function getLanguages(): Promise<string[]> {
async function getLanguages(): Promise<Language[]> {
// Obtain from action input 'languages' if set
let languages = core.getInput('languages', { required: false })
@@ -475,7 +501,28 @@ async function getLanguages(): Promise<string[]> {
core.info("Automatically detected languages: " + JSON.stringify(languages));
}
return languages;
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
throw new Error(getNoLanguagesError());
}
// Make sure they are supported
const parsedLanguages: Language[] = [];
const unknownLanguages: string[] = [];
for (let language of languages) {
const parsedLanguage = parseLanguage(language);
if (parsedLanguage === undefined) {
unknownLanguages.push(language);
} else if (parsedLanguages.indexOf(parsedLanguage) === -1) {
parsedLanguages.push(parsedLanguage);
}
}
if (unknownLanguages.length > 0) {
throw new Error(getUnknownLanguagesError(unknownLanguages));
}
return parsedLanguages;
}
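// Illustrative behaviour, not part of this change, matching the tests added in this commit:
// with the workflow input 'languages: ruby,english' neither entry parses, so this throws
// getUnknownLanguagesError(['ruby', 'english']); duplicate parsed entries are dropped by the
// indexOf check above, so repeating a supported language yields a single Language value.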
/**
@@ -483,14 +530,16 @@ async function getLanguages(): Promise<string[]> {
* (and thus added), otherwise false
*/
async function addQueriesFromWorkflowIfRequired(
codeQL: CodeQL,
languages: string[],
resultMap: { [language: string]: string[] },
tempDir: string,
configFile?: string
): Promise<boolean> {
const queryUses = core.getInput('queries');
if (queryUses) {
for (const query of queryUses.split(',')) {
await parseQueryUses(languages, resultMap, query, configFile);
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, configFile);
}
return true;
}
@@ -501,11 +550,11 @@ async function addQueriesFromWorkflowIfRequired(
/**
* Get the default config for when the user has not supplied one.
*/
export async function getDefaultConfig(): Promise<Config> {
export async function getDefaultConfig(tempDir: string, toolCacheDir: string, codeQL: CodeQL): Promise<Config> {
const languages = await getLanguages();
const queries = {};
await addDefaultQueries(languages, queries);
await addQueriesFromWorkflowIfRequired(languages, queries);
await addDefaultQueries(codeQL, languages, queries);
await addQueriesFromWorkflowIfRequired(codeQL, languages, queries, tempDir);
return {
languages: languages,
@@ -513,13 +562,16 @@ export async function getDefaultConfig(): Promise<Config> {
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
};
}
/**
* Load the config from the given file.
*/
async function loadConfig(configFile: string): Promise<Config> {
async function loadConfig(configFile: string, tempDir: string, toolCacheDir: string, codeQL: CodeQL): Promise<Config> {
let parsedYAML: UserConfig;
if (isLocal(configFile)) {
@@ -544,11 +596,6 @@ async function loadConfig(configFile: string): Promise<Config> {
}
const languages = await getLanguages();
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
throw new Error("Did not detect any languages to analyze. Please update input in workflow.");
}
const queries = {};
const pathsIgnore: string[] = [];
@@ -562,12 +609,14 @@ async function loadConfig(configFile: string): Promise<Config> {
disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY]!;
}
if (!disableDefaultQueries) {
await addDefaultQueries(languages, queries);
await addDefaultQueries(codeQL, languages, queries);
}
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
const addedQueriesFromAction = await addQueriesFromWorkflowIfRequired(languages, queries, configFile);
const addedQueriesFromAction = await addQueriesFromWorkflowIfRequired(
codeQL, languages, queries, tempDir, configFile
);
if (!addedQueriesFromAction && QUERIES_PROPERTY in parsedYAML) {
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
throw new Error(getQueriesInvalid(configFile));
@@ -576,7 +625,7 @@ async function loadConfig(configFile: string): Promise<Config> {
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, queries, query[QUERIES_USES_PROPERTY], configFile);
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, configFile);
}
}
@@ -604,12 +653,24 @@ async function loadConfig(configFile: string): Promise<Config> {
});
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
for (const language of languages) {
if (queries[language] === undefined || queries[language].length === 0) {
throw new Error(`Did not detect any queries to run for ${language}. ` +
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
}
}
return {
languages,
queries,
pathsIgnore,
paths,
originalUserInput: parsedYAML
originalUserInput: parsedYAML,
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
};
}
@@ -619,16 +680,16 @@ async function loadConfig(configFile: string): Promise<Config> {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
export async function initConfig(): Promise<Config> {
export async function initConfig(tempDir: string, toolCacheDir: string, codeQL: CodeQL): Promise<Config> {
const configFile = core.getInput('config-file');
let config: Config;
// If no config file was provided create an empty one
if (configFile === '') {
core.debug('No configuration file was provided');
config = await getDefaultConfig();
config = await getDefaultConfig(tempDir, toolCacheDir, codeQL);
} else {
config = await loadConfig(configFile);
config = await loadConfig(configFile, tempDir, toolCacheDir, codeQL);
}
// Save the config so we can easily access it again in the future
@@ -668,7 +729,7 @@ async function getRemoteConfig(configFile: string): Promise<UserConfig> {
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
}
const response = await api.getApiClient().repos.getContents({
const response = await api.getActionsApiClient(true).repos.getContents({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
path: pieces.groups.path,
@@ -687,18 +748,11 @@ async function getRemoteConfig(configFile: string): Promise<UserConfig> {
return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
}
/**
* Get the directory where the parsed config will be stored.
*/
function getPathToParsedConfigFolder(): string {
return util.getRequiredEnvParam('RUNNER_TEMP');
}
/**
* Get the file path where the parsed config will be stored.
*/
export function getPathToParsedConfigFile(): string {
return path.join(getPathToParsedConfigFolder(), 'config');
export function getPathToParsedConfigFile(tempDir: string): string {
return path.join(tempDir, 'config');
}
/**
@@ -706,8 +760,9 @@ export function getPathToParsedConfigFile(): string {
*/
async function saveConfig(config: Config) {
const configString = JSON.stringify(config);
await io.mkdirP(getPathToParsedConfigFolder());
fs.writeFileSync(getPathToParsedConfigFile(), configString, 'utf8');
const configFile = getPathToParsedConfigFile(config.tempDir);
fs.mkdirSync(path.dirname(configFile), { recursive: true });
fs.writeFileSync(configFile, configString, 'utf8');
core.debug('Saved config:');
core.debug(configString);
}
@@ -720,8 +775,8 @@ async function saveConfig(config: Config) {
* stored to a known location. On the second and further calls, this will
* return the contents of the parsed config from the known location.
*/
export async function getConfig(): Promise<Config> {
const configFile = getPathToParsedConfigFile();
export async function getConfig(tempDir: string): Promise<Config> {
const configFile = getPathToParsedConfigFile(tempDir);
if (!fs.existsSync(configFile)) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}

src/defaults.json (new file, 3 lines)

@@ -0,0 +1,3 @@
{
"bundleVersion": "codeql-bundle-20200630"
}


@@ -10,8 +10,10 @@ setupTests(test);
test("checkoutExternalQueries", async t => {
await util.withTmpDir(async tmpDir => {
process.env["RUNNER_TEMP"] = tmpDir;
await externalQueries.checkoutExternalRepository("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b");
await externalQueries.checkoutExternalRepository(
"github/codeql-go",
"df4c6869212341b601005567381944ed90906b6b",
tmpDir);
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch
t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));


@@ -3,17 +3,13 @@ import * as exec from '@actions/exec';
import * as fs from 'fs';
import * as path from 'path';
import * as util from './util';
/**
* Check out repository at the given ref, and return the directory of the checkout.
*/
export async function checkoutExternalRepository(repository: string, ref: string): Promise<string> {
const folder = util.getRequiredEnvParam('RUNNER_TEMP');
export async function checkoutExternalRepository(repository: string, ref: string, tempDir: string): Promise<string> {
core.info('Checking out ' + repository);
const checkoutLocation = path.join(folder, repository);
const checkoutLocation = path.join(tempDir, repository);
if (!fs.existsSync(checkoutLocation)) {
const repoURL = 'https://github.com/' + repository + '.git';
await exec.exec('git', ['clone', repoURL, checkoutLocation]);


@@ -1,10 +1,12 @@
import * as core from '@actions/core';
import * as io from '@actions/io';
import * as fs from 'fs';
import * as path from 'path';
import { getCodeQL } from './codeql';
import * as configUtils from './config-utils';
import { isScannedLanguage } from './languages';
import { getActionsLogger } from './logging';
import { parseRepositoryNwo } from './repository';
import * as sharedEnv from './shared-environment';
import * as upload_lib from './upload-lib';
import * as util from './util';
@@ -56,11 +58,10 @@ async function sendStatusReport(
await util.sendStatusReport(statusReport);
}
async function createdDBForScannedLanguages(databaseFolder: string) {
const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
if (scannedLanguages) {
const codeql = getCodeQL();
for (const language of scannedLanguages.split(',')) {
async function createdDBForScannedLanguages(databaseFolder: string, config: configUtils.Config) {
const codeql = getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
if (isScannedLanguage(language)) {
core.startGroup('Extracting ' + language);
await codeql.extractScannedLanguage(path.join(databaseFolder, language), language);
core.endGroup();
@@ -69,9 +70,9 @@ async function createdDBForScannedLanguages(databaseFolder: string) {
}
async function finalizeDatabaseCreation(databaseFolder: string, config: configUtils.Config) {
await createdDBForScannedLanguages(databaseFolder);
await createdDBForScannedLanguages(databaseFolder, config);
const codeql = getCodeQL();
const codeql = getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
core.startGroup('Finalizing ' + language);
await codeql.finalizeDatabase(path.join(databaseFolder, language));
@@ -85,7 +86,7 @@ async function runQueries(
sarifFolder: string,
config: configUtils.Config): Promise<QueriesStatusReport> {
const codeql = getCodeQL();
const codeql = getCodeQL(config.codeQLCmd);
for (let language of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + language);
@@ -125,19 +126,19 @@ async function run() {
let queriesStats: QueriesStatusReport | undefined = undefined;
let uploadStats: upload_lib.UploadStatusReport | undefined = undefined;
try {
if (util.should_abort('finish', true) ||
!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
return;
}
const config = await configUtils.getConfig();
const config = await configUtils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'));
core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
const databaseFolder = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_DATABASE_DIR);
const databaseFolder = util.getCodeQLDatabasesDir(config.tempDir);
const sarifFolder = core.getInput('output');
await io.mkdirP(sarifFolder);
fs.mkdirSync(sarifFolder, { recursive: true });
core.info('Finalizing database creation');
await finalizeDatabaseCreation(databaseFolder, config);
@@ -146,11 +147,25 @@ async function run() {
queriesStats = await runQueries(databaseFolder, sarifFolder, config);
if ('true' === core.getInput('upload')) {
uploadStats = await upload_lib.upload(sarifFolder);
uploadStats = await upload_lib.upload(
sarifFolder,
parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')),
await util.getCommitOid(),
util.getRef(),
await util.getAnalysisKey(),
util.getRequiredEnvParam('GITHUB_WORKFLOW'),
util.getWorkflowRunID(),
core.getInput('checkout_path'),
core.getInput('matrix'),
core.getInput('token'),
util.getRequiredEnvParam('GITHUB_API_URL'),
'actions',
getActionsLogger());
}
} catch (error) {
core.setFailed(error.message);
console.log(error);
await sendStatusReport(startedAt, queriesStats, uploadStats, error);
return;
}

View File

@@ -4,6 +4,7 @@ import * as fs from 'fs';
import * as path from 'path';
import * as fingerprints from './fingerprints';
import { getCLILogger } from './logging';
import {setupTests} from './testing-utils';
setupTests(test);
@@ -115,7 +116,7 @@ test('hash', (t: ava.Assertions) => {
function testResolveUriToFile(uri: any, index: any, artifactsURIs: any[]) {
const location = { "uri": uri, "index": index };
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
return fingerprints.resolveUriToFile(location, artifacts);
return fingerprints.resolveUriToFile(location, artifacts, getCLILogger());
}
test('resolveUriToFile', t => {
@@ -174,7 +175,7 @@ test('addFingerprints', t => {
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input), expected);
t.deepEqual(fingerprints.addFingerprints(input, getCLILogger()), expected);
});
test('missingRegions', t => {
@@ -189,5 +190,5 @@ test('missingRegions', t => {
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input), expected);
t.deepEqual(fingerprints.addFingerprints(input, getCLILogger()), expected);
});

View File

@@ -1,7 +1,8 @@
import * as core from '@actions/core';
import * as fs from 'fs';
import Long from 'long';
import { Logger } from './logging';
const tab = '\t'.charCodeAt(0);
const space = ' '.charCodeAt(0);
const lf = '\n'.charCodeAt(0);
@@ -124,7 +125,7 @@ export function hash(callback: hashCallback, input: string) {
// Generate a hash callback function that updates the given result in-place
// when it receives a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(result: any, location: any): hashCallback {
function locationUpdateCallback(result: any, location: any, logger: Logger): hashCallback {
let locationStartLine = location.physicalLocation?.region?.startLine;
if (locationStartLine === undefined) {
// We expect the region section to be present, but it can be absent if the
@@ -148,7 +149,7 @@ function locationUpdateCallback(result: any, location: any): hashCallback {
if (!existingFingerprint) {
result.partialFingerprints.primaryLocationLineHash = hash;
} else if (existingFingerprint !== hash) {
core.warning('Calculated fingerprint of ' + hash +
logger.warning('Calculated fingerprint of ' + hash +
' for file ' + location.physicalLocation.artifactLocation.uri +
' line ' + lineNumber +
', but found existing inconsistent fingerprint value ' + existingFingerprint);
@@ -160,14 +161,14 @@ function locationUpdateCallback(result: any, location: any): hashCallback {
// the source file so we can hash it.
// If possible returns an absolute file path for the source file,
// or if not possible then returns undefined.
export function resolveUriToFile(location: any, artifacts: any[]): string | undefined {
export function resolveUriToFile(location: any, artifacts: any[], logger: Logger): string | undefined {
// This may be referencing an artifact
if (!location.uri && location.index !== undefined) {
if (typeof location.index !== 'number' ||
location.index < 0 ||
location.index >= artifacts.length ||
typeof artifacts[location.index].location !== 'object') {
core.debug(`Ignoring location as index "${location.index}" is invalid`);
logger.debug(`Ignoring location as index "${location.index}" is invalid`);
return undefined;
}
location = artifacts[location.index].location;
@@ -175,7 +176,7 @@ export function resolveUriToFile(location: any, artifacts: any[]): string | unde
// Get the URI and decode
if (typeof location.uri !== 'string') {
core.debug(`Ignoring location as URI "${location.uri}" is invalid`);
logger.debug(`Ignoring location as URI "${location.uri}" is invalid`);
return undefined;
}
let uri = decodeURIComponent(location.uri);
@@ -186,14 +187,14 @@ export function resolveUriToFile(location: any, artifacts: any[]): string | unde
uri = uri.substring(fileUriPrefix.length);
}
if (uri.indexOf('://') !== -1) {
core.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
logger.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
return undefined;
}
// Discard any absolute paths that aren't in the src root
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
core.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
return undefined;
}
@@ -206,7 +207,7 @@ export function resolveUriToFile(location: any, artifacts: any[]): string | unde
// Check the file exists
if (!fs.existsSync(uri)) {
core.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
return undefined;
}
@@ -215,7 +216,7 @@ export function resolveUriToFile(location: any, artifacts: any[]): string | unde
// Compute fingerprints for results in the given sarif file
// and return the updated sarif file contents.
export function addFingerprints(sarifContents: string): string {
export function addFingerprints(sarifContents: string, logger: Logger): string {
let sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
@@ -229,18 +230,18 @@ export function addFingerprints(sarifContents: string): string {
// Check the primary location is defined correctly and is in the src root
const primaryLocation = (result.locations || [])[0];
if (!primaryLocation?.physicalLocation?.artifactLocation) {
core.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
logger.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
continue;
}
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, logger);
if (!filepath) {
continue;
}
if (!callbacksByFile[filepath]) {
callbacksByFile[filepath] = [];
}
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation));
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation, logger));
}
}

47
src/languages.test.ts Normal file
View File

@@ -0,0 +1,47 @@
import test from 'ava';
import {isScannedLanguage, isTracedLanguage, Language, parseLanguage} from './languages';
import {setupTests} from './testing-utils';
setupTests(test);
test('parseLanguage', async t => {
// Exact matches
t.deepEqual(parseLanguage('csharp'), Language.csharp);
t.deepEqual(parseLanguage('cpp'), Language.cpp);
t.deepEqual(parseLanguage('go'), Language.go);
t.deepEqual(parseLanguage('java'), Language.java);
t.deepEqual(parseLanguage('javascript'), Language.javascript);
t.deepEqual(parseLanguage('python'), Language.python);
// Aliases
t.deepEqual(parseLanguage('c'), Language.cpp);
t.deepEqual(parseLanguage('c++'), Language.cpp);
t.deepEqual(parseLanguage('c#'), Language.csharp);
t.deepEqual(parseLanguage('typescript'), Language.javascript);
// Not matches
t.deepEqual(parseLanguage('foo'), undefined);
t.deepEqual(parseLanguage(' '), undefined);
t.deepEqual(parseLanguage(''), undefined);
});
test('isTracedLanguage', async t => {
t.true(isTracedLanguage(Language.cpp));
t.true(isTracedLanguage(Language.java));
t.true(isTracedLanguage(Language.csharp));
t.false(isTracedLanguage(Language.go));
t.false(isTracedLanguage(Language.javascript));
t.false(isTracedLanguage(Language.python));
});
test('isScannedLanguage', async t => {
t.false(isScannedLanguage(Language.cpp));
t.false(isScannedLanguage(Language.java));
t.false(isScannedLanguage(Language.csharp));
t.true(isScannedLanguage(Language.go));
t.true(isScannedLanguage(Language.javascript));
t.true(isScannedLanguage(Language.python));
});

44
src/languages.ts Normal file
View File

@@ -0,0 +1,44 @@
// All the languages supported by CodeQL
export enum Language {
csharp = 'csharp',
cpp = 'cpp',
go = 'go',
java = 'java',
javascript = 'javascript',
python = 'python',
}
// Additional names for languages
const LANGUAGE_ALIASES: {[lang: string]: Language} = {
'c': Language.cpp,
'c++': Language.cpp,
'c#': Language.csharp,
'typescript': Language.javascript,
};
// Translate language names from user input or GitHub's API to the names CodeQL uses
export function parseLanguage(language: string): Language | undefined {
// Normalise to lower case
language = language.toLowerCase();
// See if it's an exact match
if (language in Language) {
return language as Language;
}
// Check language aliases
if (language in LANGUAGE_ALIASES) {
return LANGUAGE_ALIASES[language];
}
return undefined;
}
export function isTracedLanguage(language: Language): boolean {
return ['cpp', 'java', 'csharp'].includes(language);
}
export function isScannedLanguage(language: Language): boolean {
return !isTracedLanguage(language);
}
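A minimal usage sketch of these helpers (illustrative only, not part of the commit; assumes the module is imported as './languages'):

import { isTracedLanguage, parseLanguage } from './languages';

// Aliases and casing are normalised before the lookup.
const lang = parseLanguage('C++'); // Language.cpp
if (lang !== undefined && isTracedLanguage(lang)) {
  // cpp, java and csharp need the build tracer; go, javascript and python are scanned directly.
}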

26
src/logging.ts Normal file
View File

@@ -0,0 +1,26 @@
import * as core from '@actions/core';
export interface Logger {
debug: (message: string) => void;
info: (message: string) => void;
warning: (message: string) => void;
error: (message: string) => void;
startGroup: (name: string) => void;
endGroup: () => void;
}
export function getActionsLogger(): Logger {
return core;
}
export function getCLILogger(): Logger {
return {
debug: console.debug,
info: console.info,
warning: console.warn,
error: console.error,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
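The Logger interface is the only contract shared by the Actions and CLI code paths, so embedders can also supply their own implementation. A hypothetical sketch (not part of this commit):

import { Logger } from './logging';

// A logger that prefixes every message, e.g. when embedding the library in another tool.
export function getPrefixedLogger(prefix: string): Logger {
  return {
    debug: message => console.debug(prefix + message),
    info: message => console.info(prefix + message),
    warning: message => console.warn(prefix + message),
    error: message => console.error(prefix + message),
    startGroup: () => undefined,
    endGroup: () => undefined,
  };
}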

16
src/repository.ts Normal file
View File

@@ -0,0 +1,16 @@
// A repository name with owner, parsed into its two parts
export interface RepositoryNwo {
owner: string;
repo: string;
}
export function parseRepositoryNwo(input: string): RepositoryNwo {
const parts = input.split('/');
if (parts.length !== 2) {
throw new Error(`"${input}" is not a valid repository name`);
}
return {
owner: parts[0],
repo: parts[1],
};
}
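For example (an illustrative caller, mirroring how the action entry points pass GITHUB_REPOSITORY):

import { parseRepositoryNwo } from './repository';

const nwo = parseRepositoryNwo('github/codeql-action');
// nwo.owner === 'github', nwo.repo === 'codeql-action'
// parseRepositoryNwo('not-a-repo') throws: '"not-a-repo" is not a valid repository name'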

View File

@@ -1,13 +1,12 @@
import * as core from '@actions/core';
import * as exec from '@actions/exec';
import * as io from '@actions/io';
import * as fs from 'fs';
import * as path from 'path';
import * as analysisPaths from './analysis-paths';
import { CodeQL, setupCodeQL } from './codeql';
import * as configUtils from './config-utils';
import * as sharedEnv from './shared-environment';
import { isTracedLanguage } from './languages';
import * as util from './util';
type TracerConfig = {
@@ -54,7 +53,7 @@ async function tracerConfig(
return info;
}
function concatTracerConfigs(configs: { [lang: string]: TracerConfig }): TracerConfig {
function concatTracerConfigs(tracerConfigs: TracerConfig[], config: configUtils.Config): TracerConfig {
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
// A tracer 'spec' file has the following format: [log_file, number_of_blocks, blocks_text]
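// Illustrative example of the spec file shape this function reads and writes
// (paths are hypothetical, not taken from this change):
//   line 0: /tmp/compound-build-tracer.log   (the log file)
//   line 1: 2                                (the number of blocks)
//   line 2+: the blocks' text, concatenated
// The combined spec written below follows the same [log_file, number_of_blocks, blocks_text] layout.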
@@ -62,7 +61,7 @@ function concatTracerConfigs(configs: { [lang: string]: TracerConfig }): TracerC
const env: { [key: string]: string; } = {};
let copyExecutables = false;
let envSize = 0;
for (let v of Object.values(configs)) {
for (const v of tracerConfigs) {
for (let e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
@@ -82,7 +81,7 @@ function concatTracerConfigs(configs: { [lang: string]: TracerConfig }): TracerC
}
// Concatenate spec files into a new spec file
let languages = Object.keys(configs);
let languages = Object.keys(tracerConfigs);
const cppIndex = languages.indexOf('cpp');
// Make sure cpp is the last language, if it's present, since it must be concatenated last
if (cppIndex !== -1) {
@@ -94,16 +93,15 @@ function concatTracerConfigs(configs: { [lang: string]: TracerConfig }): TracerC
let totalLines: string[] = [];
let totalCount = 0;
for (let lang of languages) {
const lines = fs.readFileSync(configs[lang].spec, 'utf8').split(/\r?\n/);
const lines = fs.readFileSync(tracerConfigs[lang].spec, 'utf8').split(/\r?\n/);
const count = parseInt(lines[1], 10);
totalCount += count;
totalLines.push(...lines.slice(2));
}
const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
const spec = path.resolve(tempFolder, 'compound-spec');
const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
const newLogFilePath = path.resolve(config.tempDir, 'compound-build-tracer.log');
const spec = path.resolve(config.tempDir, 'compound-spec');
const compoundTempFolder = path.resolve(config.tempDir, 'compound-temp');
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
if (copyExecutables) {
@@ -177,8 +175,8 @@ async function run() {
let codeql: CodeQL;
try {
if (util.should_abort('init', false) ||
!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
return;
}
@@ -188,12 +186,16 @@ async function run() {
core.endGroup();
core.startGroup('Load language configuration');
config = await configUtils.initConfig();
config = await configUtils.initConfig(
util.getRequiredEnvParam('RUNNER_TEMP'),
util.getRequiredEnvParam('RUNNER_TOOL_CACHE'),
codeql);
analysisPaths.includeAndExcludeAnalysisPaths(config);
core.endGroup();
} catch (e) {
core.setFailed(e.message);
console.log(e);
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
return;
}
@@ -213,11 +215,10 @@ async function run() {
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
core.exportVariable('CODEQL_RAM', codeqlRam);
const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
await io.mkdirP(databaseFolder);
const databaseFolder = util.getCodeQLDatabasesDir(config.tempDir);
fs.mkdirSync(databaseFolder, { recursive: true });
let tracedLanguages: { [key: string]: TracerConfig } = {};
let scannedLanguages: string[] = [];
let tracedLanguageConfigs: TracerConfig[] = [];
// TODO: replace this code once CodeQL supports multi-language tracing
for (let language of config.languages) {
const languageDatabase = path.join(databaseFolder, language);
@@ -225,48 +226,40 @@ async function run() {
// Init language database
await codeql.databaseInit(languageDatabase, language, sourceRoot);
// TODO: add better detection of 'traced languages' instead of using a hard coded list
if (['cpp', 'java', 'csharp'].includes(language)) {
if (isTracedLanguage(language)) {
const config: TracerConfig = await tracerConfig(codeql, languageDatabase);
tracedLanguages[language] = config;
} else {
scannedLanguages.push(language);
tracedLanguageConfigs.push(config);
}
}
const tracedLanguageKeys = Object.keys(tracedLanguages);
if (tracedLanguageKeys.length > 0) {
const mainTracerConfig = concatTracerConfigs(tracedLanguages);
if (tracedLanguageConfigs.length > 0) {
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
if (mainTracerConfig.spec) {
for (let entry of Object.entries(mainTracerConfig.env)) {
core.exportVariable(entry[0], entry[1]);
}
core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === 'darwin') {
core.exportVariable(
'DYLD_INSERT_LIBRARIES',
path.join(codeql.getDir(), 'tools', 'osx64', 'libtrace.dylib'));
path.join(codeQLDir, 'tools', 'osx64', 'libtrace.dylib'));
} else if (process.platform === 'win32') {
await exec.exec(
'powershell',
[
path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
path.resolve(codeql.getDir(), 'tools', 'win64', 'tracer.exe'),
path.resolve(codeQLDir, 'tools', 'win64', 'tracer.exe'),
],
{ env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
} else {
core.exportVariable('LD_PRELOAD', path.join(codeql.getDir(), 'tools', 'linux64', '${LIB}trace.so'));
core.exportVariable('LD_PRELOAD', path.join(codeQLDir, 'tools', 'linux64', '${LIB}trace.so'));
}
}
}
core.exportVariable(sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES, scannedLanguages.join(','));
core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));
// TODO: make this a "private" environment variable of the action
core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
} catch (error) {
core.setFailed(error.message);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase(
'init',
'failure',
@@ -276,7 +269,6 @@ async function run() {
return;
}
await sendSuccessStatusReport(startedAt, config);
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
}
run().catch(e => {

View File

@@ -1,13 +1,7 @@
export const CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
export const CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
export const ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
export const CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
export const CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather than the init action.
export const CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
// Populated when the init action completes successfully
export const CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';

22
src/testdata/empty-sarif.sarif vendored Normal file
View File

@@ -0,0 +1,22 @@
{
"$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
"version": "2.1.0",
"runs": [
{
"tool": {
"driver": {
"name": "LGTM.com",
"organization": "Semmle",
"version": "1.24.0-SNAPSHOT",
"rules": []
}
},
"results": [],
"columnKind": "utf16CodeUnits",
"properties": {
"semmle.formatSpecifier": "2.1.0",
"semmle.sourceLanguage": "java"
}
}
]
}

View File

@@ -55,6 +55,10 @@ export function setupTests(test: TestInterface<any>) {
// process.env only has strings fields, so a shallow copy is fine.
t.context.env = {};
Object.assign(t.context.env, process.env);
// Any test that runs code that expects to run only on Actions
// will depend on various environment variables.
process.env['GITHUB_API_URL'] = 'https://github.localhost/api/v3';
});
typedTest.afterEach.always(t => {

View File

@@ -1,5 +1,6 @@
import test from 'ava';
import { getCLILogger } from './logging';
import {setupTests} from './testing-utils';
import * as uploadLib from './upload-lib';
@@ -7,10 +8,10 @@ setupTests(test);
test('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile));
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile, getCLILogger()));
});
test('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
t.throws(() => uploadLib.validateSarifFileSchema(inputFile));
t.throws(() => uploadLib.validateSarifFileSchema(inputFile, getCLILogger()));
});

View File

@@ -7,9 +7,13 @@ import zlib from 'zlib';
import * as api from './api-client';
import * as fingerprints from './fingerprints';
import { Logger } from './logging';
import { RepositoryNwo } from './repository';
import * as sharedEnv from './shared-environment';
import * as util from './util';
type UploadMode = 'actions' | 'cli';
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
export function combineSarifFiles(sarifFiles: string[]): string {
@@ -35,8 +39,15 @@ export function combineSarifFiles(sarifFiles: string[]): string {
// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload) {
core.info('Uploading results');
async function uploadPayload(
payload: any,
repositoryNwo: RepositoryNwo,
githubAuth: string,
githubApiUrl: string,
mode: UploadMode,
logger: Logger) {
logger.info('Uploading results');
// If in test mode we don't want to upload the results
const testMode = process.env['TEST_MODE'] === 'true' || false;
@@ -44,26 +55,29 @@ async function uploadPayload(payload) {
return;
}
const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
// Make up to 4 attempts to upload, and sleep for these
// number of seconds between each attempt.
// We don't want to back off for too long, to avoid wasting action
// minutes, but a short wait may help.
const backoffPeriods = [1, 5, 15];
const client = api.getApiClient(githubAuth, githubApiUrl);
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
const response = await api.getApiClient().request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
owner: owner,
repo: repo,
const reqURL = mode === 'actions'
? 'PUT /repos/:owner/:repo/code-scanning/analysis'
: 'POST /repos/:owner/:repo/code-scanning/sarifs';
const response = await client.request(reqURL, ({
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
}));
core.debug('response status: ' + response.status);
logger.debug('response status: ' + response.status);
const statusCode = response.status;
if (statusCode === 202) {
core.info("Successfully uploaded results");
logger.info("Successfully uploaded results");
return;
}
@@ -77,7 +91,7 @@ async function uploadPayload(payload) {
// On a 5xx status code we may retry the request
if (attempt < backoffPeriods.length) {
// Log the failure as a warning but don't mark the action as failed yet
core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
logger.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
// Sleep for the backoff period
@@ -109,18 +123,51 @@ export interface UploadStatusReport {
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns a status report describing the upload
export async function upload(input: string): Promise<UploadStatusReport> {
if (fs.lstatSync(input).isDirectory()) {
const sarifFiles = fs.readdirSync(input)
.filter(f => f.endsWith(".sarif"))
.map(f => path.resolve(input, f));
if (sarifFiles.length === 0) {
throw new Error("No SARIF files found to upload in \"" + input + "\".");
}
return await uploadFiles(sarifFiles);
} else {
return await uploadFiles([input]);
export async function upload(
sarifPath: string,
repositoryNwo: RepositoryNwo,
commitOid: string,
ref: string,
analysisKey: string | undefined,
analysisName: string | undefined,
workflowRunID: number | undefined,
checkoutPath: string,
environment: string | undefined,
githubAuth: string,
githubApiUrl: string,
mode: UploadMode,
logger: Logger): Promise<UploadStatusReport> {
const sarifFiles: string[] = [];
if (!fs.existsSync(sarifPath)) {
throw new Error(`Path does not exist: ${sarifPath}`);
}
if (fs.lstatSync(sarifPath).isDirectory()) {
fs.readdirSync(sarifPath)
.filter(f => f.endsWith(".sarif"))
.map(f => path.resolve(sarifPath, f))
.forEach(f => sarifFiles.push(f));
if (sarifFiles.length === 0) {
throw new Error("No SARIF files found to upload in \"" + sarifPath + "\".");
}
} else {
sarifFiles.push(sarifPath);
}
return await uploadFiles(
sarifFiles,
repositoryNwo,
commitOid,
ref,
analysisKey,
analysisName,
workflowRunID,
checkoutPath,
environment,
githubAuth,
githubApiUrl,
mode,
logger);
}
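// Illustrative only: a CLI-mode caller would supply every value explicitly rather than
// reading Actions inputs, roughly like this (names are hypothetical):
//
//   await upload(
//     '/path/to/results.sarif',
//     { owner: 'github', repo: 'example' },
//     commitOid, ref,
//     undefined, undefined, undefined,   // analysisKey, analysisName, workflowRunID
//     checkoutPath,
//     undefined,                         // environment
//     githubAuth, githubApiUrl,
//     'cli',
//     getCLILogger());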
// Counts the number of results in the given SARIF file
@@ -134,17 +181,17 @@ export function countResultsInSarif(sarif: string): number {
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
export function validateSarifFileSchema(sarifFilePath: string) {
export function validateSarifFileSchema(sarifFilePath: string, logger: Logger) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
const schema = require('../src/sarif_v2.1.0_schema.json');
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
core.startGroup("Error details: " + error.stack);
core.info(JSON.stringify(error, null, 2));
core.endGroup();
logger.startGroup("Error details: " + error.stack);
logger.info(JSON.stringify(error, null, 2));
logger.endGroup();
}
// Set the main error message to the stacks of all the errors.
@@ -156,72 +203,79 @@ export function validateSarifFileSchema(sarifFilePath: string) {
// Uploads the given set of sarif files.
// Returns a status report describing the upload
async function uploadFiles(sarifFiles: string[]): Promise<UploadStatusReport> {
core.startGroup("Uploading results");
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
async function uploadFiles(
sarifFiles: string[],
repositoryNwo: RepositoryNwo,
commitOid: string,
ref: string,
analysisKey: string | undefined,
analysisName: string | undefined,
workflowRunID: number | undefined,
checkoutPath: string,
environment: string | undefined,
githubAuth: string,
githubApiUrl: string,
mode: UploadMode,
logger: Logger): Promise<UploadStatusReport> {
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
logger.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
if (mode === 'actions') {
// This check only works on actions as env vars don't persist between calls to the CLI
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file);
validateSarifFileSchema(file, logger);
}
const commitOid = await util.getCommitOid();
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRef();
const analysisKey = await util.getAnalysisKey();
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
sarifPayload = fingerprints.addFingerprints(sarifPayload, logger);
const zipped_sarif = zlib.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = fileUrl(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
}
let matrix: string | undefined = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const toolNames = util.getToolNames(sarifPayload);
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt,
"tool_names": toolNames,
});
let payload: string;
if (mode === 'actions') {
payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": environment,
"started_at": process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
"tool_names": toolNames,
});
} else {
payload = JSON.stringify({
"commit_sha": commitOid,
"ref": ref,
"sarif": zipped_sarif,
"checkout_uri": checkoutURI,
"tool_name": toolNames[0],
});
}
// Log some useful debug info about the upload
const rawUploadSizeBytes = sarifPayload.length;
core.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
logger.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
const zippedUploadSizeBytes = zipped_sarif.length;
core.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
logger.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
const numResultInSarif = countResultsInSarif(sarifPayload);
core.debug("Number of results in upload: " + numResultInSarif);
logger.debug("Number of results in upload: " + numResultInSarif);
// Make the upload
await uploadPayload(payload);
core.endGroup();
await uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, mode, logger);
return {
raw_upload_size_bytes: rawUploadSizeBytes,

View File

@@ -1,5 +1,7 @@
import * as core from '@actions/core';
import { getActionsLogger } from './logging';
import { parseRepositoryNwo } from './repository';
import * as upload_lib from './upload-lib';
import * as util from './util';
@@ -16,17 +18,30 @@ async function sendSuccessStatusReport(startedAt: Date, uploadStats: upload_lib.
async function run() {
const startedAt = new Date();
if (util.should_abort('upload-sarif', false) ||
!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
if (!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
return;
}
try {
const uploadStats = await upload_lib.upload(core.getInput('sarif_file'));
const uploadStats = await upload_lib.upload(
core.getInput('sarif_file'),
parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')),
await util.getCommitOid(),
util.getRef(),
await util.getAnalysisKey(),
util.getRequiredEnvParam('GITHUB_WORKFLOW'),
util.getWorkflowRunID(),
core.getInput('checkout_path'),
core.getInput('matrix'),
core.getInput('token'),
util.getRequiredEnvParam('GITHUB_API_URL'),
'actions',
getActionsLogger());
await sendSuccessStatusReport(startedAt, uploadStats);
} catch (error) {
core.setFailed(error.message);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase(
'upload-sarif',
'failure',

View File

@@ -67,3 +67,86 @@ test('getRef() throws on the empty string', t => {
process.env["GITHUB_REF"] = "";
t.throws(util.getRef);
});
test('isLocalRun() runs correctly', t => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = '';
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'false';
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = '0';
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'true';
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'hucairz';
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
test('prepareEnvironment() when a local run', t => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = 'false';
process.env.GITHUB_JOB = 'YYY';
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
process.env.CODEQL_LOCAL_RUN = 'true';
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
process.env.GITHUB_JOB = '';
util.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, 'UNKNOWN-JOB');
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
test('getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)', t => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
const options = {foo: 42};
process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options);
t.deepEqual(util.getExtraOptionsEnvParam(), <any>options);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});
test('getExtraOptionsEnvParam() succeeds on valid options', t => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
const options = { database: { init: ["--debug"] } };
process.env.CODEQL_ACTION_EXTRA_OPTIONS =
JSON.stringify(options);
t.deepEqual(util.getExtraOptionsEnvParam(), options);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});
test('getExtraOptionsEnvParam() fails on invalid JSON', t => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
process.env.CODEQL_ACTION_EXTRA_OPTIONS = "{{invalid-json}}";
t.throws(util.getExtraOptionsEnvParam);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});

View File

@@ -8,28 +8,23 @@ import * as api from './api-client';
import * as sharedEnv from './shared-environment';
/**
* Should the current action be aborted?
*
* This method should be called at the start of all CodeQL actions and they
* should abort cleanly if this returns true without failing the action.
* This method will call `core.setFailed` if necessary.
* The API URL for github.com.
*/
export function should_abort(actionName: string, requireInitActionHasRun: boolean): boolean {
export const GITHUB_DOTCOM_API_URL = "https://api.github.com";
// Check that required aspects of the environment are present
const ref = process.env['GITHUB_REF'];
if (ref === undefined) {
core.setFailed('GITHUB_REF must be set.');
return true;
}
/**
* Get the API URL for the GitHub instance we are connected to.
* May be for github.com or for an enterprise instance.
*/
export function getInstanceAPIURL(): string {
return process.env["GITHUB_API_URL"] || GITHUB_DOTCOM_API_URL;
}
// If the init action is required, then check that it completed successfully.
if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
return true;
}
return false;
/**
* Are we running against a GitHub Enterprise instance, as opposed to github.com?
*/
export function isEnterprise(): boolean {
return getInstanceAPIURL() !== GITHUB_DOTCOM_API_URL;
}
/**
@@ -44,6 +39,46 @@ export function getRequiredEnvParam(paramName: string): string {
return value;
}
/**
* Get the extra options for the codeql commands.
*/
export function getExtraOptionsEnvParam(): object {
const varName = 'CODEQL_ACTION_EXTRA_OPTIONS';
const raw = process.env[varName];
if (raw === undefined || raw.length === 0) {
return {};
}
try {
return JSON.parse(raw);
} catch (e) {
throw new Error(
varName +
' environment variable is set, but does not contain valid JSON: ' +
e.message
);
}
}
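// For example (mirroring the util tests in this commit), setting
//   CODEQL_ACTION_EXTRA_OPTIONS='{"database": {"init": ["--debug"]}}'
// makes this function return { database: { init: ['--debug'] } }.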
export function isLocalRun(): boolean {
return !!process.env.CODEQL_LOCAL_RUN
&& process.env.CODEQL_LOCAL_RUN !== 'false'
&& process.env.CODEQL_LOCAL_RUN !== '0';
}
/**
* Ensures all required environment variables are set in the context of a local run.
*/
export function prepareLocalRunEnvironment() {
if (!isLocalRun()) {
return;
}
core.debug('Action is running locally.');
if (!process.env.GITHUB_JOB) {
core.exportVariable('GITHUB_JOB', 'UNKNOWN-JOB');
}
}
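// Illustrative only: a local (non-Actions) run would typically export CODEQL_LOCAL_RUN=true
// before invoking the CLI entry points; this function then fills in a placeholder
// GITHUB_JOB of 'UNKNOWN-JOB' if none is set.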
/**
* Gets the SHA of the commit that is currently checked out.
*/
@@ -80,7 +115,7 @@ async function getWorkflowPath(): Promise<string> {
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam('GITHUB_RUN_ID'));
const apiClient = api.getApiClient();
const apiClient = api.getActionsApiClient();
const runsResponse = await apiClient.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
owner,
repo,
@@ -93,6 +128,17 @@ async function getWorkflowPath(): Promise<string> {
return workflowResponse.data.path;
}
/**
* Get the workflow run ID.
*/
export function getWorkflowRunID(): number {
const workflowRunID = parseInt(getRequiredEnvParam('GITHUB_RUN_ID'), 10);
if (Number.isNaN(workflowRunID)) {
throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
}
return workflowRunID;
}
/**
* Get the analysis key parameter for the current job.
*
@@ -101,7 +147,9 @@ async function getWorkflowPath(): Promise<string> {
* the github API, but after that the result will be cached.
*/
export async function getAnalysisKey(): Promise<string> {
let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
const analysisKeyEnvVar = 'CODEQL_ACTION_ANALYSIS_KEY';
let analysisKey = process.env[analysisKeyEnvVar];
if (analysisKey !== undefined) {
return analysisKey;
}
@@ -110,7 +158,7 @@ export async function getAnalysisKey(): Promise<string> {
const jobName = getRequiredEnvParam('GITHUB_JOB');
analysisKey = workflowPath + ':' + jobName;
core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
core.exportVariable(analysisKeyEnvVar, analysisKey);
return analysisKey;
}
@@ -248,13 +296,23 @@ export async function sendStatusReport<S extends StatusReportBase>(
statusReport: S,
ignoreFailures?: boolean): Promise<boolean> {
const statusReportJSON = JSON.stringify(statusReport);
if (isEnterprise()) {
core.debug("Not sending status report to GitHub Enterprise");
return true;
}
if (isLocalRun()) {
core.debug("Not sending status report because this is a local run");
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug('Sending status report: ' + statusReportJSON);
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const statusResponse = await api.getApiClient().request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
const client = api.getActionsApiClient();
const statusResponse = await client.request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
owner: owner,
repo: repo,
data: statusReportJSON,
@@ -337,27 +395,41 @@ export function getMemoryFlag(): string {
}
/**
* Get the codeql `--threads` value specified for the `threads` input. The value
* defaults to 1. The value will be capped to the number of available CPUs.
* Get the codeql `--threads` value specified for the `threads` input.
* If no value is specified, all available threads will be used.
*
* The value will be capped to the number of available CPUs.
*
* @returns string
*/
export function getThreadsFlag(): string {
let numThreads = 1;
let numThreads: number;
const numThreadsString = core.getInput("threads");
const maxThreads = os.cpus().length;
if (numThreadsString) {
numThreads = Number(numThreadsString);
if (Number.isNaN(numThreads)) {
throw new Error(`Invalid threads setting "${numThreadsString}", specified.`);
}
const maxThreads = os.cpus().length;
if (numThreads > maxThreads) {
core.info(`Clamping desired number of threads (${numThreads}) to max available (${maxThreads}).`);
numThreads = maxThreads;
}
const minThreads = -maxThreads;
if (numThreads < minThreads) {
core.info(`Clamping desired number of free threads (${numThreads}) to max available (${minThreads}).`);
numThreads = minThreads;
}
} else {
// Default to using all threads
numThreads = maxThreads;
}
return `--threads=${numThreads}`;
}
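// Worked example (illustrative) on an 8-CPU runner:
//   threads unset -> '--threads=8'  (all available threads)
//   threads '16'  -> '--threads=8'  (clamped to the CPU count)
//   threads '-16' -> '--threads=-8' (negative values are clamped symmetrically)
//   threads 'abc' -> throws: Invalid threads setting "abc", specified.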
/**
* Get the directory where CodeQL databases should be placed.
*/
export function getCodeQLDatabasesDir(tempDir: string) {
return path.resolve(tempDir, 'codeql_databases');
}